gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/** * Copyright 2014 Andreas Schildbach * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.dogecoindark.dogecoindarkj.utils; import static com.google.common.base.Preconditions.checkArgument; import java.io.Serializable; import java.math.BigDecimal; import com.dogecoindark.dogecoindarkj.core.Monetary; import com.google.common.math.LongMath; /** * Represents a monetary fiat value. It was decided to not fold this into {@link Coin} because of type safety. Fiat * values always come with an attached currency code. * * This class is immutable. */ public final class Fiat implements Monetary, Comparable<Fiat>, Serializable { /** * The absolute value of exponent of the value of a "smallest unit" in scientific notation. We picked 4 rather than * 2, because in financial applications it's common to use sub-cent precision. * Since a single DOGE has a low fiat value we need to up the precision here. */ public static final int SMALLEST_UNIT_EXPONENT = 8; /** * The number of smallest units of this monetary value. */ public final long value; public final String currencyCode; private Fiat(final String currencyCode, final long value) { this.value = value; this.currencyCode = currencyCode; } public static Fiat valueOf(final String currencyCode, final long value) { return new Fiat(currencyCode, value); } @Override public int smallestUnitExponent() { return SMALLEST_UNIT_EXPONENT; } /** * Returns the number of "smallest units" of this monetary value. 
*/ @Override public long getValue() { return value; } public String getCurrencyCode() { return currencyCode; } /** * Parses an amount expressed in the way humans are used to. * <p> * <p/> * This takes string in a format understood by {@link BigDecimal#BigDecimal(String)}, for example "0", "1", "0.10", * "1.23E3", "1234.5E-5". * * @throws IllegalArgumentException * if you try to specify fractional satoshis, or a value out of range. */ public static Fiat parseFiat(final String currencyCode, final String str) { try { long val = new BigDecimal(str).movePointRight(SMALLEST_UNIT_EXPONENT) .toBigIntegerExact().longValue(); return Fiat.valueOf(currencyCode, val); } catch (ArithmeticException e) { throw new IllegalArgumentException(e); } } public Fiat add(final Fiat value) { checkArgument(value.currencyCode.equals(currencyCode)); return new Fiat(currencyCode, LongMath.checkedAdd(this.value, value.value)); } public Fiat subtract(final Fiat value) { checkArgument(value.currencyCode.equals(currencyCode)); return new Fiat(currencyCode, LongMath.checkedSubtract(this.value, value.value)); } public Fiat multiply(final long factor) { return new Fiat(currencyCode, LongMath.checkedMultiply(this.value, factor)); } public Fiat divide(final long divisor) { return new Fiat(currencyCode, this.value / divisor); } public Fiat[] divideAndRemainder(final long divisor) { return new Fiat[] { new Fiat(currencyCode, this.value / divisor), new Fiat(currencyCode, this.value % divisor) }; } public long divide(final Fiat divisor) { checkArgument(divisor.currencyCode.equals(currencyCode)); return this.value / divisor.value; } /** * Returns true if and only if this instance represents a monetary value greater than zero, otherwise false. */ public boolean isPositive() { return signum() == 1; } /** * Returns true if and only if this instance represents a monetary value less than zero, otherwise false. 
*/ public boolean isNegative() { return signum() == -1; } /** * Returns true if and only if this instance represents zero monetary value, otherwise false. */ public boolean isZero() { return signum() == 0; } /** * Returns true if the monetary value represented by this instance is greater than that of the given other Coin, * otherwise false. */ public boolean isGreaterThan(Fiat other) { return compareTo(other) > 0; } /** * Returns true if the monetary value represented by this instance is less than that of the given other Coin, * otherwise false. */ public boolean isLessThan(Fiat other) { return compareTo(other) < 0; } @Override public int signum() { if (this.value == 0) return 0; return this.value < 0 ? -1 : 1; } public Fiat negate() { return new Fiat(currencyCode, -this.value); } /** * Returns the number of satoshis of this monetary value. It's deprecated in favour of accessing {@link #value} * directly. */ public long longValue() { return this.value; } private static final MonetaryFormat FRIENDLY_FORMAT = MonetaryFormat.FIAT.postfixCode(); /** * Returns the value as a 0.12 type string. More digits after the decimal place will be used if necessary, but two * will always be present. */ public String toFriendlyString() { return FRIENDLY_FORMAT.code(0, currencyCode).format(this).toString(); } private static final MonetaryFormat PLAIN_FORMAT = MonetaryFormat.FIAT.minDecimals(0).repeatOptionalDecimals(1, 4).noCode(); /** * <p> * Returns the value as a plain string denominated in BTC. The result is unformatted with no trailing zeroes. 
For * instance, a value of 150000 satoshis gives an output string of "0.0015" BTC * </p> */ public String toPlainString() { return PLAIN_FORMAT.format(this).toString(); } @Override public String toString() { return Long.toString(value); } @Override public boolean equals(final Object o) { if (o == this) return true; if (o == null || o.getClass() != getClass()) return false; final Fiat other = (Fiat) o; if (this.value != other.value) return false; if (!this.currencyCode.equals(other.currencyCode)) return false; return true; } @Override public int hashCode() { return (int) this.value + 37 * this.currencyCode.hashCode(); } @Override public int compareTo(final Fiat other) { if (!this.currencyCode.equals(other.currencyCode)) return this.currencyCode.compareTo(other.currencyCode); if (this.value != other.value) return this.value > other.value ? 1 : -1; return 0; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.common.util;

import org.apache.solr.common.EnumFieldValue;
import org.noggit.CharArr;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.SolrInputField;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.*;
import java.util.Map.Entry;
import java.nio.ByteBuffer;

/**
 * The class is designed to optimaly serialize/deserialize any supported types in Solr response. As we know there are
 * only a limited type of items this class can do it with very minimal amount of payload and code. There are 15 known
 * types and if there is an object in the object tree which does not fall into these types, It must be converted to one
 * of these. Implement an ObjectResolver and pass it over It is expected that this class is used on both end of the
 * pipes. The class has one read method and one write method for each of the datatypes
 * <p/>
 * Note -- Never re-use an instance of this class for more than one marshal or unmarshall operation. Always create a
 * new instance.
 */
public class JavaBinCodec {

  // Wire-format tag bytes. Values 0..19 occupy a full byte; tags at or above
  // TAG_AND_LEN pack the tag into the top 3 bits and a small size into the low 5 bits.
  public static final byte
          NULL = 0,
          BOOL_TRUE = 1,
          BOOL_FALSE = 2,
          BYTE = 3,
          SHORT = 4,
          DOUBLE = 5,
          INT = 6,
          LONG = 7,
          FLOAT = 8,
          DATE = 9,
          MAP = 10,
          SOLRDOC = 11,
          SOLRDOCLST = 12,
          BYTEARR = 13,
          ITERATOR = 14,
          /**
           * this is a special tag signals an end. No value is associated with it
           */
          END = 15,

          SOLRINPUTDOC = 16,
          SOLRINPUTDOC_CHILDS = 17,
          ENUM_FIELD_VALUE = 18,
          MAP_ENTRY = 19,

          // types that combine tag + length (or other info) in a single byte
          TAG_AND_LEN = (byte) (1 << 5),
          STR = (byte) (1 << 5),
          SINT = (byte) (2 << 5),
          SLONG = (byte) (3 << 5),
          ARR = (byte) (4 << 5), //
          ORDERED_MAP = (byte) (5 << 5), // SimpleOrderedMap (a NamedList subclass, and more common)
          NAMED_LST = (byte) (6 << 5), // NamedList
          EXTERN_STRING = (byte) (7 << 5);

  // Protocol version written as the first byte of every stream; unmarshal rejects anything else.
  private static byte VERSION = 2;
  // Optional hook that converts unknown object types into one of the supported ones.
  private ObjectResolver resolver;
  // Output stream for the current (single-use) marshal operation.
  protected FastOutputStream daos;

  public JavaBinCodec() {
  }

  public JavaBinCodec(ObjectResolver resolver) {
    this.resolver = resolver;
  }

  /** Serializes the given object tree to the stream, prefixed with the version byte. */
  public void marshal(Object nl, OutputStream os) throws IOException {
    init(FastOutputStream.wrap(os));
    try {
      daos.writeByte(VERSION);
      writeVal(nl);
    } finally {
      daos.flushBuffer();
    }
  }

  /** expert: sets a new output stream */
  public void init(FastOutputStream os) {
    daos = os;
  }

  // Version byte read from the stream during unmarshal.
  byte version;

  /** Deserializes one object tree from the stream; the first byte must equal VERSION. */
  public Object unmarshal(InputStream is) throws IOException {
    FastInputStream dis = FastInputStream.wrap(is);
    version = dis.readByte();
    if (version != VERSION) {
      throw new RuntimeException("Invalid version (expected " + VERSION +
          ", but " + version + ") or the data in not in 'javabin' format");
    }
    return readVal(dis);
  }


  /** Reads a SimpleOrderedMap: a size followed by that many (name, value) pairs. */
  public SimpleOrderedMap<Object> readOrderedMap(DataInputInputStream dis) throws IOException {
    int sz = readSize(dis);
    SimpleOrderedMap<Object> nl = new SimpleOrderedMap<Object>();
    for (int i = 0; i < sz; i++) {
      String name = (String) readVal(dis);
      Object val = readVal(dis);
      nl.add(name, val);
    }
    return nl;
  }

  /** Reads a NamedList: a size followed by that many (name, value) pairs. */
  public NamedList<Object> readNamedList(DataInputInputStream dis) throws IOException {
    int sz = readSize(dis);
    NamedList<Object> nl = new NamedList<Object>();
    for (int i = 0; i < sz; i++) {
      String name = (String) readVal(dis);
      Object val = readVal(dis);
      nl.add(name, val);
    }
    return nl;
  }

  /** Writes a NamedList, distinguishing SimpleOrderedMap by tag so the reader can rebuild the subtype. */
  public void writeNamedList(NamedList<?> nl) throws IOException {
    writeTag(nl instanceof SimpleOrderedMap ? ORDERED_MAP : NAMED_LST, nl.size());
    for (int i = 0; i < nl.size(); i++) {
      String name = nl.getName(i);
      writeExternString(name);
      Object val = nl.getVal(i);
      writeVal(val);
    }
  }

  /**
   * Writes any supported value. Unknown types are first offered to the resolver;
   * as a last resort the value is written as the string "className:toString".
   */
  public void writeVal(Object val) throws IOException {
    if (writeKnownType(val)) {
      return;
    } else {
      Object tmpVal = val;
      if (resolver != null) {
        tmpVal = resolver.resolve(val, this);
        if (tmpVal == null) return; // null means the resolver took care of it fully
        if (writeKnownType(tmpVal)) return;
      }
    }

    writeVal(val.getClass().getName() + ':' + val.toString());
  }

  // Sentinel object returned by readVal when the END tag is encountered (used by readIterator).
  protected static final Object END_OBJ = new Object();

  // Tag byte of the value currently being read; readSize/readSmallInt/readSmallLong
  // extract the embedded size/value bits from it.
  protected byte tagByte;

  /** Reads one tagged value. Tries the packed tag+size encodings first, then the full-byte tags. */
  public Object readVal(DataInputInputStream dis) throws IOException {
    tagByte = dis.readByte();

    // if ((tagByte & 0xe0) == 0) {
    // if top 3 bits are clear, this is a normal tag

    // OK, try type + size in single byte
    switch (tagByte >>> 5) {
      case STR >>> 5:
        return readStr(dis);
      case SINT >>> 5:
        return readSmallInt(dis);
      case SLONG >>> 5:
        return readSmallLong(dis);
      case ARR >>> 5:
        return readArray(dis);
      case ORDERED_MAP >>> 5:
        return readOrderedMap(dis);
      case NAMED_LST >>> 5:
        return readNamedList(dis);
      case EXTERN_STRING >>> 5:
        return readExternString(dis);
    }

    switch (tagByte) {
      case NULL:
        return null;
      case DATE:
        return new Date(dis.readLong());
      case INT:
        return dis.readInt();
      case BOOL_TRUE:
        return Boolean.TRUE;
      case BOOL_FALSE:
        return Boolean.FALSE;
      case FLOAT:
        return dis.readFloat();
      case DOUBLE:
        return dis.readDouble();
      case LONG:
        return dis.readLong();
      case BYTE:
        return dis.readByte();
      case SHORT:
        return dis.readShort();
      case MAP:
        return readMap(dis);
      case SOLRDOC:
        return readSolrDocument(dis);
      case SOLRDOCLST:
        return readSolrDocumentList(dis);
      case BYTEARR:
        return readByteArray(dis);
      case ITERATOR:
        return readIterator(dis);
      case END:
        return END_OBJ;
      case SOLRINPUTDOC:
        return readSolrInputDocument(dis);
      case ENUM_FIELD_VALUE:
        return readEnumFieldValue(dis);
      case MAP_ENTRY:
        return readMapEntry(dis);
    }

    throw new RuntimeException("Unknown type " + tagByte);
  }

  /** Writes val if it is one of the supported types; returns false if the type is unknown. */
  public boolean writeKnownType(Object val) throws IOException {
    if (writePrimitive(val)) return true;
    if (val instanceof NamedList) {
      writeNamedList((NamedList<?>) val);
      return true;
    }
    if (val instanceof SolrDocumentList) { // SolrDocumentList is a List, so must come before List check
      writeSolrDocumentList((SolrDocumentList) val);
      return true;
    }
    if (val instanceof Collection) {
      writeArray((Collection) val);
      return true;
    }
    if (val instanceof Object[]) {
      writeArray((Object[]) val);
      return true;
    }
    if (val instanceof SolrDocument) {
      //this needs special treatment to know which fields are to be written
      if (resolver == null) {
        writeSolrDocument((SolrDocument) val);
      } else {
        Object retVal = resolver.resolve(val, this);
        if (retVal != null) {
          if (retVal instanceof SolrDocument) {
            writeSolrDocument((SolrDocument) retVal);
          } else {
            writeVal(retVal);
          }
        }
      }
      return true;
    }
    if (val instanceof SolrInputDocument) {
      writeSolrInputDocument((SolrInputDocument)val);
      return true;
    }
    if (val instanceof Map) {
      writeMap((Map) val);
      return true;
    }
    if (val instanceof Iterator) {
      writeIterator((Iterator) val);
      return true;
    }
    if (val instanceof Iterable) {
      writeIterator(((Iterable) val).iterator());
      return true;
    }
    if (val instanceof EnumFieldValue) {
      writeEnumFieldValue((EnumFieldValue) val);
      return true;
    }
    if (val instanceof Map.Entry) {
      writeMapEntry((Map.Entry)val);
      return true;
    }
    return false;
  }

  public void writeTag(byte tag) throws IOException {
    daos.writeByte(tag);
  }

  /**
   * Writes a tag together with a size. For packed tags (top 3 bits set) sizes below 0x1f
   * fit into the same byte; larger sizes write 0x1f plus a vint of the remainder.
   */
  public void writeTag(byte tag, int size) throws IOException {
    if ((tag & 0xe0) != 0) {
      if (size < 0x1f) {
        daos.writeByte(tag | size);
      } else {
        daos.writeByte(tag | 0x1f);
        writeVInt(size - 0x1f, daos);
      }
    } else {
      daos.writeByte(tag);
      writeVInt(size, daos);
    }
  }

  public void writeByteArray(byte[] arr, int offset, int len) throws IOException {
    writeTag(BYTEARR, len);
    daos.write(arr, offset, len);
  }

  public byte[] readByteArray(DataInputInputStream dis) throws IOException {
    byte[] arr = new byte[readVInt(dis)];
    dis.readFully(arr);
    return arr;
  }

  /** Writes a SolrDocument as SOLRDOC tag + an ORDERED_MAP of its fields. */
  public void writeSolrDocument(SolrDocument doc) throws IOException {
    writeTag(SOLRDOC);
    writeTag(ORDERED_MAP, doc.size());
    for (Map.Entry<String, Object> entry : doc) {
      String name = entry.getKey();
      writeExternString(name);
      Object val = entry.getValue();
      writeVal(val);
    }
  }

  /** Rebuilds a SolrDocument from the NamedList written by writeSolrDocument. */
  public SolrDocument readSolrDocument(DataInputInputStream dis) throws IOException {
    NamedList nl = (NamedList) readVal(dis);
    SolrDocument doc = new SolrDocument();
    for (int i = 0; i < nl.size(); i++) {
      String name = nl.getName(i);
      Object val = nl.getVal(i);
      doc.setField(name, val);
    }
    return doc;
  }

  /** Reads a SolrDocumentList: a 3-element header list (numFound, start, maxScore) then the docs. */
  public SolrDocumentList readSolrDocumentList(DataInputInputStream dis) throws IOException {
    SolrDocumentList solrDocs = new SolrDocumentList();
    List list = (List) readVal(dis);
    solrDocs.setNumFound((Long) list.get(0));
    solrDocs.setStart((Long) list.get(1));
    solrDocs.setMaxScore((Float) list.get(2));

    @SuppressWarnings("unchecked")
    List<SolrDocument> l = (List<SolrDocument>) readVal(dis);
    solrDocs.addAll(l);
    return solrDocs;
  }

  /** Writes a SolrDocumentList as SOLRDOCLST tag + header list + document array (see readSolrDocumentList). */
  public void writeSolrDocumentList(SolrDocumentList docs)
          throws IOException {
    writeTag(SOLRDOCLST);
    List<Number> l = new ArrayList<Number>(3);
    l.add(docs.getNumFound());
    l.add(docs.getStart());
    l.add(docs.getMaxScore());
    writeArray(l);
    writeArray(docs);
  }

  /**
   * Reads a SolrInputDocument: size, document boost, then per entry either an optional
   * per-field Float boost, a nested child SolrInputDocument, or a field name + value.
   */
  public SolrInputDocument readSolrInputDocument(DataInputInputStream dis) throws IOException {
    int sz = readVInt(dis);
    float docBoost = (Float)readVal(dis);
    SolrInputDocument sdoc = new SolrInputDocument();
    sdoc.setDocumentBoost(docBoost);
    for (int i = 0; i < sz; i++) {
      float boost = 1.0f;
      String fieldName;
      Object obj = readVal(dis); // could be a boost, a field name, or a child document
      if (obj instanceof Float) {
        boost = (Float)obj;
        fieldName = (String)readVal(dis);
      } else if (obj instanceof SolrInputDocument) {
        sdoc.addChildDocument((SolrInputDocument)obj);
        continue;
      } else {
        fieldName = (String)obj;
      }
      Object fieldVal = readVal(dis);
      sdoc.setField(fieldName, fieldVal, boost);
    }
    return sdoc;
  }

  /**
   * Writes a SolrInputDocument. The per-field boost is written only when it differs from 1.0f;
   * the reader distinguishes that optional Float from a field name by its runtime type.
   */
  public void writeSolrInputDocument(SolrInputDocument sdoc) throws IOException {
    List<SolrInputDocument> children = sdoc.getChildDocuments();
    int sz = sdoc.size() + (children==null ? 0 : children.size());
    writeTag(SOLRINPUTDOC, sz);
    writeFloat(sdoc.getDocumentBoost());
    for (SolrInputField inputField : sdoc.values()) {
      if (inputField.getBoost() != 1.0f) {
        writeFloat(inputField.getBoost());
      }
      writeExternString(inputField.getName());
      writeVal(inputField.getValue());
    }
    if (children != null) {
      for (SolrInputDocument child : sdoc.getChildDocuments()) {
        writeSolrInputDocument(child);
      }
    }
  }


  /** Reads a generic map: size followed by that many (key, value) pairs, preserving order. */
  public Map<Object,Object> readMap(DataInputInputStream dis)
          throws IOException {
    int sz = readVInt(dis);
    Map<Object,Object> m = new LinkedHashMap<Object,Object>();
    for (int i = 0; i < sz; i++) {
      Object key = readVal(dis);
      Object val = readVal(dis);
      m.put(key, val);

    }
    return m;
  }

  /** Streams an iterator of unknown length; terminated by the END sentinel rather than a size. */
  public void writeIterator(Iterator iter) throws IOException {
    writeTag(ITERATOR);
    while (iter.hasNext()) {
      writeVal(iter.next());
    }
    writeVal(END_OBJ);
  }

  /** Reads values until the END sentinel, collecting them into a list. */
  public List<Object> readIterator(DataInputInputStream fis) throws IOException {
    ArrayList<Object> l = new ArrayList<Object>();
    while (true) {
      Object o = readVal(fis);
      if (o == END_OBJ) break;
      l.add(o);
    }
    return l;
  }

  public void writeArray(List l) throws IOException {
    writeTag(ARR, l.size());
    for (int i = 0; i < l.size(); i++) {
      writeVal(l.get(i));
    }
  }

  public void writeArray(Collection coll) throws IOException {
    writeTag(ARR, coll.size());
    for (Object o : coll) {
      writeVal(o);
    }

  }

  public void writeArray(Object[] arr) throws IOException {
    writeTag(ARR, arr.length);
    for (int i = 0; i < arr.length; i++) {
      Object o = arr[i];
      writeVal(o);
    }
  }

  public List<Object> readArray(DataInputInputStream dis) throws IOException {
    int sz = readSize(dis);
    ArrayList<Object> l = new ArrayList<Object>(sz);
    for (int i = 0; i < sz; i++) {
      l.add(readVal(dis));
    }
    return l;
  }

  /**
   * write {@link EnumFieldValue} as tag+int value+string value
   * @param enumFieldValue to write
   */
  public void writeEnumFieldValue(EnumFieldValue enumFieldValue) throws IOException {
    writeTag(ENUM_FIELD_VALUE);
    writeInt(enumFieldValue.toInt());
    writeStr(enumFieldValue.toString());
  }

  public void writeMapEntry(Entry<Object,Object> val) throws IOException {
    writeTag(MAP_ENTRY);
    writeVal(val.getKey());
    writeVal(val.getValue());
  }

  /**
   * read {@link EnumFieldValue} (int+string) from input stream
   * @param dis data input stream
   * @return {@link EnumFieldValue}
   */
  public EnumFieldValue readEnumFieldValue(DataInputInputStream dis) throws IOException {
    Integer intValue = (Integer) readVal(dis);
    String stringValue = (String) readVal(dis);
    return new EnumFieldValue(intValue, stringValue);
  }

  /**
   * Reads a Map.Entry back as an immutable anonymous implementation.
   * NOTE(review): the returned entry does not override equals/hashCode — confirm callers
   * never rely on value-based comparison of these entries.
   */
  public Map.Entry<Object,Object> readMapEntry(DataInputInputStream dis) throws IOException {
    final Object key = readVal(dis);
    final Object value = readVal(dis);
    return new Map.Entry<Object,Object>() {

      @Override
      public Object getKey() {
        return key;
      }

      @Override
      public Object getValue() {
        return value;
      }

      @Override
      public String toString() {
        return "MapEntry[" + key.toString() + ":" + value.toString() + "]";
      }

      @Override
      public Object setValue(Object value) {
        throw new UnsupportedOperationException();
      }};
  }

  /**
   * write the string as tag+length, with length being the number of UTF-8 bytes
   */
  public void writeStr(String s) throws IOException {
    if (s == null) {
      writeTag(NULL);
      return;
    }
    int end = s.length();
    int maxSize = end * 4; // worst case: 4 UTF-8 bytes per UTF-16 char
    if (bytes == null || bytes.length < maxSize) bytes = new byte[maxSize];
    int sz = ByteUtils.UTF16toUTF8(s, 0, end, bytes, 0);

    writeTag(STR, sz);
    daos.write(bytes, 0, sz);
  }

  // Scratch buffer shared by writeStr/readStr; grows as needed. One reason instances
  // must not be used concurrently or reused across operations.
  byte[] bytes;
  // Scratch char buffer for UTF-8 -> UTF-16 decoding in readStr.
  CharArr arr = new CharArr();

  public String readStr(DataInputInputStream dis) throws IOException {
    int sz = readSize(dis);
    if (bytes == null || bytes.length < sz) bytes = new byte[sz];
    dis.readFully(bytes, 0, sz);
    arr.reset();
    ByteUtils.UTF8toUTF16(bytes, 0, sz, arr);
    return arr.toString();
  }

  /**
   * Writes an int. Positive values use the compact SINT form (low 4 bits in the tag byte,
   * 0x10 flag + vint for the rest); zero and negatives fall back to a full 4-byte INT.
   */
  public void writeInt(int val) throws IOException {
    if (val > 0) {
      int b = SINT | (val & 0x0f);

      if (val >= 0x0f) {
        b |= 0x10;
        daos.writeByte(b);
        writeVInt(val >>> 4, daos);
      } else {
        daos.writeByte(b);
      }

    } else {
      daos.writeByte(INT);
      daos.writeInt(val);
    }
  }

  /** Decodes the SINT form: low 4 bits from the tag byte, optional vint continuation. */
  public int readSmallInt(DataInputInputStream dis) throws IOException {
    int v = tagByte & 0x0F;
    if ((tagByte & 0x10) != 0)
      v = (readVInt(dis) << 4) | v;
    return v;
  }


  /**
   * Writes a long. Values whose top byte is clear use the compact SLONG form;
   * anything else (including all negatives) is a full 8-byte LONG.
   */
  public void writeLong(long val) throws IOException {
    if ((val & 0xff00000000000000L) == 0) {
      int b = SLONG | ((int) val & 0x0f);
      if (val >= 0x0f) {
        b |= 0x10;
        daos.writeByte(b);
        writeVLong(val >>> 4, daos);
      } else {
        daos.writeByte(b);
      }
    } else {
      daos.writeByte(LONG);
      daos.writeLong(val);
    }
  }

  /** Decodes the SLONG form: low 4 bits from the tag byte, optional vlong continuation. */
  public long readSmallLong(DataInputInputStream dis) throws IOException {
    long v = tagByte & 0x0F;
    if ((tagByte & 0x10) != 0)
      v = (readVLong(dis) << 4) | v;
    return v;
  }

  public void writeFloat(float val) throws IOException {
    daos.writeByte(FLOAT);
    daos.writeFloat(val);
  }

  /** Writes primitives (null, String, Number, Date, Boolean, byte[], ByteBuffer, END); returns false otherwise. */
  public boolean writePrimitive(Object val) throws IOException {
    if (val == null) {
      daos.writeByte(NULL);
      return true;
    } else if (val instanceof String) {
      writeStr((String) val);
      return true;
    } else if (val instanceof Number) {

      if (val instanceof Integer) {
        writeInt(((Integer) val).intValue());
        return true;
      } else if (val instanceof Long) {
        writeLong(((Long) val).longValue());
        return true;
      } else if (val instanceof Float) {
        writeFloat(((Float) val).floatValue());
        return true;
      } else if (val instanceof Double) {
        daos.writeByte(DOUBLE);
        daos.writeDouble(((Double) val).doubleValue());
        return true;
      } else if (val instanceof Byte) {
        daos.writeByte(BYTE);
        daos.writeByte(((Byte) val).intValue());
        return true;
      } else if (val instanceof Short) {
        daos.writeByte(SHORT);
        daos.writeShort(((Short) val).intValue());
        return true;
      }
      // other Number subclasses (e.g. BigDecimal) are not supported here
      return false;

    } else if (val instanceof Date) {
      daos.writeByte(DATE);
      daos.writeLong(((Date) val).getTime());
      return true;
    } else if (val instanceof Boolean) {
      if ((Boolean) val) daos.writeByte(BOOL_TRUE);
      else daos.writeByte(BOOL_FALSE);
      return true;
    } else if (val instanceof byte[]) {
      writeByteArray((byte[]) val, 0, ((byte[]) val).length);
      return true;
    } else if (val instanceof ByteBuffer) {
      ByteBuffer buf = (ByteBuffer) val;
      // writes only the remaining bytes; does not consume the buffer's position
      writeByteArray(buf.array(),buf.position(),buf.limit() - buf.position());
      return true;
    } else if (val == END_OBJ) {
      writeTag(END);
      return true;
    }
    return false;
  }

  /** Writes a generic map; String keys go through the extern-string table for deduplication. */
  public void writeMap(Map<?,?> val) throws IOException {
    writeTag(MAP, val.size());
    for (Map.Entry<?,?> entry : val.entrySet()) {
      Object key = entry.getKey();
      if (key instanceof String) {
        writeExternString((String) key);
      } else {
        writeVal(key);
      }
      writeVal(entry.getValue());
    }
  }


  /** Extracts the size packed into the low 5 bits of tagByte; 0x1f means "plus a vint". */
  public int readSize(DataInputInputStream in) throws IOException {
    int sz = tagByte & 0x1f;
    if (sz == 0x1f) sz += readVInt(in);
    return sz;
  }


  /**
   * Special method for variable length int (copied from lucene). Usually used for writing the length of a
   * collection/array/map In most of the cases the length can be represented in one byte (length < 127) so it saves 3
   * bytes/object
   *
   * @throws IOException If there is a low-level I/O error.
   */
  public static void writeVInt(int i, FastOutputStream out) throws IOException {
    while ((i & ~0x7F) != 0) {
      out.writeByte((byte) ((i & 0x7f) | 0x80));
      i >>>= 7;
    }
    out.writeByte((byte) i);
  }

  /**
   * The counterpart for {@link #writeVInt(int, FastOutputStream)}
   *
   * @throws IOException If there is a low-level I/O error.
   */
  public static int readVInt(DataInputInputStream in) throws IOException {
    byte b = in.readByte();
    int i = b & 0x7F;
    for (int shift = 7; (b & 0x80) != 0; shift += 7) {
      b = in.readByte();
      i |= (b & 0x7F) << shift;
    }
    return i;
  }


  public static void writeVLong(long i, FastOutputStream out) throws IOException {
    while ((i & ~0x7F) != 0) {
      out.writeByte((byte) ((i & 0x7f) | 0x80));
      i >>>= 7;
    }
    out.writeByte((byte) i);
  }

  public static long readVLong(DataInputInputStream in) throws IOException {
    byte b = in.readByte();
    long i = b & 0x7F;
    for (int shift = 7; (b & 0x80) != 0; shift += 7) {
      b = in.readByte();
      i |= (long) (b & 0x7F) << shift;
    }
    return i;
  }

  // Extern-string table: repeated strings are written once, then referenced by 1-based index.
  // Writer side uses stringsMap/stringsCount; reader side rebuilds stringsList in arrival order.
  private int stringsCount = 0;
  private Map<String, Integer> stringsMap;
  private List<String> stringsList;

  /** Writes a string by back-reference if seen before, otherwise inline (and records it). */
  public void writeExternString(String s) throws IOException {
    if (s == null) {
      writeTag(NULL);
      return;
    }
    Integer idx = stringsMap == null ? null : stringsMap.get(s);
    if (idx == null) idx = 0;
    writeTag(EXTERN_STRING, idx);
    if (idx == 0) {
      writeStr(s);
      if (stringsMap == null) stringsMap = new HashMap<String, Integer>();
      stringsMap.put(s, ++stringsCount);
    }

  }

  /** Reads an extern string: index != 0 is a back-reference, index == 0 means a new inline string. */
  public String readExternString(DataInputInputStream fis) throws IOException {
    int idx = readSize(fis);
    if (idx != 0) {// idx != 0 is the index of the extern string
      return stringsList.get(idx - 1);
    } else {// idx == 0 means it has a string value
      String s = (String) readVal(fis);
      if (stringsList == null) stringsList = new ArrayList<String>();
      stringsList.add(s);
      return s;
    }
  }


  /** Converts unsupported object types into one of the types this codec can write. */
  public static interface ObjectResolver {
    public Object resolve(Object o, JavaBinCodec codec) throws IOException;
  }

}
/* * Copyright 2014 NAVER Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.navercorp.pinpoint.plugin.redis; import java.lang.reflect.Modifier; import java.security.ProtectionDomain; import com.navercorp.pinpoint.bootstrap.instrument.InstrumentClass; import com.navercorp.pinpoint.bootstrap.instrument.InstrumentException; import com.navercorp.pinpoint.bootstrap.instrument.InstrumentMethod; import com.navercorp.pinpoint.bootstrap.instrument.MethodFilters; import com.navercorp.pinpoint.bootstrap.instrument.PinpointInstrument; import com.navercorp.pinpoint.bootstrap.instrument.transformer.PinpointClassFileTransformer; import com.navercorp.pinpoint.bootstrap.logging.PLogger; import com.navercorp.pinpoint.bootstrap.logging.PLoggerFactory; import com.navercorp.pinpoint.bootstrap.plugin.ProfilerPlugin; import com.navercorp.pinpoint.bootstrap.plugin.ProfilerPluginSetupContext; /** * * @author jaehong.kim * */ public class RedisPlugin implements ProfilerPlugin, RedisConstants { private final PLogger logger = PLoggerFactory.getLogger(this.getClass()); @Override public void setup(ProfilerPluginSetupContext context) { final RedisPluginConfig config = new RedisPluginConfig(context.getConfig()); final boolean pipelineEnabled = config.isPipelineEnabled(); // jedis addJedisClassEditors(context, config); addProtocolClassEditor(context, config); if (pipelineEnabled) { // jedis pipeline addJedisClientClassEditor(context, config); addJedisPipelineClassEditors(context, config); } } 
// Jedis & BinaryJedis
    /**
     * Registers transformers for BinaryJedis (which gets the end-point
     * metadata field added) and for Jedis (which extends BinaryJedis, so no
     * extra field is needed there).
     */
    private void addJedisClassEditors(ProfilerPluginSetupContext context, RedisPluginConfig config) {
        addJedisExtendedClassEditor(context, config, "redis.clients.jedis.BinaryJedis", new TransformHandler() {
            @Override
            public void handle(InstrumentClass target) throws InstrumentException {
                target.addField(METADATA_END_POINT);
            }
        });

        // Jedis extends BinaryJedis
        addJedisExtendedClassEditor(context, config, "redis.clients.jedis.Jedis", null);
    }

    /**
     * Registers a transformer for {@code targetClassName} that applies the
     * optional {@code handler}, hooks every known constructor overload with
     * JedisConstructorInterceptor, and hooks each non-synthetic Jedis command
     * method with JedisMethodInterceptor.
     */
    private void addJedisExtendedClassEditor(ProfilerPluginSetupContext context, final RedisPluginConfig config, final String targetClassName, final TransformHandler handler) {
        context.addClassFileTransformer(targetClassName, new PinpointClassFileTransformer() {
            @Override
            public byte[] transform(PinpointInstrument instrumentContext, ClassLoader classLoader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) throws InstrumentException {
                InstrumentClass target = instrumentContext.getInstrumentClass(classLoader, className, classfileBuffer);

                // Type-specific setup (e.g. adding metadata fields).
                if (handler != null) {
                    handler.handle(target);
                }

                // Hook each constructor overload; getConstructor returns null
                // when an overload does not exist in the loaded Jedis version.
                final InstrumentMethod constructorEditorBuilderArg1 = target.getConstructor("java.lang.String");
                if (constructorEditorBuilderArg1 != null) {
                    constructorEditorBuilderArg1.addInterceptor("com.navercorp.pinpoint.plugin.redis.interceptor.JedisConstructorInterceptor");
                }

                final InstrumentMethod constructorEditorBuilderArg2 = target.getConstructor("java.lang.String", "int");
                if (constructorEditorBuilderArg2 != null) {
                    constructorEditorBuilderArg2.addInterceptor("com.navercorp.pinpoint.plugin.redis.interceptor.JedisConstructorInterceptor");
                }

                final InstrumentMethod constructorEditorBuilderArg3 = target.getConstructor("java.lang.String", "int", "int");
                if (constructorEditorBuilderArg3 != null) {
                    constructorEditorBuilderArg3.addInterceptor("com.navercorp.pinpoint.plugin.redis.interceptor.JedisConstructorInterceptor");
                }

                final InstrumentMethod constructorEditorBuilderArg4 = target.getConstructor("java.net.URI");
                if (constructorEditorBuilderArg4 != null) {
                    constructorEditorBuilderArg4.addInterceptor("com.navercorp.pinpoint.plugin.redis.interceptor.JedisConstructorInterceptor");
                }

                final InstrumentMethod constructorEditorBuilderArg5 = target.getConstructor("redis.clients.jedis.JedisShardInfo");
                if (constructorEditorBuilderArg5 != null) {
                    constructorEditorBuilderArg5.addInterceptor("com.navercorp.pinpoint.plugin.redis.interceptor.JedisConstructorInterceptor");
                }

                // Intercept every non-synthetic Jedis command method; a method
                // that cannot be instrumented is logged and skipped rather
                // than failing the whole transform.
                for (InstrumentMethod method : target.getDeclaredMethods(MethodFilters.chain(MethodFilters.name(JedisMethodNames.get()), MethodFilters.modifierNot(MethodFilters.SYNTHETIC)))) {
                    try {
                        method.addInterceptor("com.navercorp.pinpoint.plugin.redis.interceptor.JedisMethodInterceptor", config.isIo());
                    } catch (Exception e) {
                        if (logger.isWarnEnabled()) {
                            logger.warn("Unsupported method " + method, e);
                        }
                    }
                }

                return target.toBytecode();
            }
        });
    }

    // Client
    /**
     * Registers a transformer for redis.clients.jedis.Client: adds the
     * end-point metadata field and hooks both constructor overloads with
     * JedisClientConstructorInterceptor.
     */
    private void addJedisClientClassEditor(ProfilerPluginSetupContext context, RedisPluginConfig config) {
        context.addClassFileTransformer("redis.clients.jedis.Client", new PinpointClassFileTransformer() {
            @Override
            public byte[] transform(PinpointInstrument instrumentContext, ClassLoader classLoader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) throws InstrumentException {
                InstrumentClass target = instrumentContext.getInstrumentClass(classLoader, className, classfileBuffer);
                target.addField(METADATA_END_POINT);

                final InstrumentMethod constructorEditorBuilderArg1 = target.getConstructor("java.lang.String");
                if (constructorEditorBuilderArg1 != null) {
                    constructorEditorBuilderArg1.addInterceptor("com.navercorp.pinpoint.plugin.redis.interceptor.JedisClientConstructorInterceptor");
                }

                final InstrumentMethod constructorEditorBuilderArg2 = target.getConstructor("java.lang.String", "int");
                if (constructorEditorBuilderArg2 != null) {
                    constructorEditorBuilderArg2.addInterceptor("com.navercorp.pinpoint.plugin.redis.interceptor.JedisClientConstructorInterceptor");
                }

                return target.toBytecode();
            }
        });
    }

    /**
     * Registers a transformer for redis.clients.jedis.Protocol that hooks the
     * non-private sendCommand/read methods.
     */
    private void addProtocolClassEditor(ProfilerPluginSetupContext context, RedisPluginConfig config) {
        context.addClassFileTransformer("redis.clients.jedis.Protocol", new PinpointClassFileTransformer() {
            @Override
            public byte[] transform(PinpointInstrument instrumentContext, ClassLoader classLoader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) throws InstrumentException {
                InstrumentClass target = instrumentContext.getInstrumentClass(classLoader, className, classfileBuffer);

                // NOTE(review): unlike the Jedis/pipeline editors above and
                // below, addInterceptor here is not wrapped in try/catch, so a
                // single unsupported method would abort this transform —
                // confirm whether that is intentional.
                for (InstrumentMethod method : target.getDeclaredMethods(MethodFilters.chain(MethodFilters.name("sendCommand", "read"), MethodFilters.modifierNot(Modifier.PRIVATE)))) {
                    method.addInterceptor("com.navercorp.pinpoint.plugin.redis.interceptor.ProtocolSendCommandAndReadMethodInterceptor");
                }

                return target.toBytecode();
            }
        });
    }

    // Pipeline
    /**
     * Registers transformers for the pipeline class hierarchy. Only the
     * concrete Pipeline class gets the end-point metadata field and the
     * client-binding hooks; the two base classes are instrumented for their
     * command methods alone.
     */
    private void addJedisPipelineClassEditors(ProfilerPluginSetupContext context, RedisPluginConfig config) {
        addJedisPipelineBaseExtendedClassEditor(context, config, "redis.clients.jedis.PipelineBase", null);

        // MultiKeyPipelineBase extends PipelineBase
        addJedisPipelineBaseExtendedClassEditor(context, config, "redis.clients.jedis.MultiKeyPipelineBase", null);

        // Pipeline extends PipelineBase
        addJedisPipelineBaseExtendedClassEditor(context, config, "redis.clients.jedis.Pipeline", new TransformHandler() {
            @Override
            public void handle(InstrumentClass target) throws InstrumentException {
                target.addField(METADATA_END_POINT);

                final InstrumentMethod setClientMethodEditorBuilder = target.getDeclaredMethod("setClient", "redis.clients.jedis.Client");
                if (setClientMethodEditorBuilder != null) {
                    setClientMethodEditorBuilder.addInterceptor("com.navercorp.pinpoint.plugin.redis.interceptor.JedisPipelineSetClientMethodInterceptor");
                }

                final InstrumentMethod constructorEditorBuilder = target.getConstructor("redis.clients.jedis.Client");
                if (constructorEditorBuilder != null) {
                    constructorEditorBuilder.addInterceptor("com.navercorp.pinpoint.plugin.redis.interceptor.JedisPipelineConstructorInterceptor");
                }
            }
        });
    }

    /**
     * Registers a transformer for one pipeline class: applies the optional
     * {@code handler}, then intercepts every non-synthetic pipeline command
     * method with JedisPipelineMethodInterceptor (unsupported methods are
     * logged and skipped).
     */
    private void addJedisPipelineBaseExtendedClassEditor(ProfilerPluginSetupContext context, final RedisPluginConfig config, String targetClassName, final TransformHandler handler) {
        context.addClassFileTransformer(targetClassName, new PinpointClassFileTransformer() {
            @Override
            public byte[] transform(PinpointInstrument instrumentContext, ClassLoader classLoader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) throws InstrumentException {
                InstrumentClass target = instrumentContext.getInstrumentClass(classLoader, className, classfileBuffer);

                if (handler != null) {
                    handler.handle(target);
                }

                for (InstrumentMethod method : target.getDeclaredMethods(MethodFilters.chain(MethodFilters.name(JedisPipelineMethodNames.get()), MethodFilters.modifierNot(MethodFilters.SYNTHETIC)))) {
                    try {
                        method.addInterceptor("com.navercorp.pinpoint.plugin.redis.interceptor.JedisPipelineMethodInterceptor", config.isIo());
                    } catch (Exception e) {
                        if (logger.isWarnEnabled()) {
                            logger.warn("Unsupported method " + method, e);
                        }
                    }
                }

                return target.toBytecode();
            }
        });
    }

    // Optional per-class customization applied before the common hooks.
    private interface TransformHandler {
        void handle(InstrumentClass target) throws InstrumentException;
    }
}
/* * Copyright 2016 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.schemaorg.core; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Multimap; import com.google.schemaorg.SchemaOrgTypeImpl; import com.google.schemaorg.ValueType; import com.google.schemaorg.core.datatype.Date; import com.google.schemaorg.core.datatype.Text; import com.google.schemaorg.core.datatype.URL; import com.google.schemaorg.goog.GoogConstants; import com.google.schemaorg.goog.PopularityScoreSpecification; /** Implementation of {@link School}. 
*/
public class SchoolImpl extends EducationalOrganizationImpl implements School {
  // Every schema.org core (and goog.*) property URL accepted by the School
  // type; consulted by includesProperty.
  private static final ImmutableSet<String> PROPERTY_SET = initializePropertySet();

  // Enumerates the full property set once at class-initialization time.
  private static ImmutableSet<String> initializePropertySet() {
    ImmutableSet.Builder<String> builder = ImmutableSet.builder();
    builder.add(CoreConstants.PROPERTY_ADDITIONAL_TYPE);
    builder.add(CoreConstants.PROPERTY_ADDRESS);
    builder.add(CoreConstants.PROPERTY_AGGREGATE_RATING);
    builder.add(CoreConstants.PROPERTY_ALTERNATE_NAME);
    builder.add(CoreConstants.PROPERTY_ALUMNI);
    builder.add(CoreConstants.PROPERTY_AREA_SERVED);
    builder.add(CoreConstants.PROPERTY_AWARD);
    builder.add(CoreConstants.PROPERTY_AWARDS);
    builder.add(CoreConstants.PROPERTY_BRAND);
    builder.add(CoreConstants.PROPERTY_CONTACT_POINT);
    builder.add(CoreConstants.PROPERTY_CONTACT_POINTS);
    builder.add(CoreConstants.PROPERTY_DEPARTMENT);
    builder.add(CoreConstants.PROPERTY_DESCRIPTION);
    builder.add(CoreConstants.PROPERTY_DISSOLUTION_DATE);
    builder.add(CoreConstants.PROPERTY_DUNS);
    builder.add(CoreConstants.PROPERTY_EMAIL);
    builder.add(CoreConstants.PROPERTY_EMPLOYEE);
    builder.add(CoreConstants.PROPERTY_EMPLOYEES);
    builder.add(CoreConstants.PROPERTY_EVENT);
    builder.add(CoreConstants.PROPERTY_EVENTS);
    builder.add(CoreConstants.PROPERTY_FAX_NUMBER);
    builder.add(CoreConstants.PROPERTY_FOUNDER);
    builder.add(CoreConstants.PROPERTY_FOUNDERS);
    builder.add(CoreConstants.PROPERTY_FOUNDING_DATE);
    builder.add(CoreConstants.PROPERTY_FOUNDING_LOCATION);
    builder.add(CoreConstants.PROPERTY_GLOBAL_LOCATION_NUMBER);
    builder.add(CoreConstants.PROPERTY_HAS_OFFER_CATALOG);
    builder.add(CoreConstants.PROPERTY_HAS_POS);
    builder.add(CoreConstants.PROPERTY_IMAGE);
    builder.add(CoreConstants.PROPERTY_ISIC_V4);
    builder.add(CoreConstants.PROPERTY_LEGAL_NAME);
    builder.add(CoreConstants.PROPERTY_LOCATION);
    builder.add(CoreConstants.PROPERTY_LOGO);
    builder.add(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE);
    builder.add(CoreConstants.PROPERTY_MAKES_OFFER);
    builder.add(CoreConstants.PROPERTY_MEMBER);
    builder.add(CoreConstants.PROPERTY_MEMBER_OF);
    builder.add(CoreConstants.PROPERTY_MEMBERS);
    builder.add(CoreConstants.PROPERTY_NAICS);
    builder.add(CoreConstants.PROPERTY_NAME);
    builder.add(CoreConstants.PROPERTY_NUMBER_OF_EMPLOYEES);
    builder.add(CoreConstants.PROPERTY_OWNS);
    builder.add(CoreConstants.PROPERTY_PARENT_ORGANIZATION);
    builder.add(CoreConstants.PROPERTY_POTENTIAL_ACTION);
    builder.add(CoreConstants.PROPERTY_REVIEW);
    builder.add(CoreConstants.PROPERTY_REVIEWS);
    builder.add(CoreConstants.PROPERTY_SAME_AS);
    builder.add(CoreConstants.PROPERTY_SEEKS);
    builder.add(CoreConstants.PROPERTY_SERVICE_AREA);
    builder.add(CoreConstants.PROPERTY_SUB_ORGANIZATION);
    builder.add(CoreConstants.PROPERTY_TAX_ID);
    builder.add(CoreConstants.PROPERTY_TELEPHONE);
    builder.add(CoreConstants.PROPERTY_URL);
    builder.add(CoreConstants.PROPERTY_VAT_ID);
    builder.add(GoogConstants.PROPERTY_DETAILED_DESCRIPTION);
    builder.add(GoogConstants.PROPERTY_POPULARITY_SCORE);
    return builder.build();
  }

  // Builder for School instances. Generated-style code: each overload stores
  // its value under the property's constant URL; Builder overloads call
  // build() on the sub-builder first, and String overloads wrap the value in
  // Text. Kept one method per line to make the repetitive structure scannable.
  static final class BuilderImpl extends SchemaOrgTypeImpl.BuilderImpl<School.Builder> implements School.Builder {
    @Override public School.Builder addAdditionalType(URL value) { return addProperty(CoreConstants.PROPERTY_ADDITIONAL_TYPE, value); }
    @Override public School.Builder addAdditionalType(String value) { return addProperty(CoreConstants.PROPERTY_ADDITIONAL_TYPE, Text.of(value)); }
    @Override public School.Builder addAddress(PostalAddress value) { return addProperty(CoreConstants.PROPERTY_ADDRESS, value); }
    @Override public School.Builder addAddress(PostalAddress.Builder value) { return addProperty(CoreConstants.PROPERTY_ADDRESS, value.build()); }
    @Override public School.Builder addAddress(Text value) { return addProperty(CoreConstants.PROPERTY_ADDRESS, value); }
    @Override public School.Builder addAddress(String value) { return addProperty(CoreConstants.PROPERTY_ADDRESS, Text.of(value)); }
    @Override public School.Builder addAggregateRating(AggregateRating value) { return addProperty(CoreConstants.PROPERTY_AGGREGATE_RATING, value); }
    @Override public School.Builder addAggregateRating(AggregateRating.Builder value) { return addProperty(CoreConstants.PROPERTY_AGGREGATE_RATING, value.build()); }
    @Override public School.Builder addAggregateRating(String value) { return addProperty(CoreConstants.PROPERTY_AGGREGATE_RATING, Text.of(value)); }
    @Override public School.Builder addAlternateName(Text value) { return addProperty(CoreConstants.PROPERTY_ALTERNATE_NAME, value); }
    @Override public School.Builder addAlternateName(String value) { return addProperty(CoreConstants.PROPERTY_ALTERNATE_NAME, Text.of(value)); }
    @Override public School.Builder addAlumni(Person value) { return addProperty(CoreConstants.PROPERTY_ALUMNI, value); }
    @Override public School.Builder addAlumni(Person.Builder value) { return addProperty(CoreConstants.PROPERTY_ALUMNI, value.build()); }
    @Override public School.Builder addAlumni(String value) { return addProperty(CoreConstants.PROPERTY_ALUMNI, Text.of(value)); }
    @Override public School.Builder addAreaServed(AdministrativeArea value) { return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value); }
    @Override public School.Builder addAreaServed(AdministrativeArea.Builder value) { return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value.build()); }
    @Override public School.Builder addAreaServed(GeoShape value) { return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value); }
    @Override public School.Builder addAreaServed(GeoShape.Builder value) { return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value.build()); }
    @Override public School.Builder addAreaServed(Place value) { return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value); }
    @Override public School.Builder addAreaServed(Place.Builder value) { return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value.build()); }
    @Override public School.Builder addAreaServed(Text value) { return addProperty(CoreConstants.PROPERTY_AREA_SERVED, value); }
    @Override public School.Builder addAreaServed(String value) { return addProperty(CoreConstants.PROPERTY_AREA_SERVED, Text.of(value)); }
    @Override public School.Builder addAward(Text value) { return addProperty(CoreConstants.PROPERTY_AWARD, value); }
    @Override public School.Builder addAward(String value) { return addProperty(CoreConstants.PROPERTY_AWARD, Text.of(value)); }
    @Override public School.Builder addAwards(Text value) { return addProperty(CoreConstants.PROPERTY_AWARDS, value); }
    @Override public School.Builder addAwards(String value) { return addProperty(CoreConstants.PROPERTY_AWARDS, Text.of(value)); }
    @Override public School.Builder addBrand(Brand value) { return addProperty(CoreConstants.PROPERTY_BRAND, value); }
    @Override public School.Builder addBrand(Brand.Builder value) { return addProperty(CoreConstants.PROPERTY_BRAND, value.build()); }
    @Override public School.Builder addBrand(Organization value) { return addProperty(CoreConstants.PROPERTY_BRAND, value); }
    @Override public School.Builder addBrand(Organization.Builder value) { return addProperty(CoreConstants.PROPERTY_BRAND, value.build()); }
    @Override public School.Builder addBrand(String value) { return addProperty(CoreConstants.PROPERTY_BRAND, Text.of(value)); }
    @Override public School.Builder addContactPoint(ContactPoint value) { return addProperty(CoreConstants.PROPERTY_CONTACT_POINT, value); }
    @Override public School.Builder addContactPoint(ContactPoint.Builder value) { return addProperty(CoreConstants.PROPERTY_CONTACT_POINT, value.build()); }
    @Override public School.Builder addContactPoint(String value) { return addProperty(CoreConstants.PROPERTY_CONTACT_POINT, Text.of(value)); }
    @Override public School.Builder addContactPoints(ContactPoint value) { return addProperty(CoreConstants.PROPERTY_CONTACT_POINTS, value); }
    @Override public School.Builder addContactPoints(ContactPoint.Builder value) { return addProperty(CoreConstants.PROPERTY_CONTACT_POINTS, value.build()); }
    @Override public School.Builder addContactPoints(String value) { return addProperty(CoreConstants.PROPERTY_CONTACT_POINTS, Text.of(value)); }
    @Override public School.Builder addDepartment(Organization value) { return addProperty(CoreConstants.PROPERTY_DEPARTMENT, value); }
    @Override public School.Builder addDepartment(Organization.Builder value) { return addProperty(CoreConstants.PROPERTY_DEPARTMENT, value.build()); }
    @Override public School.Builder addDepartment(String value) { return addProperty(CoreConstants.PROPERTY_DEPARTMENT, Text.of(value)); }
    @Override public School.Builder addDescription(Text value) { return addProperty(CoreConstants.PROPERTY_DESCRIPTION, value); }
    @Override public School.Builder addDescription(String value) { return addProperty(CoreConstants.PROPERTY_DESCRIPTION, Text.of(value)); }
    @Override public School.Builder addDissolutionDate(Date value) { return addProperty(CoreConstants.PROPERTY_DISSOLUTION_DATE, value); }
    @Override public School.Builder addDissolutionDate(String value) { return addProperty(CoreConstants.PROPERTY_DISSOLUTION_DATE, Text.of(value)); }
    @Override public School.Builder addDuns(Text value) { return addProperty(CoreConstants.PROPERTY_DUNS, value); }
    @Override public School.Builder addDuns(String value) { return addProperty(CoreConstants.PROPERTY_DUNS, Text.of(value)); }
    @Override public School.Builder addEmail(Text value) { return addProperty(CoreConstants.PROPERTY_EMAIL, value); }
    @Override public School.Builder addEmail(String value) { return addProperty(CoreConstants.PROPERTY_EMAIL, Text.of(value)); }
    @Override public School.Builder addEmployee(Person value) { return addProperty(CoreConstants.PROPERTY_EMPLOYEE, value); }
    @Override public School.Builder addEmployee(Person.Builder value) { return addProperty(CoreConstants.PROPERTY_EMPLOYEE, value.build()); }
    @Override public School.Builder addEmployee(String value) { return addProperty(CoreConstants.PROPERTY_EMPLOYEE, Text.of(value)); }
    @Override public School.Builder addEmployees(Person value) { return addProperty(CoreConstants.PROPERTY_EMPLOYEES, value); }
    @Override public School.Builder addEmployees(Person.Builder value) { return addProperty(CoreConstants.PROPERTY_EMPLOYEES, value.build()); }
    @Override public School.Builder addEmployees(String value) { return addProperty(CoreConstants.PROPERTY_EMPLOYEES, Text.of(value)); }
    @Override public School.Builder addEvent(Event value) { return addProperty(CoreConstants.PROPERTY_EVENT, value); }
    @Override public School.Builder addEvent(Event.Builder value) { return addProperty(CoreConstants.PROPERTY_EVENT, value.build()); }
    @Override public School.Builder addEvent(String value) { return addProperty(CoreConstants.PROPERTY_EVENT, Text.of(value)); }
    @Override public School.Builder addEvents(Event value) { return addProperty(CoreConstants.PROPERTY_EVENTS, value); }
    @Override public School.Builder addEvents(Event.Builder value) { return addProperty(CoreConstants.PROPERTY_EVENTS, value.build()); }
    @Override public School.Builder addEvents(String value) { return addProperty(CoreConstants.PROPERTY_EVENTS, Text.of(value)); }
    @Override public School.Builder addFaxNumber(Text value) { return addProperty(CoreConstants.PROPERTY_FAX_NUMBER, value); }
    @Override public School.Builder addFaxNumber(String value) { return addProperty(CoreConstants.PROPERTY_FAX_NUMBER, Text.of(value)); }
    @Override public School.Builder addFounder(Person value) { return addProperty(CoreConstants.PROPERTY_FOUNDER, value); }
    @Override public School.Builder addFounder(Person.Builder value) { return addProperty(CoreConstants.PROPERTY_FOUNDER, value.build()); }
    @Override public School.Builder addFounder(String value) { return addProperty(CoreConstants.PROPERTY_FOUNDER, Text.of(value)); }
    @Override public School.Builder addFounders(Person value) { return addProperty(CoreConstants.PROPERTY_FOUNDERS, value); }
    @Override public School.Builder addFounders(Person.Builder value) { return addProperty(CoreConstants.PROPERTY_FOUNDERS, value.build()); }
    @Override public School.Builder addFounders(String value) { return addProperty(CoreConstants.PROPERTY_FOUNDERS, Text.of(value)); }
    @Override public School.Builder addFoundingDate(Date value) { return addProperty(CoreConstants.PROPERTY_FOUNDING_DATE, value); }
    @Override public School.Builder addFoundingDate(String value) { return addProperty(CoreConstants.PROPERTY_FOUNDING_DATE, Text.of(value)); }
    @Override public School.Builder addFoundingLocation(Place value) { return addProperty(CoreConstants.PROPERTY_FOUNDING_LOCATION, value); }
    @Override public School.Builder addFoundingLocation(Place.Builder value) { return addProperty(CoreConstants.PROPERTY_FOUNDING_LOCATION, value.build()); }
    @Override public School.Builder addFoundingLocation(String value) { return addProperty(CoreConstants.PROPERTY_FOUNDING_LOCATION, Text.of(value)); }
    @Override public School.Builder addGlobalLocationNumber(Text value) { return addProperty(CoreConstants.PROPERTY_GLOBAL_LOCATION_NUMBER, value); }
    @Override public School.Builder addGlobalLocationNumber(String value) { return addProperty(CoreConstants.PROPERTY_GLOBAL_LOCATION_NUMBER, Text.of(value)); }
    @Override public School.Builder addHasOfferCatalog(OfferCatalog value) { return addProperty(CoreConstants.PROPERTY_HAS_OFFER_CATALOG, value); }
    @Override public School.Builder addHasOfferCatalog(OfferCatalog.Builder value) { return addProperty(CoreConstants.PROPERTY_HAS_OFFER_CATALOG, value.build()); }
    @Override public School.Builder addHasOfferCatalog(String value) { return addProperty(CoreConstants.PROPERTY_HAS_OFFER_CATALOG, Text.of(value)); }
    @Override public School.Builder addHasPOS(Place value) { return addProperty(CoreConstants.PROPERTY_HAS_POS, value); }
    @Override public School.Builder addHasPOS(Place.Builder value) { return addProperty(CoreConstants.PROPERTY_HAS_POS, value.build()); }
    @Override public School.Builder addHasPOS(String value) { return addProperty(CoreConstants.PROPERTY_HAS_POS, Text.of(value)); }
    @Override public School.Builder addImage(ImageObject value) { return addProperty(CoreConstants.PROPERTY_IMAGE, value); }
    @Override public School.Builder addImage(ImageObject.Builder value) { return addProperty(CoreConstants.PROPERTY_IMAGE, value.build()); }
    @Override public School.Builder addImage(URL value) { return addProperty(CoreConstants.PROPERTY_IMAGE, value); }
    @Override public School.Builder addImage(String value) { return addProperty(CoreConstants.PROPERTY_IMAGE, Text.of(value)); }
    @Override public School.Builder addIsicV4(Text value) { return addProperty(CoreConstants.PROPERTY_ISIC_V4, value); }
    @Override public School.Builder addIsicV4(String value) { return addProperty(CoreConstants.PROPERTY_ISIC_V4, Text.of(value)); }
    @Override public School.Builder addLegalName(Text value) { return addProperty(CoreConstants.PROPERTY_LEGAL_NAME, value); }
    @Override public School.Builder addLegalName(String value) { return addProperty(CoreConstants.PROPERTY_LEGAL_NAME, Text.of(value)); }
    @Override public School.Builder addLocation(Place value) { return addProperty(CoreConstants.PROPERTY_LOCATION, value); }
    @Override public School.Builder addLocation(Place.Builder value) { return addProperty(CoreConstants.PROPERTY_LOCATION, value.build()); }
    @Override public School.Builder addLocation(PostalAddress value) { return addProperty(CoreConstants.PROPERTY_LOCATION, value); }
    @Override public School.Builder addLocation(PostalAddress.Builder value) { return addProperty(CoreConstants.PROPERTY_LOCATION, value.build()); }
    @Override public School.Builder addLocation(Text value) { return addProperty(CoreConstants.PROPERTY_LOCATION, value); }
    @Override public School.Builder addLocation(String value) { return addProperty(CoreConstants.PROPERTY_LOCATION, Text.of(value)); }
    @Override public School.Builder addLogo(ImageObject value) { return addProperty(CoreConstants.PROPERTY_LOGO, value); }
    @Override public School.Builder addLogo(ImageObject.Builder value) { return addProperty(CoreConstants.PROPERTY_LOGO, value.build()); }
    @Override public School.Builder addLogo(URL value) { return addProperty(CoreConstants.PROPERTY_LOGO, value); }
    @Override public School.Builder addLogo(String value) { return addProperty(CoreConstants.PROPERTY_LOGO, Text.of(value)); }
    @Override public School.Builder addMainEntityOfPage(CreativeWork value) { return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value); }
    @Override public School.Builder addMainEntityOfPage(CreativeWork.Builder value) { return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value.build()); }
    @Override public School.Builder addMainEntityOfPage(URL value) { return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value); }
    @Override public School.Builder addMainEntityOfPage(String value) { return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, Text.of(value)); }
    @Override public School.Builder addMakesOffer(Offer value) { return addProperty(CoreConstants.PROPERTY_MAKES_OFFER, value); }
    @Override public School.Builder addMakesOffer(Offer.Builder value) { return addProperty(CoreConstants.PROPERTY_MAKES_OFFER, value.build()); }
    @Override public School.Builder addMakesOffer(String value) { return addProperty(CoreConstants.PROPERTY_MAKES_OFFER, Text.of(value)); }
    @Override public School.Builder addMember(Organization value) { return addProperty(CoreConstants.PROPERTY_MEMBER, value); }
    @Override public School.Builder addMember(Organization.Builder value) { return addProperty(CoreConstants.PROPERTY_MEMBER, value.build()); }
    @Override public School.Builder addMember(Person value) { return addProperty(CoreConstants.PROPERTY_MEMBER, value); }
    @Override public School.Builder addMember(Person.Builder value) { return addProperty(CoreConstants.PROPERTY_MEMBER, value.build()); }
    @Override public School.Builder addMember(String value) { return addProperty(CoreConstants.PROPERTY_MEMBER, Text.of(value)); }
    @Override public School.Builder addMemberOf(Organization value) { return addProperty(CoreConstants.PROPERTY_MEMBER_OF, value); }
    @Override public School.Builder addMemberOf(Organization.Builder value) { return addProperty(CoreConstants.PROPERTY_MEMBER_OF, value.build()); }
    @Override public School.Builder addMemberOf(ProgramMembership value) { return addProperty(CoreConstants.PROPERTY_MEMBER_OF, value); }
    @Override public School.Builder addMemberOf(ProgramMembership.Builder value) { return addProperty(CoreConstants.PROPERTY_MEMBER_OF, value.build()); }
    @Override public School.Builder addMemberOf(String value) { return addProperty(CoreConstants.PROPERTY_MEMBER_OF, Text.of(value)); }
    @Override public School.Builder addMembers(Organization value) { return addProperty(CoreConstants.PROPERTY_MEMBERS, value); }
    @Override public School.Builder addMembers(Organization.Builder value) { return addProperty(CoreConstants.PROPERTY_MEMBERS, value.build()); }
    @Override public School.Builder addMembers(Person value) { return addProperty(CoreConstants.PROPERTY_MEMBERS, value); }
    @Override public School.Builder addMembers(Person.Builder value) { return addProperty(CoreConstants.PROPERTY_MEMBERS, value.build()); }
    @Override public School.Builder addMembers(String value) { return addProperty(CoreConstants.PROPERTY_MEMBERS, Text.of(value)); }
    @Override public School.Builder addNaics(Text value) { return addProperty(CoreConstants.PROPERTY_NAICS, value); }
    @Override public School.Builder addNaics(String value) { return addProperty(CoreConstants.PROPERTY_NAICS, Text.of(value)); }
    @Override public School.Builder addName(Text value) { return addProperty(CoreConstants.PROPERTY_NAME, value); }
    @Override public School.Builder addName(String value) { return addProperty(CoreConstants.PROPERTY_NAME, Text.of(value)); }
    @Override public School.Builder addNumberOfEmployees(QuantitativeValue value) { return addProperty(CoreConstants.PROPERTY_NUMBER_OF_EMPLOYEES, value); }
    @Override public School.Builder addNumberOfEmployees(QuantitativeValue.Builder value) { return addProperty(CoreConstants.PROPERTY_NUMBER_OF_EMPLOYEES, value.build()); }
    @Override public School.Builder addNumberOfEmployees(String value) { return addProperty(CoreConstants.PROPERTY_NUMBER_OF_EMPLOYEES, Text.of(value)); }
    @Override public School.Builder addOwns(OwnershipInfo value) { return addProperty(CoreConstants.PROPERTY_OWNS, value); }
    @Override public School.Builder addOwns(OwnershipInfo.Builder value) { return addProperty(CoreConstants.PROPERTY_OWNS, value.build()); }
    @Override public School.Builder addOwns(Product value) { return addProperty(CoreConstants.PROPERTY_OWNS, value); }
    @Override public School.Builder addOwns(Product.Builder value) { return addProperty(CoreConstants.PROPERTY_OWNS, value.build()); }
    @Override public School.Builder addOwns(String value) { return addProperty(CoreConstants.PROPERTY_OWNS, Text.of(value)); }
    @Override public School.Builder addParentOrganization(Organization value) { return addProperty(CoreConstants.PROPERTY_PARENT_ORGANIZATION, value); }
    @Override public School.Builder addParentOrganization(Organization.Builder value) { return addProperty(CoreConstants.PROPERTY_PARENT_ORGANIZATION, value.build()); }
    @Override public School.Builder addParentOrganization(String value) { return addProperty(CoreConstants.PROPERTY_PARENT_ORGANIZATION, Text.of(value)); }
    @Override public School.Builder addPotentialAction(Action value) { return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, value); }
    @Override public School.Builder addPotentialAction(Action.Builder value) { return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, value.build()); }
    @Override public School.Builder addPotentialAction(String value) { return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, Text.of(value)); }
    @Override public School.Builder addReview(Review value) { return addProperty(CoreConstants.PROPERTY_REVIEW, value); }
    @Override public School.Builder addReview(Review.Builder value) { return addProperty(CoreConstants.PROPERTY_REVIEW, value.build()); }
    @Override public School.Builder addReview(String value) { return addProperty(CoreConstants.PROPERTY_REVIEW, Text.of(value)); }
    @Override public School.Builder addReviews(Review value) { return addProperty(CoreConstants.PROPERTY_REVIEWS, value); }
    @Override public School.Builder addReviews(Review.Builder value) { return addProperty(CoreConstants.PROPERTY_REVIEWS, value.build()); }
    @Override public School.Builder addReviews(String value) { return addProperty(CoreConstants.PROPERTY_REVIEWS, Text.of(value)); }
    @Override public School.Builder addSameAs(URL value) { return addProperty(CoreConstants.PROPERTY_SAME_AS, value); }
    @Override public School.Builder addSameAs(String value) { return addProperty(CoreConstants.PROPERTY_SAME_AS, Text.of(value)); }
    @Override public School.Builder addSeeks(Demand value) { return addProperty(CoreConstants.PROPERTY_SEEKS, value); }
    @Override public School.Builder addSeeks(Demand.Builder value) { return addProperty(CoreConstants.PROPERTY_SEEKS, value.build()); }
    @Override public School.Builder addSeeks(String value) { return addProperty(CoreConstants.PROPERTY_SEEKS, Text.of(value)); }
    @Override public School.Builder addServiceArea(AdministrativeArea value) { return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value); }
    @Override public School.Builder addServiceArea(AdministrativeArea.Builder value) { return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value.build()); }
    @Override public School.Builder addServiceArea(GeoShape value) { return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value); }
    @Override public School.Builder addServiceArea(GeoShape.Builder value) { return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value.build()); }
    @Override public School.Builder addServiceArea(Place value) { return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value); }
    @Override public School.Builder addServiceArea(Place.Builder value) { return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, value.build()); }
    @Override public School.Builder addServiceArea(String value) { return addProperty(CoreConstants.PROPERTY_SERVICE_AREA, Text.of(value)); }
    @Override public School.Builder addSubOrganization(Organization value) { return addProperty(CoreConstants.PROPERTY_SUB_ORGANIZATION, value); }
    @Override public School.Builder addSubOrganization(Organization.Builder value) { return addProperty(CoreConstants.PROPERTY_SUB_ORGANIZATION, value.build()); }
    @Override public School.Builder addSubOrganization(String value) { return addProperty(CoreConstants.PROPERTY_SUB_ORGANIZATION, Text.of(value)); }
    @Override public School.Builder addTaxID(Text value) { return addProperty(CoreConstants.PROPERTY_TAX_ID, value); }
    @Override public School.Builder addTaxID(String value) { return addProperty(CoreConstants.PROPERTY_TAX_ID, Text.of(value)); }
    @Override public School.Builder addTelephone(Text value) { return addProperty(CoreConstants.PROPERTY_TELEPHONE, value); }
    @Override public School.Builder addTelephone(String value) { return addProperty(CoreConstants.PROPERTY_TELEPHONE, Text.of(value)); }
    @Override public School.Builder addUrl(URL value) { return addProperty(CoreConstants.PROPERTY_URL, value); }
    @Override public School.Builder addUrl(String value) { return addProperty(CoreConstants.PROPERTY_URL, Text.of(value)); }
    @Override public School.Builder addVatID(Text value) { return addProperty(CoreConstants.PROPERTY_VAT_ID, value); }
    @Override public School.Builder addVatID(String value) { return addProperty(CoreConstants.PROPERTY_VAT_ID, Text.of(value)); }
    @Override public School.Builder addDetailedDescription(Article value) { return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, value); }
    @Override public School.Builder addDetailedDescription(Article.Builder value) { return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, value.build()); }
    @Override public School.Builder addDetailedDescription(String value) { return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, Text.of(value)); }
    @Override public School.Builder addPopularityScore(PopularityScoreSpecification value) { return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, value); }
    @Override public School.Builder addPopularityScore(PopularityScoreSpecification.Builder value) { return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, value.build()); }
    @Override public School.Builder addPopularityScore(String value) { return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, Text.of(value)); }

    // Creates the immutable SchoolImpl from the accumulated property multimaps.
    @Override public School build() { return new SchoolImpl(properties, reverseMap); }
  }

  public SchoolImpl(Multimap<String, ValueType> properties, Multimap<String, Thing> reverseMap) {
    super(properties, reverseMap);
  }

  @Override
  public String getFullTypeName() {
    return CoreConstants.TYPE_SCHOOL;
  }

  // Accepts the property either as a bare name resolved against the core or
  // goog namespace, or as an already fully-qualified property URL.
  @Override
  public boolean includesProperty(String property) {
    return PROPERTY_SET.contains(CoreConstants.NAMESPACE + property)
        || PROPERTY_SET.contains(GoogConstants.NAMESPACE + property)
        || PROPERTY_SET.contains(property);
  }
}
/* * Copyright 2014-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.dynamodbv2.document.internal; import static com.amazonaws.util.BinaryUtils.copyAllBytesFrom; import java.math.BigDecimal; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import com.amazonaws.AmazonWebServiceRequest; import com.amazonaws.services.dynamodbv2.document.AttributeUpdate; import com.amazonaws.services.dynamodbv2.document.Expected; import com.amazonaws.services.dynamodbv2.document.IncompatibleTypeException; import com.amazonaws.services.dynamodbv2.document.Item; import com.amazonaws.services.dynamodbv2.document.KeyAttribute; import com.amazonaws.services.dynamodbv2.document.PrimaryKey; import com.amazonaws.services.dynamodbv2.model.AttributeValue; import com.amazonaws.services.dynamodbv2.model.AttributeValueUpdate; import com.amazonaws.services.dynamodbv2.model.ComparisonOperator; import com.amazonaws.services.dynamodbv2.model.Condition; import com.amazonaws.services.dynamodbv2.model.ExpectedAttributeValue; import com.amazonaws.util.VersionInfoUtils; /** * Internal utilities. Not meant for general use. May change without notice. 
*/ public enum InternalUtils { ; /** * Returns a non-null list of <code>Item</code>'s given the low level * list of item information. */ public static List<Item> toItemList(List<Map<String, AttributeValue>> items) { if (items == null) return Collections.emptyList(); List<Item> result = new ArrayList<Item>(items.size()); for (Map<String, AttributeValue> item : items) result.add(Item.fromMap(toSimpleMapValue(item))); return result; } /** * Converts an <code>Item</code> into the low-level representation; * or null if the input is null. */ public static Map<String, AttributeValue> toAttributeValues(Item item) { if (item == null) return null; // row with multiple attributes Map<String, AttributeValue> result = new LinkedHashMap<String, AttributeValue>(); for (Map.Entry<String, Object> entry : item.attributes()) result.put(entry.getKey(), toAttributeValue(entry.getValue())); return result; } /** * Converts a map of string to simple objects into the low-level * representation; or null if the input is null. */ public static Map<String, AttributeValue> fromSimpleMap( Map<String, Object> map) { if (map == null) return null; // row with multiple attributes Map<String, AttributeValue> result = new LinkedHashMap<String, AttributeValue>(); for (Map.Entry<String, Object> entry : map.entrySet()) result.put(entry.getKey(), toAttributeValue(entry.getValue())); return result; } /** * Converts a list of <code>AttributeUpdate</code> into the low-level * representation; or null if the input is null. 
*/ public static Map<String, AttributeValueUpdate> toAttributeValueUpdate( List<AttributeUpdate> attributesToUpdate) { if (attributesToUpdate == null) return null; Map<String, AttributeValueUpdate> result = new LinkedHashMap<String, AttributeValueUpdate>(); for (AttributeUpdate attribute : attributesToUpdate) { AttributeValueUpdate attributeToUpdate = new AttributeValueUpdate() .withAction(attribute.getAction()); if (attribute.getValue() != null) { attributeToUpdate.withValue(toAttributeValue(attribute .getValue())); } else if (attribute.getAttributeValues() != null) { attributeToUpdate.withValue(toAttributeValue(attribute .getAttributeValues())); } result.put(attribute.getAttributeName(), attributeToUpdate); } return result; } /** * Converts a simple value into the low-level <code><AttributeValue/code> * representation. * * @param value * the given value which can be one of the followings: * <ul> * <li>String</li> * <li>Set&lt;String></li> * <li>Number (including any subtypes and primitive types)</li> * <li>Set&lt;Number></li> * <li>byte[]</li> * <li>Set&lt;byte[]></li> * <li>ByteBuffer</li> * <li>Set&lt;ByteBuffer></li> * <li>Boolean or boolean</li> * <li>null</li> * <li>Map&lt;String,T>, where T can be any type on this list but must not * induce any circular reference</li> * <li>List&lt;T>, where T can be any type on this list but must not induce * any circular reference</li> * </ul> * @return a non-null low level representation of the input object value * * @throws UnsupportedOperationException * if the input object type is not supported */ public static AttributeValue toAttributeValue(Object value) { AttributeValue result = new AttributeValue(); if (value == null) { return result.withNULL(Boolean.TRUE); } else if (value instanceof Boolean) { return result.withBOOL((Boolean)value); } else if (value instanceof String) { return result.withS((String) value); } else if (value instanceof BigDecimal) { BigDecimal bd = (BigDecimal) value; return 
result.withN(bd.toPlainString()); } else if (value instanceof Number) { return result.withN(value.toString()); } else if (value instanceof byte[]) { return result.withB(ByteBuffer.wrap((byte[]) value)); } else if (value instanceof ByteBuffer) { return result.withB((ByteBuffer) value); } else if (value instanceof Set) { // default to an empty string set if there is no element @SuppressWarnings("unchecked") Set<Object> set = (Set<Object>) value; if (set.size() == 0) { result.setSS(new LinkedHashSet<String>()); return result; } Object element = set.iterator().next(); if (element instanceof String) { @SuppressWarnings("unchecked") Set<String> ss = (Set<String>) value; result.setSS(new ArrayList<String>(ss)); } else if (element instanceof Number) { @SuppressWarnings("unchecked") Set<Number> in = (Set<Number>) value; List<String> out = new ArrayList<String>(set.size()); for (Number n : in) { BigDecimal bd = InternalUtils.toBigDecimal(n); out.add(bd.toPlainString()); } result.setNS(out); } else if (element instanceof byte[]) { @SuppressWarnings("unchecked") Set<byte[]> in = (Set<byte[]>) value; List<ByteBuffer> out = new ArrayList<ByteBuffer>(set.size()); for (byte[] buf : in) { out.add(ByteBuffer.wrap(buf)); } result.setBS(out); } else if (element instanceof ByteBuffer) { @SuppressWarnings("unchecked") Set<ByteBuffer> bs = (Set<ByteBuffer>) value; result.setBS(bs); } else { throw new UnsupportedOperationException("element type: " + element.getClass()); } } else if (value instanceof List) { @SuppressWarnings("unchecked") List<Object> in = (List<Object>) value; List<AttributeValue> out = new ArrayList<AttributeValue>(); for (Object v : in) { out.add(toAttributeValue(v)); } result.setL(out); } else if (value instanceof Map) { @SuppressWarnings("unchecked") Map<String, Object> in = (Map<String, Object>) value; if (in.size() > 0) { for (Map.Entry<String, Object> e : in.entrySet()) { result.addMEntry(e.getKey(), toAttributeValue(e.getValue())); } } else { // empty map 
result.setM(new LinkedHashMap<String,AttributeValue>()); } } else { throw new UnsupportedOperationException("value type: " + value.getClass()); } return result; } /** * Converts a list of low-level <code>AttributeValue</code> into a list of * simple values. Each value in the returned list can be one of the * followings: * * <ul> * <li>String</li> * <li>Set&lt;String></li> * <li>Number (including any subtypes and primitive types)</li> * <li>Set&lt;Number></li> * <li>byte[]</li> * <li>Set&lt;byte[]></li> * <li>ByteBuffer</li> * <li>Set&lt;ByteBuffer></li> * <li>Boolean or boolean</li> * <li>null</li> * <li>Map&lt;String,T>, where T can be any type on this list but must not * induce any circular reference</li> * <li>List&lt;T>, where T can be any type on this list but must not induce * any circular reference</li> * </ul> */ public static List<Object> toSimpleList(List<AttributeValue> attrValues) { if (attrValues == null) return null; List<Object> result = new ArrayList<Object>(attrValues.size()); for (AttributeValue attrValue : attrValues) { Object value = toSimpleValue(attrValue); result.add(value); } return result; } /** * Convenient method to convert a list of low-level * <code>AttributeValue</code> into a list of values of the same type T. 
* Each value in the returned list can be one of the followings: * <ul> * <li>String</li> * <li>Set&lt;String></li> * <li>Number (including any subtypes and primitive types)</li> * <li>Set&lt;Number></li> * <li>byte[]</li> * <li>Set&lt;byte[]></li> * <li>ByteBuffer</li> * <li>Set&lt;ByteBuffer></li> * <li>Boolean or boolean</li> * <li>null</li> * <li>Map&lt;String,T>, where T can be any type on this list but must not * induce any circular reference</li> * <li>List&lt;T>, where T can be any type on this list but must not induce * any circular reference</li> * </ul> */ public static <T> List<T> toSimpleListValue(List<AttributeValue> values) { if (values == null) { return null; } List<T> result = new ArrayList<T>(values.size()); for (AttributeValue v : values) { T t = toSimpleValue(v); result.add(t); } return result; } public static <T> Map<String, T> toSimpleMapValue( Map<String, AttributeValue> values) { if (values == null) { return null; } Map<String, T> result = new LinkedHashMap<String, T>(values.size()); for (Map.Entry<String, AttributeValue> entry : values.entrySet()) { T t = toSimpleValue(entry.getValue()); result.put(entry.getKey(), t); } return result; } /** * Returns the string representation of the given value; or null if the * value is null. For <code>BigDecimal</code> it will be the string * representation without an exponent field. 
*/ public static String valToString(Object val) { if (val instanceof BigDecimal) { BigDecimal bd = (BigDecimal)val; return bd.toPlainString(); } if (val == null) return null; if (val instanceof String || val instanceof Boolean || val instanceof Number) return val.toString(); throw new IncompatibleTypeException("Cannot convert " + val.getClass() + " into a string"); } /** * Converts a low-level <code>AttributeValue</code> into a simple value, * which can be one of the followings: * * <ul> * <li>String</li> * <li>Set&lt;String></li> * <li>Number (including any subtypes and primitive types)</li> * <li>Set&lt;Number></li> * <li>byte[]</li> * <li>Set&lt;byte[]></li> * <li>ByteBuffer</li> * <li>Set&lt;ByteBuffer></li> * <li>Boolean or boolean</li> * <li>null</li> * <li>Map&lt;String,T>, where T can be any type on this list but must not * induce any circular reference</li> * <li>List&lt;T>, where T can be any type on this list but must not induce * any circular reference</li> * </ul> * * @throws IllegalArgumentException * if an empty <code>AttributeValue</code> value is specified */ static <T> T toSimpleValue(AttributeValue value) { if (value == null) { return null; } if (Boolean.TRUE.equals(value.getNULL())) { return null; } else if (Boolean.FALSE.equals(value.getNULL())) { throw new UnsupportedOperationException("False-NULL is not supported in DynamoDB"); } else if (value.getBOOL() != null) { @SuppressWarnings("unchecked") T t = (T) value.getBOOL(); return t; } else if (value.getS() != null) { @SuppressWarnings("unchecked") T t = (T) value.getS(); return t; } else if (value.getN() != null) { @SuppressWarnings("unchecked") T t = (T) new BigDecimal(value.getN()); return t; } else if (value.getB() != null) { @SuppressWarnings("unchecked") T t = (T) copyAllBytesFrom(value.getB()); return t; } else if (value.getSS() != null) { @SuppressWarnings("unchecked") T t = (T) new LinkedHashSet<String>(value.getSS()); return t; } else if (value.getNS() != null) { Set<BigDecimal> set = 
new LinkedHashSet<BigDecimal>(value.getNS().size()); for (String s : value.getNS()) { set.add(new BigDecimal(s)); } @SuppressWarnings("unchecked") T t = (T) set; return t; } else if (value.getBS() != null) { Set<byte[]> set = new LinkedHashSet<byte[]>(value.getBS().size()); for (ByteBuffer bb : value.getBS()) { set.add(copyAllBytesFrom(bb)); } @SuppressWarnings("unchecked") T t = (T) set; return t; } else if (value.getL() != null) { @SuppressWarnings("unchecked") T t = (T) toSimpleList(value.getL()); return t; } else if (value.getM() != null) { @SuppressWarnings("unchecked") T t = (T) toSimpleMapValue(value.getM()); return t; } else { throw new IllegalArgumentException( "Attribute value must not be empty: " + value); } } /** * Returns the minimum of the two input integers taking null into account. * Returns null if both integers are null. Otherwise, a null Integer is * treated as infinity. */ public static Integer minimum(Integer one, Integer two) { if (one == null) { return two; } else if (two == null) { return one; } else if (one < two) { return one; } else { return two; } } /** * Returns the low level representation of a collection of <code>Expected</code>. 
*/ public static Map<String, ExpectedAttributeValue> toExpectedAttributeValueMap( Collection<Expected> expectedSet) { if (expectedSet == null) return null; Map<String, ExpectedAttributeValue> expectedMap = new LinkedHashMap<String, ExpectedAttributeValue>(); for (Expected expected : expectedSet) { final String attr = expected.getAttribute(); final Object[] values = expected.getValues(); ExpectedAttributeValue eav = new ExpectedAttributeValue(); if (values != null) { if (values.length > 0) { // convert from list of object values to list of AttributeValues AttributeValue[] avs = InternalUtils.toAttributeValues(values); eav.withAttributeValueList(avs); } else { throw new IllegalStateException("Bug!"); } } ComparisonOperator op = expected.getComparisonOperator(); if (op == null) { throw new IllegalArgumentException( "Comparison operator for attribute " + expected.getAttribute() + " must be specified"); } eav.withComparisonOperator(op); expectedMap.put(attr, eav); } if (expectedSet.size() != expectedMap.size()) throw new IllegalArgumentException("duplicates attribute names not allowed in input"); return Collections.unmodifiableMap(expectedMap); } /** * Returns the low level representation of a collection of <code>Filter</code>. */ public static Map<String, Condition> toAttributeConditionMap(Collection<? 
extends Filter<?>> filters) { if (filters == null) return null; Map<String, Condition> conditionMap = new LinkedHashMap<String, Condition>(); for (Filter<?> filter : filters) { final String attr = filter.getAttribute(); final Object[] values = filter.getValues(); Condition condition = new Condition(); if (values != null) { if (values.length > 0) { // convert from list of object values to list of AttributeValues AttributeValue[] avs = InternalUtils.toAttributeValues(values); condition.withAttributeValueList(avs); } else { throw new IllegalStateException("Bug!"); } } ComparisonOperator op = filter.getComparisonOperator(); if (op == null) { throw new IllegalArgumentException( "Comparison operator for attribute " + filter.getAttribute() + " must be specified"); } condition.withComparisonOperator(op); conditionMap.put(attr, condition); } if (filters.size() != conditionMap.size()) throw new IllegalArgumentException("duplicates attribute names not allowed in input"); return Collections.unmodifiableMap(conditionMap); } /** * Converts the input array of values into an array of low level * representation of those values. 
* * A value in the input array can be one of the followings: * * <ul> * <li>String</li> * <li>Set&lt;String></li> * <li>Number (including any subtypes and primitive types)</li> * <li>Set&lt;Number></li> * <li>byte[]</li> * <li>Set&lt;byte[]></li> * <li>ByteBuffer</li> * <li>Set&lt;ByteBuffer></li> * <li>Boolean or boolean</li> * <li>null</li> * <li>Map&lt;String,T>, where T can be any type on this list but must not * induce any circular reference</li> * <li>List&lt;T>, where T can be any type on this list but must not induce * any circular reference</li> * </ul> */ public static AttributeValue[] toAttributeValues(Object[] values) { AttributeValue[] attrValues = new AttributeValue[values.length]; for (int i=0; i < values.length; i++) attrValues[i] = InternalUtils.toAttributeValue(values[i]); return attrValues; } /** * Converts the specified primary key into the low-level representation. */ public static Map<String, AttributeValue> toAttributeValueMap( Collection<KeyAttribute> primaryKey) { if (primaryKey == null) return null; Map<String, AttributeValue> keys = new LinkedHashMap<String, AttributeValue>(); for (KeyAttribute keyAttr : primaryKey) keys.put(keyAttr.getName(), InternalUtils.toAttributeValue(keyAttr.getValue())); return Collections.unmodifiableMap(keys); } /** * Converts the specified primary key into the low-level representation. */ public static Map<String, AttributeValue> toAttributeValueMap( PrimaryKey primaryKey) { if (primaryKey == null) return null; return toAttributeValueMap(primaryKey.getComponents()); } /** * Converts the specified primary key into the low-level representation. */ public static Map<String, AttributeValue> toAttributeValueMap( KeyAttribute ... primaryKey) { if (primaryKey == null) return null; return toAttributeValueMap(Arrays.asList(primaryKey)); } /** * Converts a number into BigDecimal representation. 
*/ public static BigDecimal toBigDecimal(Number n) { if (n instanceof BigDecimal) return (BigDecimal)n; return new BigDecimal(n.toString()); } public static Set<BigDecimal> toBigDecimalSet(Number ... val) { Set<BigDecimal> set = new LinkedHashSet<BigDecimal>(val.length); for (Number n: val) set.add(InternalUtils.toBigDecimal(n)); return set; } public static Set<BigDecimal> toBigDecimalSet(Set<Number> vals) { Set<BigDecimal> set = new LinkedHashSet<BigDecimal>(vals.size()); for (Number n: vals) set.add(InternalUtils.toBigDecimal(n)); return set; } /** * Append the custom user-agent string. */ public static <X extends AmazonWebServiceRequest> X applyUserAgent(X request) { final String USER_AGENT = "dynamodb-table-api/" + VersionInfoUtils.getVersion(); request.getRequestClientOptions().appendUserAgent(USER_AGENT); return request; } public static void rejectNullValue(Object val) { if (val == null) throw new IllegalArgumentException("Input value must not be null"); } public static void rejectNullInput(Object input) { if (input == null) throw new IllegalArgumentException("Input must not be null"); } public static void rejectEmptyInput(Object[] input) { if (input.length == 0) throw new IllegalArgumentException("At least one input must be specified"); } public static void rejectNullOrEmptyInput(Object[] input) { rejectNullInput(input); rejectEmptyInput(input); } public static void checkInvalidAttrName(String attrName) { if (attrName == null || attrName.trim().length() == 0) throw new IllegalArgumentException("Attribute name must not be null or empty"); } public static void checkInvalidAttribute(String attrName, Object val) { checkInvalidAttrName(attrName); rejectNullValue(val); } }
/* * Copyright 2021 Allette Systems (Australia) * http://www.allette.com.au * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.pageseeder.ox.berlioz.util; import org.apache.commons.fileupload.FileItem; import org.apache.commons.fileupload.FileUploadBase.InvalidContentTypeException; import org.apache.commons.fileupload.FileUploadBase.SizeLimitExceededException; import org.apache.commons.fileupload.FileUploadException; import org.apache.commons.io.FileUtils; import org.apache.commons.io.FilenameUtils; import org.apache.commons.io.IOUtils; import org.pageseeder.berlioz.GlobalSettings; import org.pageseeder.ox.OXConfig; import org.pageseeder.ox.OXErrorMessage; import org.pageseeder.ox.OXException; import org.pageseeder.ox.core.*; import org.pageseeder.ox.util.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.servlet.http.HttpServletRequest; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; /** * A file file handler to receive the data from user upload. * * @author Ciber Cai * @version 10 November 2014 */ public final class FileHandler { /** The logger. */ private static Logger LOGGER = LoggerFactory.getLogger(FileHandler.class); /** * To pipeline jobs. 
* * @param packs the packs * @return the list */ public static List<PipelineJob> toPipelineJobs(List<PackageData> packs) { ensureConfigured(); List<PipelineJob> jobs = new ArrayList<PipelineJob>(); long slowSize = GlobalSettings.get("ox2.slow-mode.size", -1); long maxInactiveTimeAllowed = Long.parseLong(GlobalSettings.get("ox2.max-inactive-time-ms", String.valueOf(StepJob.DEFAULT_MAX_INACTIVE_TIME_MS))); LOGGER.debug("Started creating the Pipeline Jobs"); for (PackageData pack : packs) { boolean isSlowMode = slowSize > 0 && pack.getOriginal().exists() && (pack.getOriginal().length() - slowSize * 1024 > 0); LOGGER.debug("slow mode {}", isSlowMode); String p = pack.getParameter("pipeline"); String modelName = pack.getParameter("model"); Model model = new Model(modelName); LOGGER.debug("Model {} ", modelName); if (p != null) { Pipeline pipeline = model.getPipeline(p); if (pipeline != null) { PipelineJob job = new PipelineJob(pipeline, pack); job.setSlowMode(isSlowMode); job.setMaxInactiveTimeAllowed(maxInactiveTimeAllowed); jobs.add(job); } else { LOGGER.warn("pipeline {} not found", p); } } else { Pipeline pipeline = model.getPipelineDefault(); PipelineJob job = new PipelineJob(pipeline, pack); job.setSlowMode(isSlowMode); jobs.add(job); } } LOGGER.debug("Ended creating the Pipeline Jobs"); return jobs; } /** * Receive. * * If there is any parameter in the url like /model/{model}/pipeline/{pipeline}.html . they will not be in the * HttpServletRequest. * Then before calling this method, you will need to set them in the attribute of HttpServletRequest, like: * req.setAttribute("model", model); * req.setAttribute("pipeline", pipeline); * * @param req the ContentRequest * @return the list of PackageData * @throws IOException when I/O error occur. 
* @throws OXException the OX exception */ public static List<PackageData> receive(HttpServletRequest req) throws IOException, OXException { List<PackageData> packs = new ArrayList<PackageData>(); // parse the upload request UploadProcessor processor = null; try { LOGGER.debug("Getting uploaded file."); UploadFactory factory = UploadFactory.getInstance(); processor = factory.make(req); LOGGER.debug("Uploaded file loaded."); } catch (SizeLimitExceededException ex) { LOGGER.warn("File size exceeds upload limit please choose another file.", ex); throw new OXException("File size exceeds the limit of " + GlobalSettings.get("ox2.upload.max-size", 10) + "MB.", ex); } catch (InvalidContentTypeException ex) { LOGGER.warn("Invalid content type.", ex); throw new OXException("Invalid content type.", ex); } catch (FileUploadException ex) { LOGGER.error("File Upload Exception: {}", ex); throw new OXException("Cannot process the upload request.", ex); } // Check that we have a file upload request boolean isMultipart = processor.isMultipart(); LOGGER.debug("Is it multipart? {}", isMultipart); String model = processor.getParameter("model", req.getParameter("model")); if (StringUtils.isBlank(model)) { model = (String) req.getAttribute("model"); if (StringUtils.isBlank(model)) { throw new OXException("Model cannot be null or empty"); } } if (isMultipart) { List<FileItem> items = processor.getFileItemList(); for (FileItem item : items) { if (!item.isFormField()) { String filename = getFilename(item); LOGGER.debug("item content type {}", item.getContentType()); LOGGER.debug("item filename {}", filename); //TODO add user logged to the package PackageData pack = toPackageData(item, filename, model); LOGGER.debug("pack {}", pack != null ? 
pack.id() : "null"); if (pack != null) { pack.saveProperties(); packs.add(pack); } } } LOGGER.debug("Adding parameters to package."); Map<String, String> formParameters = processor.getParameters(); Map<String, String[]> urlParameters = req.getParameterMap(); Map<String, String> parameters = mixParameters(formParameters, urlParameters); LOGGER.debug("Number of parameters {}", parameters.size()); // Add the parameter to each pack for (PackageData pack : packs) { for (Entry<String, String> parameter : parameters.entrySet()) { pack.setParameter(parameter.getKey(), parameter.getValue()); pack.saveProperties(); } } } // Return package data return packs; } /** * Mix parameters. * * @param formParameters the form parameters * @param urlParameters the url parameters * @return the map */ private static Map<String, String> mixParameters (Map<String, String> formParameters, Map<String, String[]> urlParameters) { Map<String, String> parameters = new HashMap<String, String>(); if (formParameters != null) { parameters.putAll(formParameters); } if (urlParameters != null) { for(Entry<String, String[]> param:urlParameters.entrySet()) { if (!parameters.containsKey(param.getKey())) { parameters.put(param.getKey(), StringUtils.convertToString(param.getValue(), ",")); } } } return parameters; } /** * Copy to. * * @param stream the stream * @param file the file * @return the int * @throws IOException Signals that an I/O exception has occurred. * @throws OXException the OX exception */ private static final int copyTo(InputStream stream, File file) throws IOException, OXException { LOGGER.debug("Writing file: {}", file != null ? file.getAbsolutePath() : "null"); if (file == null || file.isDirectory()) throw new OXException(OXErrorMessage.FILE_NOT_SELECTED); int copied = 0; FileOutputStream os = null; try { os = new FileOutputStream(file); copied = IOUtils.copy(stream, os); } finally { IOUtils.closeQuietly(os); } return copied; } /** * To type. 
* * @param filename the specified file * @return the type of specified file. */ private static String toType(String filename) { if (filename == null) { throw new NullPointerException("file name cannot be null"); } String lcfilename = filename.toLowerCase(); if (lcfilename.endsWith("xml")) { return "xml"; } else if (lcfilename.endsWith("docx")) { return "docx"; } else if (lcfilename.endsWith("html")) { return "html"; } else if (lcfilename.endsWith("htm")) { return "html"; } else if (lcfilename.endsWith("psml")) { return "psml"; } else if (lcfilename.endsWith("zip")) { return "zip"; } else { return filename.substring(filename.lastIndexOf(".") + 1); } } /** * To name. * * @param filename the filename * @return the name of file without extension. */ private static String toName(String filename) { int dot = filename.lastIndexOf('.'); return filename.substring(0, dot); } /** * Create a new package data from the specified file item if possible. * * @param item the item * @param filename the filename * @return the package data * @throws IOException Signals that an I/O exception has occurred. * @throws OXException the OX exception */ private static PackageData toPackageData(FileItem item, String filename, String model) throws IOException, OXException { if (StringUtils.isBlank(filename)) throw new OXException(OXErrorMessage.FILE_NOT_SELECTED); LOGGER.debug("Starts toPackageData {}/{}", model, filename); InputStream stream = item.getInputStream(); File dir = getTempUploadDirectory(); LOGGER.debug("Temp directory: {}", dir.getAbsolutePath()); if (!dir.exists()) { dir.mkdirs(); } LOGGER.debug("Is form field: {}", item.isFormField()); File file = new File(dir, filename); LOGGER.debug("Temp file: {}", file.getAbsolutePath()); int copied = copyTo(stream, file); //TODO Add session PackageData pack = PackageData.newPackageData(model, file); //TODO This property is used to create the package data (change this logic). 
pack.setProperty("contenttype", item.getContentType()); pack.setProperty("type", toType(filename)); pack.setProperty("name", toName(filename)); LOGGER.debug("Filename {}.", filename); if (copied == 0) { LOGGER.debug("Deleting file {}.", dir.getAbsolutePath()); FileUtils.deleteDirectory(dir); throw new OXException(OXErrorMessage.FILE_IS_EMPTY); } LOGGER.debug("Ends toPackageData {}/{}", model, filename); return pack; } /** * Ensure the configuration file is set. */ private static void ensureConfigured() { OXConfig config = OXConfig.get(); File dir = config.getModelsDirectory(); LOGGER.debug("Model Directory is null {}", dir == null); if (dir == null) { LOGGER.debug("Global Settings {}", GlobalSettings.getAppData()); config.setModelsDirectory(new File(GlobalSettings.getAppData(), "model")); } } /** * Gets the filename. * * @param item the item * @return the filename * @throws OXException the OX exception */ private static String getFilename(FileItem item) throws OXException { String filename = item.getName(); LOGGER.debug("Original filename {}", filename); if (!StringUtils.isBlank(filename)) { //It is necessary because the Internet Explore and Edge send the full path of the file //the this method remove all unnecessary path and returns the file name. filename = FilenameUtils.getName(filename); LOGGER.debug("Cleaned filename {}", filename); } else { LOGGER.debug("The uploaded file name is empty it may be because any file has been selected."); throw new OXException(OXErrorMessage.FILE_NOT_SELECTED); } return filename; } /** * Gets the temp upload directory. * * @return the temp upload directory * @throws IOException Signals that an I/O exception has occurred. 
*/ private static File getTempUploadDirectory() throws IOException { File tempUploadOX = OXConfig.getOXTempUploadFolder(); File tempDirectory = Files.createTempDirectory(tempUploadOX.toPath(), "upload").toFile(); LOGGER.debug("Temporary upload directory {}", tempDirectory.getAbsolutePath()); return tempDirectory; } }
/*
 * Copyright (C) 2014-2016 LinkedIn Corp. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use
 * this file except in compliance with the License. You may obtain a copy of the
 * License at http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed
 * under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied.
 */
package gobblin.compaction.dataset;

import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicReference;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.Path;

import com.google.common.collect.Lists;

import lombok.extern.slf4j.Slf4j;

import gobblin.compaction.mapreduce.MRCompactor;
import gobblin.configuration.State;
import gobblin.dataset.FileSystemDataset;


/**
 * A class that represents a dataset whose data should be compacted.
 *
 * <p>Equality and hash code are based solely on {@link #inputPath()}; note that the input path
 * is mutable (see {@link #setInputPath(Path)}), so instances should not be re-keyed in hash-based
 * collections after mutation.</p>
 *
 * @author Ziyang Liu
 */
@Slf4j
public class Dataset implements Comparable<Dataset>, FileSystemDataset {

  public static final double DEFAULT_PRIORITY = 1.0;
  public static final double DEFAULT_PRIORITY_REDUCTION_FACTOR = 1.0 / 3.0;

  public enum DatasetState {
    // The data completeness of this dataset has not been verified.
    UNVERIFIED,
    // The data completeness of this dataset has been verified.
    VERIFIED,
    // The data completeness of this dataset has timed out. In this case it is configurable whether the
    // compactor should or should not compact this dataset.
    GIVEN_UP,
    // Compaction of this data set has been completed (which may have either succeeded or failed).
    COMPACTION_COMPLETE
  }

  /**
   * Builder for {@link Dataset}. All paths default to {@code null}; priority defaults to
   * {@link #DEFAULT_PRIORITY} and the late-data threshold to 0.
   */
  public static class Builder {
    private Path inputPath;
    private Path inputLatePath;
    private Path outputPath;
    private Path outputLatePath;
    private Path outputTmpPath;
    private double priority = DEFAULT_PRIORITY;
    private double lateDataThresholdForRecompact;

    public Builder withInputPath(Path inputPath) {
      this.inputPath = inputPath;
      return this;
    }

    public Builder withInputLatePath(Path inputLatePath) {
      this.inputLatePath = inputLatePath;
      return this;
    }

    public Builder withOutputPath(Path outputPath) {
      this.outputPath = outputPath;
      return this;
    }

    public Builder withOutputLatePath(Path outputLatePath) {
      this.outputLatePath = outputLatePath;
      return this;
    }

    public Builder withOutputTmpPath(Path outputTmpPath) {
      this.outputTmpPath = outputTmpPath;
      return this;
    }

    public Builder withPriority(double priority) {
      this.priority = priority;
      return this;
    }

    public Builder withLateDataThresholdForRecompact(double lateDataThresholdForRecompact) {
      this.lateDataThresholdForRecompact = lateDataThresholdForRecompact;
      return this;
    }

    public Dataset build() {
      return new Dataset(this);
    }
  }

  private final Path outputPath;
  private final Path outputLatePath;
  private final Path outputTmpPath;
  private final List<Path> additionalInputPaths;
  private final Collection<Throwable> throwables;
  private Path inputPath;
  private Path inputLatePath;
  private State jobProps;
  private double priority;
  private double lateDataThresholdForRecompact;
  private boolean needToRecompact;
  private AtomicReference<DatasetState> state;

  private Dataset(Builder builder) {
    this.inputPath = builder.inputPath;
    this.inputLatePath = builder.inputLatePath;
    this.outputPath = builder.outputPath;
    this.outputLatePath = builder.outputLatePath;
    this.outputTmpPath = builder.outputTmpPath;
    this.additionalInputPaths = Lists.newArrayList();
    // Throwables may be recorded from multiple compaction threads; synchronize the collection.
    this.throwables = Collections.synchronizedCollection(Lists.<Throwable> newArrayList());
    this.priority = builder.priority;
    this.lateDataThresholdForRecompact = builder.lateDataThresholdForRecompact;
    this.state = new AtomicReference<>(DatasetState.UNVERIFIED);
    this.jobProps = new State();
  }

  /**
   * Input path that contains the data of this {@link Dataset} to be compacted.
   */
  public Path inputPath() {
    return this.inputPath;
  }

  /**
   * Path that contains the late data of this {@link Dataset} to be compacted.
   * Late input data may be generated if the input data is obtained from another compaction,
   * e.g., if we run hourly compaction and daily compaction on a topic where the compacted hourly
   * data is the input to the daily compaction.
   *
   * If this path contains any data and this {@link Dataset} is not already compacted, deduplication
   * will be applied to this {@link Dataset}.
   */
  public Path inputLatePath() {
    return this.inputLatePath;
  }

  /**
   * Output path for the compacted data.
   */
  public Path outputPath() {
    return this.outputPath;
  }

  /**
   * If {@link #outputPath()} is already compacted and new input data is found, those data can be copied
   * to this path.
   */
  public Path outputLatePath() {
    return this.outputLatePath;
  }

  /**
   * The path where the MR job writes output to. Data will be published to {@link #outputPath()} if the compaction
   * is successful.
   */
  public Path outputTmpPath() {
    return this.outputTmpPath;
  }

  public double lateDataThresholdForRecompact() {
    return this.lateDataThresholdForRecompact;
  }

  public boolean needToRecompact() {
    return this.needToRecompact;
  }

  /**
   * Additional paths of this {@link Dataset} besides {@link #inputPath()} that contain data to be compacted.
   */
  public List<Path> additionalInputPaths() {
    return this.additionalInputPaths;
  }

  /**
   * Add an additional input path for this {@link Dataset}.
   */
  public void addAdditionalInputPath(Path path) {
    this.additionalInputPaths.add(path);
  }

  /**
   * Add additional input paths for this {@link Dataset}.
   */
  public void addAdditionalInputPaths(Collection<Path> paths) {
    this.additionalInputPaths.addAll(paths);
  }

  public double priority() {
    return this.priority;
  }

  public DatasetState state() {
    return this.state.get();
  }

  /**
   * Reduce the priority of the dataset by {@link #DEFAULT_PRIORITY_REDUCTION_FACTOR}.
   * @return the reduced priority
   */
  public double reducePriority() {
    return reducePriority(DEFAULT_PRIORITY_REDUCTION_FACTOR);
  }

  /**
   * Reduce the priority of the dataset.
   * @param reductionFactor the reduction factor. The priority will be reduced by reductionFactor.
   * @return the reduced priority
   */
  public double reducePriority(double reductionFactor) {
    this.priority *= 1.0 - reductionFactor;
    return this.priority;
  }

  /**
   * Flags this dataset for recompaction when the fraction of late records exceeds
   * {@link #lateDataThresholdForRecompact()}. Never un-flags a dataset.
   *
   * @param lateDataCount number of late records
   * @param nonLateDataCount number of non-late records
   */
  public void checkIfNeedToRecompact(long lateDataCount, long nonLateDataCount) {
    long totalCount = lateDataCount + nonLateDataCount;
    if (totalCount <= 0) {
      // FIX: previously 0/0 produced NaN, which was logged and (silently, since any
      // comparison with NaN is false) never triggered recompaction. Make the no-data
      // case explicit instead of logging "NaN".
      log.info("No data found; late data percentage cannot be computed");
      return;
    }
    double lateDataPercent = lateDataCount * 1.0 / totalCount;
    log.info("Late data percentage is " + lateDataPercent + " and threshold is " + this.lateDataThresholdForRecompact);
    if (lateDataPercent > this.lateDataThresholdForRecompact) {
      this.needToRecompact = true;
    }
  }

  public void setState(DatasetState state) {
    this.state.set(state);
  }

  /**
   * Sets the {@link DatasetState} of the {@link Dataset} to the given updated value if the
   * current value == the expected value.
   */
  public void compareAndSetState(DatasetState expect, DatasetState update) {
    this.state.compareAndSet(expect, update);
  }

  public State jobProps() {
    return this.jobProps;
  }

  public void setJobProps(State jobProps) {
    this.jobProps.addAll(jobProps);
  }

  public void setJobProp(String key, Object value) {
    this.jobProps.setProp(key, value);
  }

  public void setInputPath(Path newInputPath) {
    this.inputPath = newInputPath;
  }

  public void setInputLatePath(Path newInputLatePath) {
    this.inputLatePath = newInputLatePath;
  }

  public void resetNeedToRecompact() {
    this.needToRecompact = false;
  }

  /**
   * Modify an existing dataset to recompact from its output path.
   */
  public void modifyDatasetForRecompact(State recompactState) {
    this.setJobProps(recompactState);
    this.setInputPath(this.outputPath);
    this.setInputLatePath(this.outputLatePath);
    this.addAdditionalInputPath(this.outputLatePath);
    this.resetNeedToRecompact();
  }

  /**
   * Get dataset URN, which equals {@link #outputPath} by removing {@link MRCompactor#COMPACTION_JOB_DEST_PARTITION}
   * and {@link MRCompactor#COMPACTION_DEST_SUBDIR}, if any.
   */
  public String getUrn() {
    return this.simplifyOutputPath().toString();
  }

  /**
   * Get dataset name, which equals {@link Path#getName()} of {@link #outputPath} after removing
   * {@link MRCompactor#COMPACTION_JOB_DEST_PARTITION} and {@link MRCompactor#COMPACTION_DEST_SUBDIR}, if any.
   */
  public String getName() {
    return this.simplifyOutputPath().getName();
  }

  private Path simplifyOutputPath() {
    Path simplifiedPath = new Path(StringUtils.removeEnd(this.outputPath.toString(),
        this.jobProps().getProp(MRCompactor.COMPACTION_JOB_DEST_PARTITION, StringUtils.EMPTY)));
    simplifiedPath = new Path(StringUtils.removeEnd(simplifiedPath.toString(),
        this.jobProps().getProp(MRCompactor.COMPACTION_DEST_SUBDIR, MRCompactor.DEFAULT_COMPACTION_DEST_SUBDIR)));
    return simplifiedPath;
  }

  public Collection<Throwable> throwables() {
    return this.throwables;
  }

  /**
   * Record a {@link Throwable} in a {@link Dataset}.
   */
  public void addThrowable(Throwable t) {
    this.throwables.add(t);
  }

  /**
   * Skip the {@link Dataset} by setting its {@link DatasetState} to {@link DatasetState#COMPACTION_COMPLETE},
   * and record the given {@link Throwable} in the {@link Dataset}.
   */
  public void skip(Throwable t) {
    this.setState(DatasetState.COMPACTION_COMPLETE);
    this.throwables.add(t);
  }

  /**
   * Orders datasets by descending priority.
   * NOTE(review): this ordering is inconsistent with {@link #equals(Object)} (which compares
   * input paths) — avoid using {@link Dataset} in sorted sets/maps.
   */
  @Override
  public int compareTo(Dataset o) {
    return Double.compare(o.priority, this.priority);
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    // Objects.hashCode yields 0 for null, matching the original ternary exactly.
    result = prime * result + Objects.hashCode(this.inputPath);
    return result;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (!(obj instanceof Dataset)) {
      return false;
    }
    Dataset other = (Dataset) obj;
    return Objects.equals(this.inputPath, other.inputPath);
  }

  /**
   * @return the {@link Path} of the {@link Dataset}.
   */
  @Override
  public String toString() {
    return this.inputPath.toString();
  }

  @Override
  public Path datasetRoot() {
    return this.outputPath;
  }

  @Override
  public String datasetURN() {
    return this.datasetRoot().toString();
  }
}
package clickr;

import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import android.text.Editable;
import android.text.Html;
import android.text.TextWatcher;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.RadioButton;
import android.widget.RadioGroup;
import android.widget.RadioGroup.OnCheckedChangeListener;
import android.widget.TextView;
import android.widget.Toast;

import com.iitbombay.clickr.R;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.util.Date;
import java.util.HashMap;
import java.util.logging.Level;
import java.util.logging.Logger;

import support.Question;
import support.Utils;

/**
 * Fragment to display question for attempting inside activity
 *
 * @author bhargav
 */
public class QuestionFragment extends Fragment {

    private static String CLASSNAME = "QuestionFragment";
    private final static Logger LOGGER = Logger.getLogger(CLASSNAME);

    // Question title/body views.
    TextView txtvw_title;
    TextView txtvw_question;
    // One container per question kind; only the matching one is made visible.
    RadioGroup rg_options;
    LinearLayout ll_checkboxes;
    LinearLayout ll_truefalse;
    Button btn_true;
    Button btn_false;
    EditText edtxt_textual;
    // Parsed from the shared Question.question JSON object.
    String title;
    String qtext;
    String qtype;
    String qkind;
    Integer option_count;
    // Maps generated view ids to 1-based option numbers.
    HashMap<Integer, Integer> optionIds = new HashMap<Integer, Integer>();
    // Checked-state scratch array for multiple-choice questions.
    JSONArray mcqtemp = new JSONArray();
    LayoutInflater layoutinflater;
    FragmentActivity fragactivity;

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        layoutinflater = inflater;
        fragactivity = getActivity();
        // Inflate the layout for this fragment
        return inflater.inflate(R.layout.fragment_question, container, false);
    }

    /**
     * Parses the current question from {@code Question.question} and initializes
     * the widgets for its type ("single", "multiple", "truefalse", or textual).
     */
    @Override
    public void onActivityCreated(@Nullable Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        Question.startTime = new Date().getTime();
        txtvw_title = (TextView) fragactivity.findViewById(R.id.txtvw_title);
        txtvw_question = (TextView) fragactivity.findViewById(R.id.txtvw_question);
        try {
            title = (String) Question.question.get("title");
            qtext = (String) Question.question.get("qtext");
            qtype = (String) Question.question.get("qtype");
            qkind = (String) Question.question.get("qkind");
            option_count = (Integer) Question.question.get("option_count");
        } catch (JSONException e) {
            e.printStackTrace();
            LOGGER.log(Level.SEVERE, "Parsing Question/Options error", e);
            Toast.makeText(fragactivity, "Parsing Question/Options error", Toast.LENGTH_SHORT).show();
            gotoLoginPage();
            // FIX: gotoLoginPage() only starts a new activity; without this return the
            // code below would dereference the still-null title/qtype and crash with NPE.
            return;
        }
        if (!title.equals("")) {
            txtvw_title.setVisibility(View.VISIBLE);
            txtvw_title.setText(title);
        }
        txtvw_question.setText(Html.fromHtml("<b>Q) </b>" + qtext));
        //call that particular method according to question type
        if (qtype.equals("single"))
            singleMCQInit();
        else if (qtype.equals("multiple"))
            multipleMCQinit();
        else if (qtype.equals("truefalse"))
            truefalseInit();
        else
            wordTextualInit();
    }

    /**
     * Display single mcq question
     */
    void singleMCQInit() {
        rg_options = (RadioGroup) fragactivity.findViewById(R.id.rg_options);
        rg_options.setVisibility(View.VISIBLE);
        for (int i = 0; i < option_count; i++) {
            RadioButton row = (RadioButton) layoutinflater.inflate(R.layout.template_radiobtn, rg_options, false);
            try {
                JSONObject option = (JSONObject) Question.options.get(i);
                row.setText((String) option.get("optext"));
            } catch (JSONException e) {
                LOGGER.log(Level.SEVERE, "Json Error while setting the options", e);
                e.printStackTrace();
                gotoLoginPage();
            }
            rg_options.addView(row);
            row.setId(Utils.generateViewId());
            // Record the 1-based option number for this generated view id.
            optionIds.put(row.getId(), i + 1);
        }
        rg_options.setOnCheckedChangeListener(new OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(RadioGroup group, int checkedId) {
                LOGGER.info(checkedId + " is checked now");
                Question.answers = new JSONArray();
                Question.answers.put(optionIds.get(checkedId));
            }
        });
    }

    /**
     * Display multiple mcq question
     */
    void multipleMCQinit() {
        ll_checkboxes = (LinearLayout) fragactivity.findViewById(R.id.ll_checkboxes);
        ll_checkboxes.setVisibility(View.VISIBLE);
        mcqtemp = new JSONArray();
        for (int i = 0; i < option_count; i++) {
            CheckBox row = (CheckBox) layoutinflater.inflate(R.layout.template_checkbox, ll_checkboxes, false);
            try {
                JSONObject option = (JSONObject) Question.options.get(i);
                row.setText((String) option.get("optext"));
            } catch (JSONException e) {
                LOGGER.log(Level.SEVERE, "Json Error while setting the options", e);
                e.printStackTrace();
                gotoLoginPage();
            }
            ll_checkboxes.addView(row);
            row.setId(Utils.generateViewId());
            optionIds.put(row.getId(), i + 1);
            mcqtemp.put(false);
            row.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(View view) {
                    boolean checked = ((CheckBox) view).isChecked();
                    // NOTE(review): checkedid is 1-based while mcqtemp was seeded at
                    // indices 0..option_count-1, so answers end up holding 1-based
                    // option numbers (matching singleMCQInit) — confirm against server.
                    int checkedid = optionIds.get(view.getId());
                    try {
                        mcqtemp.put(checkedid, checked);
                        Question.answers = new JSONArray();
                        for (int i = 0; i < mcqtemp.length(); i++) {
                            if (mcqtemp.getBoolean(i))
                                Question.answers.put(i);
                        }
                    } catch (JSONException e) {
                        e.printStackTrace();
                        LOGGER.log(Level.SEVERE, "Json on checkbox click error: ", e);
                    }
                }
            });
        }
    }

    /**
     * Display true or false question
     */
    void truefalseInit() {
        ll_truefalse = (LinearLayout) fragactivity.findViewById(R.id.ll_truefalse);
        ll_truefalse.setVisibility(View.VISIBLE);
        btn_true = (Button) fragactivity.findViewById(R.id.btn_true);
        btn_false = (Button) fragactivity.findViewById(R.id.btn_false);
        btn_true.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                // Highlight the chosen button, grey out the other, and record the answer.
                btn_true.setBackgroundResource(R.drawable.btn_green_style);
                btn_true.setTextColor(fragactivity.getResources().getColor(android.R.color.white));
                btn_false.setBackgroundResource(R.drawable.btn_grey_style);
                btn_false.setTextColor(fragactivity.getResources().getColor(android.R.color.black));
                Question.answers = new JSONArray();
                Question.answers.put(true);
            }
        });
        btn_false.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                btn_false.setBackgroundResource(R.drawable.btn_red_style);
                btn_false.setTextColor(fragactivity.getResources().getColor(android.R.color.white));
                btn_true.setBackgroundResource(R.drawable.btn_grey_style);
                btn_true.setTextColor(fragactivity.getResources().getColor(android.R.color.black));
                Question.answers = new JSONArray();
                Question.answers.put(false);
            }
        });
    }

    /**
     * Display word question.
     * Validates the text live: "word" strips spaces, "integer"/"float" reject
     * non-numeric edits by trimming the last character, "short" accepts anything.
     */
    void wordTextualInit() {
        edtxt_textual = (EditText) fragactivity.findViewById(R.id.edtxt_textual);
        edtxt_textual.setVisibility(View.VISIBLE);
        Question.answers = new JSONArray();
        edtxt_textual.addTextChangedListener(new TextWatcher() {
            @Override
            public void onTextChanged(CharSequence s, int start, int before, int count) {
            }

            @Override
            public void beforeTextChanged(CharSequence s, int start, int count, int after) {
            }

            @Override
            public void afterTextChanged(Editable s) {
                String result = "";
                if (qtype.equals("word")) {
                    result = s.toString().replaceAll(" ", "");
                    if (!s.toString().equals(result)) {
                        edtxt_textual.setText(result);
                        edtxt_textual.setSelection(result.length());
                    }
                } else if (qtype.equals("short"))
                    result = s.toString();
                else if (qtype.equals("integer")) {
                    result = s.toString();
                    // A lone sign (or empty text) is a valid prefix; wait for more input.
                    if (result.matches("[-+]?"))
                        return;
                    else {
                        try {
                            Integer test = Integer.parseInt(result);
                            test++;
                        } catch (NumberFormatException e) {
                            // Reject the last typed character.
                            result = result.substring(0, result.length() - 1);
                            edtxt_textual.setText(result);
                            edtxt_textual.setSelection(result.length());
                        }
                    }
                } else if (qtype.equals("float")) {
                    result = s.toString();
                    if (result.matches("[-+]?"))
                        return;
                    else {
                        try {
                            Double test = Double.parseDouble(result);
                            test++;
                        } catch (NumberFormatException e) {
                            result = result.substring(0, result.length() - 1);
                            edtxt_textual.setText(result);
                        edtxt_textual.setSelection(result.length());
                        }
                    }
                } else {
                    result = "";
                    LOGGER.info("Textual with undefined type!");
                }
                try {
                    Question.answers.put(0, result.trim());
                } catch (JSONException e) {
                    e.printStackTrace();
                    LOGGER.log(Level.SEVERE, "Json error!", e);
                }
            }
        });
    }

    /**
     * disable question options
     */
    public void disableBtns() {
        if (qtype.equals("single")) {
            for (int i = 0; i < rg_options.getChildCount(); i++) {
                rg_options.getChildAt(i).setEnabled(false);
            }
        } else if (qtype.equals("multiple")) {
            for (int i = 0; i < ll_checkboxes.getChildCount(); i++) {
                ll_checkboxes.getChildAt(i).setEnabled(false);
            }
        } else if (qtype.equals("truefalse")) {
            btn_true.setEnabled(false);
            btn_false.setEnabled(false);
        } else {
            edtxt_textual.setEnabled(false);
        }
    }

    /** Returns to the login screen, clearing activities above it on the back stack. */
    public void gotoLoginPage() {
        Intent intent = new Intent(fragactivity, LoginPage.class);
        intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
        startActivity(intent);
    }
}
package org.jetbrains.plugins.ipnb.editor.panels; import com.google.common.collect.Lists; import com.intellij.ide.BrowserUtil; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.DataProvider; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ModalityState; import com.intellij.openapi.command.CommandProcessor; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.event.DocumentAdapter; import com.intellij.openapi.editor.event.DocumentEvent; import com.intellij.openapi.fileEditor.OpenFileDescriptor; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ex.ProjectEx; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.ui.VerticalFlowLayout; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.wm.IdeFocusManager; import com.intellij.psi.PsiDocumentManager; import com.intellij.ui.JBColor; import com.intellij.util.Alarm; import com.intellij.util.PlatformUtils; import com.intellij.util.messages.MessageBusConnection; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.plugins.ipnb.IpnbUtils; import org.jetbrains.plugins.ipnb.editor.IpnbEditorUtil; import org.jetbrains.plugins.ipnb.editor.IpnbFileEditor; import org.jetbrains.plugins.ipnb.editor.actions.IpnbCutCellAction; import org.jetbrains.plugins.ipnb.editor.actions.IpnbDeleteCellAction; import org.jetbrains.plugins.ipnb.editor.actions.IpnbPasteCellAction; import org.jetbrains.plugins.ipnb.editor.panels.code.IpnbCodePanel; import org.jetbrains.plugins.ipnb.format.IpnbFile; import org.jetbrains.plugins.ipnb.format.IpnbParser; import org.jetbrains.plugins.ipnb.format.cells.*; import javax.swing.*; import java.awt.*; import java.awt.event.KeyEvent; import 
java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; public class IpnbFilePanel extends JPanel implements Scrollable, DataProvider, Disposable { private static final Logger LOG = Logger.getInstance(IpnbFilePanel.class); private final DocumentAdapter myDocumentListener; private final Document myDocument; private final MessageBusConnection myBusConnection; private IpnbFile myIpnbFile; private final Project myProject; @NotNull private final IpnbFileEditor myParent; @NotNull private final VirtualFile myVirtualFile; @NotNull private final IpnbFileEditor.CellSelectionListener myListener; private final List<IpnbEditablePanel> myIpnbPanels = Lists.newArrayList(); @Nullable private IpnbEditablePanel mySelectedCellPanel; private IpnbEditablePanel myBufferPanel; private int myInitialSelection = 0; private boolean mySynchronize; public IpnbFilePanel(@NotNull final Project project, @NotNull final IpnbFileEditor parent, @NotNull final VirtualFile vFile, @NotNull final IpnbFileEditor.CellSelectionListener listener) { super(new VerticalFlowLayout(VerticalFlowLayout.TOP, 100, 5, true, false)); myProject = project; myParent = parent; myVirtualFile = vFile; myListener = listener; setBackground(IpnbEditorUtil.getBackground()); final Alarm alarm = new Alarm(); myDocumentListener = new DocumentAdapter() { public void documentChanged(final DocumentEvent e) { if (mySynchronize) { alarm.cancelAllRequests(); alarm.addRequest(new MySynchronizeRequest(), 10, ModalityState.stateForComponent(IpnbFilePanel.this)); } mySynchronize = true; } }; myDocument = myParent.getDocument(); myDocument.addDocumentListener(myDocumentListener); alarm.addRequest(() -> { readFromFile(true); addMouseListener(new MouseAdapter() { @Override public void mouseClicked(MouseEvent e) { updateCellSelection(e); } }); setFocusable(true); }, 10, 
ModalityState.stateForComponent(this)); UIUtil.requestFocus(this); myBusConnection = ApplicationManager.getApplication().getMessageBus().connect(); myBusConnection.subscribe(ProjectEx.ProjectSaved.TOPIC, new ProjectEx.ProjectSaved() { @Override public void saved(@NotNull Project project) { CommandProcessor.getInstance().runUndoTransparentAction( () -> ApplicationManager.getApplication() .runWriteAction(() -> saveToFile(false))); } }); } @Override public void dispose() { myDocument.removeDocumentListener(myDocumentListener); Disposer.dispose(myBusConnection); } private void readFromFile(boolean showError) { try { removeAll(); myIpnbFile = IpnbParser.parseIpnbFile(myDocument, myVirtualFile); myIpnbPanels.clear(); mySelectedCellPanel = null; if (myIpnbFile.getCells().isEmpty()) { CommandProcessor.getInstance().runUndoTransparentAction(() -> ApplicationManager.getApplication().runWriteAction(() -> { createAndAddCell(true, IpnbCodeCell.createEmptyCodeCell()); saveToFile(true); })); } } catch (IOException e) { if (showError) { Messages.showErrorDialog(getProject(), e.getMessage(), "Can't open " + myVirtualFile.getPath()); } else { LOG.error(e.getMessage(), "Can't open " + myVirtualFile.getPath()); } } layoutFile(); } public List<IpnbEditablePanel> getIpnbPanels() { return myIpnbPanels; } private void layoutFile() { addWarningIfNeeded(); final List<IpnbCell> cells = myIpnbFile.getCells(); for (IpnbCell cell : cells) { addCellToPanel(cell); } if (myInitialSelection >= 0 && myIpnbPanels.size() > myInitialSelection) { final IpnbEditablePanel toSelect = myIpnbPanels.get(myInitialSelection); setSelectedCellPanel(toSelect); } add(createEmptyPanel()); ApplicationManager.getApplication().invokeLater(() -> { if (mySelectedCellPanel != null) { myParent.updateScrollPosition(mySelectedCellPanel); } }); } private void addWarningIfNeeded() { if (IpnbUtils.hasFx()) return; final String text; final String href; if (PlatformUtils.isPyCharm()) { href = 
"https://www.jetbrains.com/pycharm/download/"; text = "<html><a href=\"https://www.jetbrains.com/pycharm/download/\">Download PyCharm</a> with bundled JDK for better " + "Markdown cell rendering</html>"; } else { href = "https://confluence.jetbrains.com/display/PYH/Pycharm+2016.1+Jupyter+Notebook+rendering"; text = "<html>Follow instructions <a href=\"https://confluence.jetbrains.com/display/PYH/Pycharm+2016.1+Jupyter+Notebook+rendering\">" + "here</a> for better Markdown cell rendering</html>"; } final JLabel warning = new JLabel(text, SwingConstants.CENTER); warning.setForeground(JBColor.RED); warning.addMouseListener(new MouseAdapter() { @Override public void mouseClicked(MouseEvent e) { BrowserUtil.browse(href); } }); add(warning); } private void addCellToPanel(IpnbCell cell) { IpnbEditablePanel panel; if (cell instanceof IpnbCodeCell) { panel = new IpnbCodePanel(myProject, myParent, (IpnbCodeCell)cell); add(panel); myIpnbPanels.add(panel); } else if (cell instanceof IpnbMarkdownCell) { panel = new IpnbMarkdownPanel((IpnbMarkdownCell)cell, this); addComponent(panel); } else if (cell instanceof IpnbHeadingCell) { panel = new IpnbHeadingPanel((IpnbHeadingCell)cell); addComponent(panel); } else if (cell instanceof IpnbRawCell) { // A raw cell is defined as content that should be included unmodified in nbconvert output. 
// It's not visible for user } else { throw new UnsupportedOperationException(cell.getClass().toString()); } } public void createAndAddCell(final boolean below, IpnbCodeCell cell) { final IpnbCodePanel codePanel = new IpnbCodePanel(myProject, myParent, cell); addCell(codePanel, below); } private void addCell(@NotNull final IpnbEditablePanel panel, boolean below) { final IpnbEditablePanel selectedCellPanel = getSelectedCellPanel(); int index = myIpnbPanels.indexOf(selectedCellPanel); if (below) { index += 1; } final IpnbEditableCell cell = panel.getCell(); myIpnbFile.addCell(cell, index); myIpnbPanels.add(index, panel); add(panel, index); setSelectedCellPanel(panel); } public void cutCell() { myBufferPanel = getSelectedCellPanel(); if (myBufferPanel == null) return; deleteSelectedCell(); } public void moveCell(boolean down) { final IpnbEditablePanel selectedCellPanel = getSelectedCellPanel(); if (selectedCellPanel == null) return; final int index = getSelectedIndex(); int siblingIndex = down ? 
index + 1 : index - 1; if (myIpnbPanels.size() <= siblingIndex && down) { return; } if (siblingIndex < 0 && !down) { return; } if (down) { final IpnbEditableCell cell = selectedCellPanel.getCell(); deleteSelectedCell(); myIpnbFile.addCell(cell, index + 1); myIpnbPanels.add(index + 1, selectedCellPanel); add(selectedCellPanel, index + 1); selectPrev(selectedCellPanel); setSelectedCellPanel(selectedCellPanel); } else { final IpnbEditablePanel siblingPanel = myIpnbPanels.get(siblingIndex); deleteCell(siblingPanel); addCell(siblingPanel, true); setSelectedCellPanel(selectedCellPanel); } saveToFile(false); } public void mergeCell(boolean below) { final IpnbEditablePanel currentCellPanel = getSelectedCellPanel(); if (currentCellPanel == null) return; if (below) { selectNext(currentCellPanel, false); } else { selectPrev(currentCellPanel); } final IpnbEditablePanel cellToMergePanel = getSelectedCellPanel(); final IpnbCell cellToMerge = cellToMergePanel.getCell(); final List<String> currentCellSource = getCellSource(currentCellPanel); final List<String> cellToMergeSource = ((IpnbEditableCell)cellToMerge).getSource(); final ArrayList<String> source = mergeCellsSource(currentCellSource, cellToMergeSource, below); ((IpnbEditableCell)cellToMerge).setSource(source); cellToMergePanel.updateCellView(); actualizeCellData(cellToMerge); currentCellPanel.repaint(); deleteCell(currentCellPanel); saveToFile(false); } private static ArrayList<String> mergeCellsSource(@NotNull List<String> currentCellSource, @NotNull List<String> cellToMergeSource, boolean below) { final ArrayList<String> source = new ArrayList<>(); if (below) { source.addAll(currentCellSource); source.add("\n"); source.addAll(cellToMergeSource); } else { source.addAll(cellToMergeSource); source.add("\n"); source.addAll(currentCellSource); } return source; } private static void actualizeCellData(@NotNull IpnbCell cell) { if (cell instanceof IpnbCodeCell) { ((IpnbCodeCell)cell).removeCellOutputs(); 
((IpnbCodeCell)cell).setPromptNumber(null); } } public void splitCell() { final IpnbEditablePanel selectedCellPanel = getSelectedCellPanel(); if (selectedCellPanel == null) return; final IpnbEditableCell cell = selectedCellPanel.getCell(); final int position = selectedCellPanel.getCaretPosition(); if (position == -1) return; final String oldCellText = selectedCellPanel.getText(0, position); final String newCellText = selectedCellPanel.getText(position); if (oldCellText != null) { final JTextArea editablePanel = selectedCellPanel.getEditableTextArea(); if (editablePanel != null) { editablePanel.setText(oldCellText); } selectedCellPanel.getCell().setSource(createCellSourceFromText(oldCellText)); actualizeCellData(cell); selectedCellPanel.updateCellView(); } IpnbEditablePanel panel; final ArrayList<String> newCellSource = createCellSourceFromText(newCellText); panel = createPanel(cell, newCellSource); addCell(panel, true); saveToFile(false); } @NotNull private IpnbEditablePanel createPanel(@NotNull IpnbEditableCell cell, @NotNull ArrayList<String> newCellSource) { if (cell instanceof IpnbCodeCell) { final IpnbCodeCell codeCell = (IpnbCodeCell)cell; final IpnbCodeCell ipnbCodeCell = new IpnbCodeCell(codeCell.getLanguage(), newCellSource, codeCell.getPromptNumber(), codeCell.getCellOutputs(), codeCell.getMetadata()); return new IpnbCodePanel(myProject, myParent, ipnbCodeCell); } else if (cell instanceof IpnbMarkdownCell) { final IpnbMarkdownCell markdownCell = new IpnbMarkdownCell(newCellSource, cell.getMetadata()); return new IpnbMarkdownPanel(markdownCell, this); } else { final IpnbHeadingCell headingCell = new IpnbHeadingCell(newCellSource, ((IpnbHeadingCell)cell).getLevel(), cell.getMetadata()); return new IpnbHeadingPanel(headingCell); } } private static ArrayList<String> createCellSourceFromText(@NotNull String oldCellText) { final ArrayList<String> source = new ArrayList<>(); source.addAll(Arrays.stream(oldCellText.split("\n")).map(s -> s + 
"\n").collect(Collectors.toList())); return source; } @NotNull private static List<String> getCellSource(@NotNull IpnbEditablePanel cellPanel) { final IpnbCell cell = cellPanel.getCell(); return ((IpnbEditableCell)cell).getSource(); } public void deleteSelectedCell() { final IpnbEditablePanel selectedCellPanel = getSelectedCellPanel(); if (selectedCellPanel != null) { deleteCell(selectedCellPanel); } } private void deleteCell(@NotNull final IpnbEditablePanel cell) { final int index = myIpnbPanels.indexOf(cell); if (index < 0) return; myIpnbPanels.remove(index); myIpnbFile.removeCell(index); remove(index); if (!myIpnbPanels.isEmpty()) { int indexToSelect = index < myIpnbPanels.size() ? index : index - 1; final IpnbEditablePanel panel = myIpnbPanels.get(indexToSelect); setSelectedCell(panel, false); } } public void saveToFile(boolean synchronize) { mySynchronize = synchronize; final String oldText = myDocument.getText(); final String newText = IpnbParser.newDocumentText(this); if (newText == null) return; if (oldText.equals(newText) && mySynchronize) { new Alarm().addRequest(new MySynchronizeRequest(), 10, ModalityState.stateForComponent(this)); mySynchronize = false; return; } try { final ReplaceInfo replaceInfo = findFragmentToChange(oldText, newText); if (replaceInfo.getStartOffset() != -1) { myDocument.replaceString(replaceInfo.getStartOffset(), replaceInfo.getEndOffset(), replaceInfo.getReplacement()); } } catch (Exception e) { myDocument.replaceString(0, oldText.length(), newText); } } public static final class ReplaceInfo { private final int myStartOffset; private final int myEndOffset; private final String myReplacement; public ReplaceInfo(final int startOffset, final int endOffset, final String replacement) { myStartOffset = startOffset; myEndOffset = endOffset; myReplacement = replacement; } public int getStartOffset() { return myStartOffset; } public int getEndOffset() { return myEndOffset; } public String getReplacement() { return myReplacement; } } 
  /**
   * Computes the minimal single-span diff between {@code oldText} and {@code newText}:
   * the longest common prefix and suffix are stripped and the remaining middle of
   * {@code newText} becomes the replacement for the corresponding span of {@code oldText}.
   * Returns {@code ReplaceInfo(-1, -1, null)} when the texts are identical.
   */
  public static ReplaceInfo findFragmentToChange(@NotNull final String oldText, @NotNull final String newText) {
    if (oldText.equals(newText)) {
      return new ReplaceInfo(-1, -1, null);
    }
    final int oldLength = oldText.length();
    final int newLength = newText.length();
    int startOffset = 0;
    // Advance past the common prefix.
    while (
      startOffset < oldLength &&
      startOffset < newLength &&
      oldText.charAt(startOffset) == newText.charAt(startOffset)
      ) {
      startOffset++;
    }
    int endOffset = oldLength;
    // Walk back over the common suffix, never crossing the prefix boundary.
    while (true) {
      if (endOffset <= startOffset) {
        break;
      }
      final int idxInNew = newLength - (oldLength - endOffset) - 1;
      if (idxInNew < startOffset) {
        break;
      }
      final char c1 = oldText.charAt(endOffset - 1);
      final char c2 = newText.charAt(idxInNew);
      if (c1 != c2) {
        break;
      }
      endOffset--;
    }
    return new ReplaceInfo(startOffset, endOffset, newText.substring(startOffset, newLength - (oldLength - endOffset)));
  }

  /**
   * Deferred re-read of the notebook from the (already committed) document, preserving the
   * current selection index. Scheduled by {@link #saveToFile(boolean)} when synchronization
   * is requested but the document text did not change.
   */
  private class MySynchronizeRequest implements Runnable {

    public void run() {
      final Project project = getProject();
      if (project.isDisposed()) {
        return;
      }
      if (Disposer.isDisposed(myParent)) {
        return;
      }
      PsiDocumentManager.getInstance(project).commitDocument(myDocument);
      final IpnbEditablePanel selectedCellPanel = getSelectedCellPanel();
      final int index = myIpnbPanels.indexOf(selectedCellPanel);
      // Remember a valid selection index to restore after the panels are rebuilt.
      myInitialSelection = index >= 0 && index < myIpnbPanels.size() ? index : myIpnbPanels.size() - 1;
      readFromFile(false);
    }
  }

  /** Remembers the selected cell panel as the clipboard buffer for {@link #pasteCell()}. */
  public void copyCell() {
    myBufferPanel = getSelectedCellPanel();
  }

  /** Inserts a clone of the buffered cell panel (if any) after the current selection. */
  public void pasteCell() {
    if (myBufferPanel == null) return;
    final IpnbEditablePanel editablePanel = (IpnbEditablePanel)myBufferPanel.clone();
    addCell(editablePanel, true);
  }

  /**
   * Replaces the panel {@code from} with a freshly built panel for {@code cell} (used when a
   * cell's type changes), updating the model file, the panel list and the Swing container in
   * lock-step, then persists the change. Runs inside a write-action command.
   */
  public void replaceComponent(@NotNull final IpnbEditablePanel from, @NotNull final IpnbCell cell) {
    CommandProcessor.getInstance().executeCommand(getProject(), () -> ApplicationManager.getApplication().runWriteAction(() -> {
      final int index = myIpnbPanels.indexOf(from);
      IpnbEditablePanel panel;
      if (cell instanceof IpnbCodeCell) {
        panel = new IpnbCodePanel(myProject, myParent, (IpnbCodeCell)cell);
      }
      else if (cell instanceof IpnbMarkdownCell) {
        panel = new IpnbMarkdownPanel((IpnbMarkdownCell)cell, myParent.getIpnbFilePanel());
      }
      else if (cell instanceof IpnbHeadingCell) {
        panel = new IpnbHeadingPanel((IpnbHeadingCell)cell);
      }
      else {
        throw new UnsupportedOperationException(cell.getClass().toString());
      }
      if (index >= 0) {
        // Keep model file, panel list and Swing container indices in sync.
        myIpnbFile.removeCell(index);
        myIpnbPanels.remove(index);
        remove(index);
        myIpnbFile.addCell(cell, index);
        myIpnbPanels.add(index, panel);
        add(panel, index);
      }
      if (from instanceof IpnbCodePanel) {
        panel.switchToEditing();
      }
      setSelectedCellPanel(panel);
      saveToFile(false);
    }), "Ipnb.changeCellType", new Object());
  }

  /** Appends {@code comp} to both the Swing container and the internal panel list. */
  private void addComponent(@NotNull final IpnbEditablePanel comp) {
    add(comp);
    myIpnbPanels.add(comp);
  }

  /** Creates an empty spacer panel with the editor background color. */
  private static JPanel createEmptyPanel() {
    JPanel panel = new JPanel();
    panel.setBackground(IpnbEditorUtil.getBackground());
    return panel;
  }

  /**
   * Keyboard handling for cell navigation and clipboard shortcuts when a cell is selected:
   * Enter edits, Up/Down navigate, Delete removes, and Cmd/Ctrl+X/C/V cut/copy/paste
   * (only when the cell is not in editing mode). Other events are forwarded to the parent.
   */
  @Override
  protected void processKeyEvent(KeyEvent e) {
    if (mySelectedCellPanel != null && e.getID() == KeyEvent.KEY_PRESSED) {
      if (e.getKeyCode() == KeyEvent.VK_ENTER) {
        mySelectedCellPanel.switchToEditing();
        repaint();
      }
      if (e.getKeyCode() == KeyEvent.VK_UP) {
        selectPrev(mySelectedCellPanel);
      }
      else if (e.getKeyCode() == KeyEvent.VK_DOWN) {
        selectNext(mySelectedCellPanel, false);
      }
      else if (e.getKeyCode() == KeyEvent.VK_DELETE) {
        if (!mySelectedCellPanel.isEditing()) {
          IpnbDeleteCellAction.deleteCell(this);
        }
      }
      else if (e.getModifiers() == Toolkit.getDefaultToolkit().getMenuShortcutKeyMask()) {
        // Platform menu shortcut (Ctrl on Windows/Linux, Cmd on macOS) + X/C/V.
        if (e.getKeyCode() == KeyEvent.VK_X) {
          if (!mySelectedCellPanel.isEditing()) {
            IpnbCutCellAction.cutCell(this);
          }
        }
        else if (e.getKeyCode() == KeyEvent.VK_C) {
          if (!mySelectedCellPanel.isEditing()) {
            copyCell();
          }
        }
        else if (e.getKeyCode() == KeyEvent.VK_V) {
          if (!mySelectedCellPanel.isEditing()) {
            IpnbPasteCellAction.pasteCell(this);
          }
        }
      }
      else {
        getParent().dispatchEvent(e);
      }
    }
  }

  /** True when {@code cell} is not the last panel in the list. */
  public boolean hasNextCell(@NotNull IpnbEditablePanel cell) {
    int index = myIpnbPanels.indexOf(cell);
    return index < myIpnbPanels.size() - 1;
  }

  /** True when {@code cell} is not the first panel in the list. */
  public boolean hasPrevCell(@NotNull IpnbEditablePanel cell) {
    int index = myIpnbPanels.indexOf(cell);
    return index > 0;
  }

  /** Selects the panel before {@code cell}, if any. */
  public void selectPrev(@NotNull IpnbEditablePanel cell) {
    int index = myIpnbPanels.indexOf(cell);
    if (index > 0) {
      setSelectedCellPanel(myIpnbPanels.get(index - 1));
    }
  }

  /**
   * Selects the panel after {@code cell}; when {@code cell} is the last panel and
   * {@code addNew} is set, appends a new empty code cell and persists the change.
   */
  public void selectNext(@NotNull IpnbEditablePanel cell, boolean addNew) {
    int index = myIpnbPanels.indexOf(cell);
    if (index < myIpnbPanels.size() - 1) {
      setSelectedCellPanel(myIpnbPanels.get(index + 1));
    }
    else if (addNew) {
      createAndAddCell(true, IpnbCodeCell.createEmptyCodeCell());
      CommandProcessor.getInstance().executeCommand(getProject(), () -> ApplicationManager.getApplication().runWriteAction(
        () -> saveToFile(false)), "Ipnb.runCell", new Object());
    }
  }

  /** Selects the next panel, falling back to the previous one, else clears the selection. */
  public void selectNextOrPrev(@NotNull IpnbEditablePanel cell) {
    int index = myIpnbPanels.indexOf(cell);
    if (index < myIpnbPanels.size() - 1) {
      setSelectedCellPanel(myIpnbPanels.get(index + 1));
    }
    else if (index > 0) {
      setSelectedCellPanel(myIpnbPanels.get(index - 1));
    }
    else {
      mySelectedCellPanel = null;
      repaint();
    }
  }

  /** Draws a rounded selection frame around the selected cell (green while editing, gray otherwise). */
  @Override
  protected void paintComponent(Graphics g) {
    super.paintComponent(g);
    if (mySelectedCellPanel != null) {
      g.setColor(mySelectedCellPanel.isEditing() ? JBColor.GREEN : JBColor.GRAY);
      g.drawRoundRect(mySelectedCellPanel.getX() - 50, mySelectedCellPanel.getTop() - 1,
                      mySelectedCellPanel.getWidth() + 145 - IpnbEditorUtil.PROMPT_SIZE.width, mySelectedCellPanel.getHeight() + 2, 5, 5);
    }
  }

  /** Mouse-click handler: selects (without entering edit mode) the cell hit by the click point. */
  private void updateCellSelection(MouseEvent e) {
    if (e.getClickCount() > 0) {
      IpnbEditablePanel ipnbPanel = getIpnbPanelByClick(e.getPoint());
      if (ipnbPanel != null) {
        ipnbPanel.setEditing(false);
        IdeFocusManager.getGlobalInstance().doWhenFocusSettlesDown(() -> {
          IdeFocusManager.getGlobalInstance().requestFocus(ipnbPanel, true);
        });
        repaint();
        setSelectedCell(ipnbPanel, true);
      }
    }
  }

  /** Remembers the cell index to select after the panels are (re)built. */
  public void setInitialPosition(int index) {
    myInitialSelection = index;
  }

  /** Selects {@code ipnbPanel} programmatically (not mouse-driven). */
  public void setSelectedCellPanel(@NotNull final IpnbEditablePanel ipnbPanel) {
    setSelectedCell(ipnbPanel, false);
  }

  /**
   * Makes {@code ipnbPanel} the selected cell, leaving edit mode on the previous selection
   * and notifying the selection listener (only for panels already laid out, i.e. with a
   * non-zero height). No-op when the panel is already selected.
   */
  public void setSelectedCell(@NotNull final IpnbEditablePanel ipnbPanel, boolean mouse) {
    if (ipnbPanel.equals(mySelectedCellPanel)) return;
    if (mySelectedCellPanel != null) {
      mySelectedCellPanel.setEditing(false);
    }
    mySelectedCellPanel = ipnbPanel;
    revalidateAndRepaint();
    if (ipnbPanel.getBounds().getHeight() != 0) {
      myListener.selectionChanged(ipnbPanel, mouse);
    }
  }

  /** Revalidates the layout, re-requests focus on this panel and repaints. */
  public void revalidateAndRepaint() {
    revalidate();
    UIUtil.requestFocus(this);
    repaint();
  }

  @Nullable
  public IpnbEditablePanel getSelectedCellPanel() {
    return mySelectedCellPanel;
  }

  /** Index of the selected cell in the panel list, or -1 when nothing is selected. */
  public int getSelectedIndex() {
    final IpnbEditablePanel selectedCellPanel = getSelectedCellPanel();
    return myIpnbPanels.indexOf(selectedCellPanel);
  }

  /** Hit-test by vertical coordinate only: returns the panel containing the click's y, if any. */
  @Nullable
  private IpnbEditablePanel getIpnbPanelByClick(@NotNull final Point point) {
    for (IpnbEditablePanel c : myIpnbPanels) {
      if (c.contains(point.y)) {
        return c;
      }
    }
    return null;
  }

  public IpnbFile getIpnbFile() {
    return myIpnbFile;
  }

  @Override
  public Dimension getPreferredScrollableViewportSize() {
    return null;
  }

  @Override
  public int getScrollableUnitIncrement(Rectangle visibleRect, int orientation, int direction) {
    return 10;
  }

  @Override
  public int getScrollableBlockIncrement(Rectangle visibleRect, int orientation, int direction) {
    return 100;
  }

  @Override
  public boolean getScrollableTracksViewportWidth() {
    return false;
  }

  @Override
  public boolean getScrollableTracksViewportHeight() {
    return false;
  }

  /** DataProvider hook: exposes the embedded editor and the owning file editor to the platform. */
  @Nullable
  @Override
  public Object getData(String dataId) {
    final IpnbEditablePanel selectedCellPanel = getSelectedCellPanel();
    if (OpenFileDescriptor.NAVIGATE_IN_EDITOR.is(dataId)) {
      if (selectedCellPanel instanceof IpnbCodePanel) {
        return ((IpnbCodePanel)selectedCellPanel).getEditor();
      }
    }
    if (IpnbFileEditor.DATA_KEY.is(dataId)) {
      return myParent;
    }
    return null;
  }

  public Project getProject() {
    return myProject;
  }

  @NotNull
  public VirtualFile getVirtualFile() {
    return myVirtualFile;
  }

  public Document getDocument() {
    return myDocument;
  }
}
package com.mesaverde.village;

import java.io.*;
import java.util.*;

import com.mesaverde.model.*;

/**
 * Data layer for the Mesa Verde village simulation: reads yield/water/correction data files,
 * pushes per-cell values into the {@code world} grid, and runs the deer population model
 * (logistic growth plus spatial diffusion, solved either explicitly or implicitly via a
 * preconditioned conjugate gradient method).
 */
public class Database {
    VillageSpace world; // 1.2
    VillageSpace deerWorld;
    AgentModelSwarm mySwarm;
    int tmp;                       // toggle used by gaussian() to cache the second deviate
    float r1;                      // cached second Box-Muller deviate
    int step;
    float proxfit1;
    float proxfit2;
    float proxfit3;
    float proxfitz;
    int popLevel;
    int worldTime = 600;
    int which_yield;
    int random_yield;
    int data_length;               // remaining records in the yield data file
    int array_length;              // JC 9/24/04 length used when reading single-array files
    int world_x_size = Village.WORLD_X_SIZE;
    int world_y_size = Village.WORLD_Y_SIZE;
    int[][] data = new int[world_y_size][world_x_size];
    float info;
    int[] soilcheck = new int[153];
    float[] almagre_array = new float[1383]; // JAC 9/25/04 almagre data
    float[] prin_array = new float[1383];    // JAC 9/25/04 prin data
    int wx, wy;                    // effective world extents, clamped in createEnd()
    String data_file;
    String water_file;
    String output_file;
    String pop_file;
    // Deer grid is the cell grid coarsened by a factor of 5 in each dimension.
    int deer_x_size = (int)(Math.ceil((double)Village.WORLD_X_SIZE / 5));
    int deer_y_size = (int)(Math.ceil((double)Village.WORLD_Y_SIZE / 5));
    String[] wfile = new String[140];
    float[][] deer = new float[deer_y_size][deer_x_size];     // used for deer diffusion
    float[][] deerfood = new float[deer_y_size][deer_x_size]; // used for deer diffusion
    Scanner fp;
    Scanner wp;
    FileWriter Hout;
    FileWriter HHout;
    FileWriter BHout;
    FileWriter CCout;
    FileWriter popout;
    String theMethod;              // name of the Cell method invoked reflectively per cell

    // DC: static variables - most of which are taken from methods
    private static int update = 600;
    private static int whichfile = 0;
    private static int water_length = 241;
    private static final int[] output_time = new int[] { Village.P6YEAR, Village.P7YEAR,
            Village.P8YEAR, Village.P9YEAR, Village.P10YEAR, Village.P11YEAR,
            Village.P12YEAR, Village.P13YEAR, Village.P14YEAR, Village.P15YEAR,
            Village.P16YEAR, Village.P17YEAR, Village.P18YEAR, Village.P19YEAR };
    private static final String[] output_name = new String[] { "P6", "P7", "P8", "P9",
            "P10", "P11", "P12", "P13", "P14", "P15", "P16", "P17", "P18", "P19" };

    /** Restores the mutable static state to its initial values (needed between runs). */
    public static void resetStatics() {
        update = 600;
        whichfile = 0;
        water_length = 241;
    }

    // initialization of database

    /** Opens the yield data file for reading; exits the process on failure. */
    public void setDataFile(String name) {
        if (name.length() > 40) {
            System.err.println("Error: file name too long");
            System.exit(-1);
        }
        data_file = name;
        try {
            fp = new Scanner(new File(data_file));
        } catch (IOException e) {
            System.err.println("Error: can't open " + data_file);
            System.exit(-1);
        }
    }

    /** Opens the spring/water data file for reading; exits the process on failure. */
    public void setWaterFile(String name) {
        if (name.length() > 40) {
            System.err.println("Error: file name too long");
            System.exit(-1);
        }
        water_file = name;
        try {
            wp = new Scanner(new File(water_file));
        } catch (IOException e) {
            System.err.println("Error: can't open " + water_file);
            System.exit(-1);
        }
    }

    public void setMySwarm(AgentModelSwarm s) {
        mySwarm = s;
    }

    /** Sets the name of the Cell method that update passes invoke reflectively. */
    public void setSelMethod(String method) {
        theMethod = method;
    }

    public void setDataFileLength(int num) {
        data_length = num;
    }

    // JC 9/24/04 used when reading in single array files
    public void setArrayFileLength(int num) {
        array_length = num;
    }

    public void setWhichYield(int a) {
        which_yield = a;
    }

    public void setRandomYield(int a) {
        random_yield = a;
    }

    public void setWorld(VillageSpace w) // 1.2
    {
        world = w;
    }

    public void setDeerWorld(VillageSpace w) // 1.2
    {
        deerWorld = w;
    }

    /**
     * Finalizes construction: requires {@link #setWorld} to have been called, and clamps the
     * effective extents (wx, wy) to the configured maximum world size.
     */
    public Database createEnd() {
        if (world == null) {
            System.err.println("ERROR: must set world before ending database creation");
            System.exit(-1);
        }
        // check on the size of the world
        wx = world.getSizeX();
        if (wx > world_x_size)
            wx = world_x_size;
        wy = world.getSizeY();
        if (wy > world_y_size)
            wy = world_y_size;
        tmp = 0;
        step = 0;
        return this;
    }

    // database actions
    public void closeDataFile() {
        fp.close();
    }

    /**
     * Reads the next full grid of yield values from the data file into {@code data}.
     * When the file is exhausted it is reopened from the start (rewind semantics).
     * The file stores N-S as columns and W-E as rows.
     */
    public void updateDataGrid() {
        int x, y;
        float i;
        if (data_length < 1) {
            if (which_yield == 0)
                data_length = 400;
            fp.close();
            setDataFile(data_file); // DC: used to reset the scanner - equal to rewind(fp)
        }
        for (y = 0; y < world_y_size; y++) {
            for (x = 0; x < world_x_size; x++) {
                i = fp.nextFloat();
                data[y][x] = (int) i;
            }
        }
        data_length--;
    }

    // init spring amount resources
    // 12/02/04 JAC added to update flow rates for springs
    /**
     * Every 5 simulated years (starting at 600, stopping at 1300), loads the spring flow file
     * for the current year and writes each (x, y, flow) record into the matching cell.
     * Records outside the world bounds are skipped with a warning.
     */
    public void updateXYDataGrid() {
        int i;
        float x, y;
        float a, b, dubba;
        Cell cell;
        a = 0;
        b = 0;
        info = 0;
        String springs = "VEPI_data/spring/spring";
        if (worldTime == 1300)
            return;
        else {
            if (worldTime == update) {
                String wyear = String.valueOf(update);
                springs += wyear;
                springs += ".data";
                setWaterFile(springs);
                for (i = 0; i < water_length; i++) {
                    a = wp.nextFloat();
                    b = wp.nextFloat();
                    info = wp.nextFloat();
                    dubba = info;
                    x = a;
                    y = b;
                    if (x >= world_x_size || y >= world_y_size) {
                        System.out.print("WARNING: Imported data does not match world dimensions!\n");
                        continue;
                    }
                    cell = (Cell) world.getObjectAt((int) x, (int) y);
                    cell.setAvailableWater(dubba);
                }
                wp.close();
                update += 5;
                whichfile++;
                water_length = 58; // files after the first contain 58 records
            }
        }
    }

    public void setWorldTime(int t) {
        worldTime = t;
    }

    public void setPopLevel(int p) {
        popLevel = p;
    }

    // udated JC 9/24/04
    /**
     * Pushes the current yield grid plus the almagre/prin climate corrections into every cell,
     * invoking the configured Cell method (taking a single int) reflectively, then refreshes
     * each cell's water state. Index into the correction arrays assumes worldTime >= 600.
     */
    public void updateCells() {
        int x, y;
        try {
            java.lang.reflect.Method method = Cell.class.getMethod(theMethod, Integer.TYPE);
            for (y = 0; y < wy; y++) {
                for (x = 0; x < wx; x++) {
                    Cell cell = (Cell) world.getObjectAt(x, y);
                    cell.setWorldTime(worldTime);
                    cell.setAlmagre(almagre_array[worldTime - 600]); // tells cells what almagre correction is
                    cell.setPrin(prin_array[(worldTime - 600)]);     // tells cells what prin correction is
                    method.invoke(cell, data[y][x]);
                    cell.refreshWater();
                    // These lines call a method in Cell that evaluates whether the cell includes the
                    // given UTMs, then outputs that cell's productivity statistics to
                    // garden_cells.data. For Crow Canyon's plots.
                    // cell.reportGardens(4136717, 711054);
                    // cell.reportGardens(4136479, 710401);
                    // cell.reportGardens(4136565, 710664);
                    // cell.reportGardens(4136832, 711120);
                    // cell.reportGardens(4136810, 710972);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Reads {@code array_length} floats from the data file and passes the whole array to every
     * cell through the configured Cell method (taking a float[]).
     */
    public void updateCWSingle() {
        int j, x, y;
        float i;
        float[] array = new float[array_length];
        for (j = 0; j < array_length; j++) {
            i = fp.nextFloat();
            array[j] = i;
        }
        try {
            float[] f = new float[1];
            java.lang.reflect.Method method = Cell.class.getMethod(theMethod, f.getClass());
            for (y = 0; y < wy; y++) {
                for (x = 0; x < wx; x++) {
                    Cell cell = (Cell) world.getObjectAt(x, y);
                    method.invoke(cell, array);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Loads the almagre correction series from the data file and resets worldTime to 600. */
    public void setAlmagreArray() {
        int j;
        float i;
        worldTime = 600;
        for (j = 0; j < array_length; j++) {
            i = fp.nextFloat();
            almagre_array[j] = i;
        }
    }

    /** Loads the prin correction series from the data file. */
    public void setPrinArray() {
        int j;
        float i;
        for (j = 0; j < array_length; j++) {
            i = fp.nextFloat();
            prin_array[j] = i;
        }
    }

    /** Updates every deer cell: soil productivity, deer counts, and cell feedback. */
    public void updateDeerCells() {
        int x, y;
        for (y = 0; y < wy / 5; y++) {
            for (x = 0; x < ((wx / 5) + 1); x++) {
                DeerCell deercell;
                deercell = (DeerCell) deerWorld.getObjectAt(x, y);
                if (deercell == null)
                    System.out.printf("deercell[%d][%d] = nil", x, y);
                deercell.setSoilProd(x, y, world);
                deercell.updateDeer();
                deercell.updateCells(x, y, world);
            }
        }
    }

    /** Snapshots deer counts and deer food from the deer world into the local matrices. */
    public void createDeerMatrix() {
        int x, y;
        for (y = 0; y < wy / 5; y++) {
            for (x = 0; x < ((wx / 5) + 1); x++) {
                DeerCell deercell;
                deercell = (DeerCell) deerWorld.getObjectAt(x, y);
                deer[y][x] = deercell.getDeer();
                deerfood[y][x] = deercell.getDeerProd();
            }
        }
    }

    /**
     * Advances the deer population one step by logistic growth plus spatial diffusion.
     * EXPLICIT mode applies the logistic map and solves with pcg(); otherwise a five-point
     * diffusion stencil is applied directly, with wrap-around (mod) at the grid boundaries.
     * Should be called once per time step when diffusion is enabled.
     */
    public void diffuseDeer() {
        if (Village.DIFFUSION) {
            createDeerMatrix();
            int M = deer_y_size;
            int N = deer_x_size;
            float[][] newdeer = new float[deer_y_size][deer_x_size]; // temporary storage
            float ccapacity;          // carrying capacity
            float r = Village.DEER_R; // the intrinsic rate of growth
            float gamma;              // computed quantity
            float diffusionterm;      // temporary
            float diffcoeff = .1f;    // the diffusion coefficient, originally set to .1
            float cellwidth = 1.0f;   // the width of a cell
            int i, j;
            if (Village.EXPLICIT) {
                gamma = (float) Math.exp(-r);
                for (j = 0; j < N; j++) {
                    for (i = 0; i < M; i++) {
                        // Carrying capacity: available deer food scaled by per-deer consumption.
                        ccapacity = deerfood[i][j] / .55f;
                        // Discrete logistic update; .001 guards against division by zero.
                        newdeer[i][j] = ccapacity * deer[i][j]
                                / (.001f + (deer[i][j] + gamma * (ccapacity - deer[i][j])));
                    }
                }
                /* Solve for the new deer vector */
                pcg(deer[0], newdeer[0]); // DC: May have problems with this, originally was &deer[0][0]
            } else {
                gamma = (float) Math.exp(-r);
                diffcoeff = diffcoeff / (cellwidth * cellwidth);
                // Interior points: plain five-point Laplacian, no wrap-around needed.
                for (j = 1; j < N - 1; j++) {
                    for (i = 1; i < M - 1; i++) {
                        ccapacity = (deerfood[i][j]) / .55f;
                        diffusionterm = deer[i - 1][j] + deer[i + 1][j] + deer[i][j - 1]
                                + deer[i][j + 1] - 4.0f * deer[i][j];
                        deer[i][j] = deer[i][j] + diffcoeff * diffusionterm / (cellwidth * cellwidth);
                    }
                }
                // Vertical boundaries (first and last columns), wrapping in j.
                // BUGFIX: the original summed deer[i][mod(j - 1, N)] twice, dropping the
                // j + 1 neighbor; the stencil must use the (j - 1, j + 1) pair as in the
                // interior and horizontal-boundary loops.
                for (j = 0; j < N; j += N - 1) {
                    for (i = 1; i < M - 1; i++) {
                        ccapacity = (deerfood[i][j]) / .55f;
                        diffusionterm = deer[i - 1][j] + deer[i + 1][j] + deer[i][mod(j - 1, N)]
                                + deer[i][mod(j + 1, N)] - 4.0f * deer[i][j];
                        deer[i][j] = deer[i][j] + diffcoeff * diffusionterm / (cellwidth * cellwidth);
                    }
                }
                // Horizontal boundaries (first and last rows), wrapping in both i and j.
                for (j = 0; j < N; j++) {
                    for (i = 0; i < M; i += M - 1) {
                        ccapacity = (deerfood[i][j]) / .55f;
                        diffusionterm = deer[mod(i - 1, M)][j] + deer[mod(i + 1, M)][j]
                                + deer[i][mod(j - 1, N)] + deer[i][mod(j + 1, N)] - 4.0f * deer[i][j];
                        deer[i][j] = deer[i][j] + diffcoeff * diffusionterm / (cellwidth * cellwidth);
                    }
                }
            }
        }
    }

    /**
     * Computes num mod den with a non-negative result.
     * NOTE(review): for num < -den the "den + num" branch is still negative; callers only
     * ever pass index-1 with index >= 0, so num >= -1 holds in practice.
     */
    int mod(int num, int den) {
        if (num >= 0)
            num = num - (num / den) * den;
        else
            num = den + num;
        return num;
    }

    /*
     * The next function is an implementation of the preconditioned conjugate gradient method of
     * Concus, Golub, and O'Leary. It solves Ax = b where A is an MN dimensional square matrix,
     * and x and b are MN dimensional vectors. Amult(Av,v) computes the product Av for a given
     * vector v; Msolve(v,y) solves Mv = y for v given y. Single precision is used throughout
     * because the modelling is speculative and the numerics simple.
     *
     * Note that the rhs b is only used to compute the initial residual, so calling this as
     * pcg(b,b) should be acceptable.
     */
    public void pcg(float[] x, float[] b) {
        int M = deer_y_size;
        int N = deer_x_size;
        int i, k;
        int idim = M * N;
        int max_it = 100;
        float[] r = new float[M * N];
        float[] z = new float[M * N];
        float beta;
        float zprod1 = 0.0f;
        float zprod2 = 0.0f;
        float[] p = new float[M * N];
        float[] temporary = new float[M * N];
        float alpha;
        float tol = .1e-4f;
        float check;
        // x starts at zero, so the initial residual is simply b.
        for (i = 0; i < idim; i++) {
            r[i] = b[i];
        }
        for (i = 0; i < idim; i++) {
            p[i] = 0.0f;
        }
        for (i = 0; i < idim; i++) {
            x[i] = 0.0f;
        }
        for (k = 0; k < max_it; k++) {
            check = sdot(r, r);
            if (check < tol) {
                return; // converged
            }
            Msolve(z, r);
            zprod1 = sdot(z, r);
            if (zprod2 != 0.0)
                beta = zprod1 / zprod2;
            else
                beta = 0.0f;
            zprod2 = zprod1;
            saxpy(z, p, beta, p); // computing p(k)
            Amult(temporary, p);
            alpha = zprod2 / sdot(p, temporary);
            saxpy(x, p, alpha, x);
            saxpy(r, temporary, -alpha, r);
        }
    }

    /* BLAS sdot function for this package. */
    float sdot(float[] u, float[] v) {
        int M = deer_y_size;
        int N = deer_x_size;
        int i, idim = M * N;
        float accum = 0.0f;
        for (i = 0; i < idim; i++) {
            accum += u[i] * v[i];
        }
        return accum;
    }

    /*
     * BLAS saxpy function for this package. The result goes in w. It is alright for w to be the
     * same as u or v in memory.
     */
    public void saxpy(float[] u, float[] v, float a, float[] w) {
        int M = deer_y_size;
        int N = deer_x_size;
        int i, idim = M * N;
        float temporary;
        for (i = 0; i < idim; i++) {
            temporary = a * v[i];
            w[i] = temporary + u[i];
        }
    }

    /*
     * Function to multiply a vector of dimension M*N by the diffusion matrix. Remember that the
     * vector v is actually an array representing deer population of cells of an MxN rectangular
     * grid. NOTE(review): the off-diagonal terms use the four DIAGONAL neighbors
     * (i±1, j±1), not the von Neumann stencil used in diffuseDeer(); preserved as-is since the
     * implicit solve was calibrated against this operator — confirm against the model's
     * numerical notes before changing.
     */
    public void Amult(float[] Av, float[] v) {
        int M = deer_y_size;
        int N = deer_x_size;
        int i, j;
        int index;
        int ipmod, immod, jpmod, jmmod;
        float diffcoef = .1f; // for implicit - this is the real diffusivity; original = .1
        for (i = 0; i < M; i++) {
            for (j = 0; j < N; j++) {
                index = i * N + j;
                ipmod = mod(i + 1, M);
                immod = mod(i - 1, M);
                jpmod = mod(j + 1, N);
                jmmod = mod(j - 1, N);
                Av[index] = 4.0f * v[index];
                Av[index] += -v[ipmod * N + jpmod] - v[ipmod * N + jmmod];
                Av[index] += -v[immod * N + jpmod] - v[immod * N + jmmod];
                Av[index] = diffcoef * Av[index] + v[index];
            }
        }
    }

    /** Preconditioner solve: identity preconditioner (v = y). */
    public void Msolve(float[] v, float[] y) {
        int M = deer_y_size;
        int N = deer_x_size;
        int i;
        for (i = 0; i < M * N; i++) {
            v[i] = y[i];
        }
    }

    /**
     * Like {@link #updateCells()} but perturbs each yield value with Gaussian noise
     * (sd = value/4) before handing it to the cell.
     */
    public void updateRandomCells() {
        int x, y;
        try {
            java.lang.reflect.Method method = Cell.class.getMethod(theMethod, Integer.TYPE);
            for (y = 0; y < wy; y++) {
                for (x = 0; x < wx; x++) {
                    Cell cell;
                    cell = (Cell) world.getObjectAt(x, y);
                    method.invoke(cell, ((int) (data[y][x] * (1.0 + gaussian() / 4.0))));
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Dispatches the per-step cell update according to the configured yield mode. */
    public void updateCellWorld() {
        switch (which_yield) {
        case -1: // init data files in folder data
            updateDataGrid();
            updateCells();
            break;
        case 0:
            updateDataGrid();
            updateCells();
            break;
        case 1:
            updateRandomCells();
            break;
        default:
            break; // cases 2,3 should not change with updates
        }
        if (Village.DEBUG)
            System.out.printf("end updateCellWorld\n");
    }

    // yield specific stuff
    /** Initializes the yield grid for the constant/random/peaked yield modes. */
    public void initYields() {
        int x, y;
        switch (which_yield) {
        case 1:
            for (y = 0; y < wy; y++) {
                for (x = 0; x < wx; x++) {
                    data[y][x] = ((int) (random_yield / 2.3677) - 4) / 10;
                }
            }
            break;
        case 2:
            for (y = 0; y < wy; y++) {
                for (x = 0; x < wx; x++) {
                    data[y][x] = ((int) (random_yield / 2.3677) - 4) / 10;
                }
            }
            break;
        case 3:
            peakIt();
            break;
        default:
            break;
        }
    }

    /** Fills the yield grid with a pyramid-shaped ("peaked") productivity surface. */
    public void peakIt() {
        int i, j;
        for (i = 0; i <= wy / 2; i++) {
            for (j = 0; j < wx; j++) {
                data[i][j] = i / 2 + 5;
            }
        }
        for (i = wy - 1; i > wy / 2; i--) {
            for (j = 0; j < wx; j++) {
                data[i][j] = wy / 2 - i / 2 + 5;
            }
        }
        for (j = 0; j < wx / 2; j++) {
            for (i = j; i < wy - j - 1; i++) {
                data[i + 1][j] = data[i][j];
            }
        }
        for (j = wx - 1; j >= wx / 2; j--) {
            for (i = wx - j - 1; i < j - wx + wy; i++) {
                data[i + 1][j] = data[i][j];
            }
        }
    }

    /**
     * Standard normal deviate via the Box-Muller (polar) transformation. Generates deviates in
     * pairs: the second one is cached in {@code r1} and returned on the next call.
     */
    float gaussian() {
        float x1, x2, w;
        if (tmp == 0) {
            do {
                x1 = (float) (2.0 * Village.uniformDblRand(0, 1) - 1.0);
                x2 = (float) (2.0 * Village.uniformDblRand(0, 1) - 1.0);
                w = x1 * x1 + x2 * x2;
            } while (w >= 1.0f);
            w = (float) Math.sqrt((-2.0 * Math.log(w)) / w);
            // return two rng mean 0 sd 1
            r1 = x1 * w;
            tmp = 1;
            return (x2 * w);
        } else {
            tmp = 0;
            return (r1);
        }
    }
}
/*
 * Copyright Terracotta, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.ehcache.management.registry;

import org.ehcache.CacheManager;
import org.ehcache.config.CacheConfiguration;
import org.ehcache.config.builders.CacheConfigurationBuilder;
import org.ehcache.config.builders.CacheManagerBuilder;
import org.ehcache.management.ManagementRegistryServiceConfiguration;
import org.ehcache.management.SharedManagementService;
import org.hamcrest.Matchers;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.Timeout;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.terracotta.management.model.call.ContextualReturn;
import org.terracotta.management.model.capabilities.Capability;
import org.terracotta.management.model.context.Context;
import org.terracotta.management.model.context.ContextContainer;
import org.terracotta.management.model.stats.ContextualStatistics;
import org.terracotta.management.registry.ResultSet;
import org.terracotta.management.registry.StatisticQuery;
import org.terracotta.management.registry.StatisticQuery.Builder;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.concurrent.ExecutionException;

import static org.ehcache.config.builders.ResourcePoolsBuilder.heap;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.Matchers.isIn;
import static org.hamcrest.collection.IsCollectionWithSize.hasSize;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;

/**
 * Integration tests for a {@link SharedManagementService} shared between two cache managers:
 * verifies merged contexts, merged capabilities, cross-manager statistics queries, and
 * management action calls (including error handling for an unknown cache manager context).
 */
@RunWith(JUnit4.class)
public class DefaultSharedManagementServiceTest {

  CacheManager cacheManager1;
  CacheManager cacheManager2;
  SharedManagementService service;

  ManagementRegistryServiceConfiguration config1;
  ManagementRegistryServiceConfiguration config2;

  // Safety net for the busy-wait polling in getResultSet(): fail the test after 10 s.
  @Rule
  public final Timeout globalTimeout = Timeout.seconds(10);

  /**
   * Builds two cache managers ("myCM1" with one cache, "myCM2" with two caches) registered
   * against the same shared management service, then cycles both through close/init to verify
   * the registry survives a full lifecycle loop.
   */
  @Before
  public void init() {
    CacheConfiguration<Long, String> cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10))
        .build();

    service = new DefaultSharedManagementService();

    cacheManager1 = CacheManagerBuilder.newCacheManagerBuilder()
        .withCache("aCache1", cacheConfiguration)
        .using(service)
        .using(config1 = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM1"))
        .build(true);

    cacheManager2 = CacheManagerBuilder.newCacheManagerBuilder()
        .withCache("aCache2", cacheConfiguration)
        .withCache("aCache3", cacheConfiguration)
        .using(service)
        .using(config2 = new DefaultManagementRegistryConfiguration().setCacheManagerAlias("myCM2"))
        .build(true);

    // this serie of calls make sure the registry still works after a full init / close / init loop
    cacheManager1.close();
    cacheManager1.init();
    cacheManager2.close();
    cacheManager2.init();
  }

  @After()
  public void close() {
    cacheManager2.close();
    cacheManager1.close();
  }

  /** One context container per cache manager, each listing its caches as sub-contexts. */
  @Test
  public void testSharedContexts() {
    assertEquals(2, service.getContextContainers().size());

    ContextContainer contextContainer1 = service.getContextContainers().get(config1.getContext());
    ContextContainer contextContainer2 = service.getContextContainers().get(config2.getContext());

    assertThat(contextContainer1, is(notNullValue()));
    assertThat(contextContainer2, is(notNullValue()));

    assertThat(contextContainer1.getName(), equalTo("cacheManagerName"));
    assertThat(contextContainer1.getValue(), equalTo("myCM1"));
    assertThat(contextContainer2.getName(), equalTo("cacheManagerName"));
    assertThat(contextContainer2.getValue(), equalTo("myCM2"));

    assertThat(contextContainer1.getSubContexts().size(), equalTo(1));
    assertThat(contextContainer1.getSubContexts().iterator().next().getName(), equalTo("cacheName"));
    assertThat(contextContainer1.getSubContexts().iterator().next().getValue(), equalTo("aCache1"));

    assertThat(contextContainer2.getSubContexts().size(), equalTo(2));
    assertThat(contextContainer2.getSubContexts().iterator().next().getName(), equalTo("cacheName"));
    assertThat(new ArrayList<>(contextContainer2.getSubContexts()).get(1).getName(), equalTo("cacheName"));

    // Sub-context iteration order is not fixed, so only membership is asserted.
    assertThat(new ArrayList<>(contextContainer2.getSubContexts()).get(0).getValue(), isIn(Arrays.asList("aCache2", "aCache3")));
    assertThat(new ArrayList<>(contextContainer2.getSubContexts()).get(1).getValue(), isIn(Arrays.asList("aCache2", "aCache3")));
  }

  /** Both cache managers expose the same four management capabilities, in a fixed order. */
  @Test
  public void testSharedCapabilities() {
    assertEquals(2, service.getCapabilitiesByContext().size());

    Collection<? extends Capability> capabilities1 = service.getCapabilitiesByContext().get(config1.getContext());
    Collection<? extends Capability> capabilities2 = service.getCapabilitiesByContext().get(config2.getContext());

    assertThat(capabilities1, hasSize(4));
    assertThat(new ArrayList<Capability>(capabilities1).get(0).getName(), equalTo("ActionsCapability"));
    assertThat(new ArrayList<Capability>(capabilities1).get(1).getName(), equalTo("SettingsCapability"));
    assertThat(new ArrayList<Capability>(capabilities1).get(2).getName(), equalTo("StatisticCollectorCapability"));
    assertThat(new ArrayList<Capability>(capabilities1).get(3).getName(), equalTo("StatisticsCapability"));
    assertThat(capabilities2, hasSize(4));
    assertThat(new ArrayList<Capability>(capabilities2).get(0).getName(), equalTo("ActionsCapability"));
    assertThat(new ArrayList<Capability>(capabilities2).get(1).getName(), equalTo("SettingsCapability"));
    assertThat(new ArrayList<Capability>(capabilities2).get(2).getName(), equalTo("StatisticCollectorCapability"));
    assertThat(new ArrayList<Capability>(capabilities2).get(3).getName(), equalTo("StatisticsCapability"));
  }

  /**
   * Triggers one miss per cache across both managers, then queries the miss-count statistic
   * through the shared service for all three caches and checks each counter reached 1.
   */
  @Test
  public void testStats() {
    String statisticName = "Cache:MissCount";

    List<Context> contextList = Arrays.asList(
        Context.empty()
            .with("cacheManagerName", "myCM1")
            .with("cacheName", "aCache1"),
        Context.empty()
            .with("cacheManagerName", "myCM2")
            .with("cacheName", "aCache2"),
        Context.empty()
            .with("cacheManagerName", "myCM2")
            .with("cacheName", "aCache3"));

    // Each get() on an absent key produces exactly one miss per cache.
    cacheManager1.getCache("aCache1", Long.class, String.class).get(1L);
    cacheManager2.getCache("aCache2", Long.class, String.class).get(2L);
    cacheManager2.getCache("aCache3", Long.class, String.class).get(3L);

    Builder builder = service.withCapability("StatisticsCapability")
        .queryStatistic(statisticName)
        .on(contextList);

    ResultSet<ContextualStatistics> allCounters = getResultSet(builder, contextList, statisticName);

    assertThat(allCounters.size(), equalTo(3));
    assertThat(allCounters.getResult(contextList.get(0)).size(), equalTo(1));
    assertThat(allCounters.getResult(contextList.get(1)).size(), equalTo(1));
    assertThat(allCounters.getResult(contextList.get(2)).size(), equalTo(1));

    assertThat(allCounters.getResult(contextList.get(0)).getLatestSampleValue(statisticName).get(), equalTo(1L));
    assertThat(allCounters.getResult(contextList.get(1)).getLatestSampleValue(statisticName).get(), equalTo(1L));
    assertThat(allCounters.getResult(contextList.get(2)).getLatestSampleValue(statisticName).get(), equalTo(1L));
  }

  /**
   * Polls the statistic query until every queried cache reports a value > 0 (the statistic
   * history fills in asynchronously). Unbounded busy-wait by design; the class-level
   * {@code globalTimeout} rule aborts the test if values never appear.
   */
  private static ResultSet<ContextualStatistics> getResultSet(StatisticQuery.Builder builder, List<Context> contextList, String statisticsName) {
    ResultSet<ContextualStatistics> counters = null;

    //wait till Counter history is initialized and contains values > 0.
    while(!Thread.currentThread().isInterrupted()) {
      counters = builder.build().execute();
      if(counters.getResult(contextList.get(0)).<Long>getLatestSampleValue(statisticsName).get()> 0 &&
         counters.getResult(contextList.get(1)).<Long>getLatestSampleValue(statisticsName).get() > 0 &&
         counters.getResult(contextList.get(2)).<Long>getLatestSampleValue(statisticsName).get() > 0) {
        break;
      }
    }

    return counters;
  }

  /**
   * Invokes the "clear" action across four contexts — three valid caches and one belonging to
   * a nonexistent cache manager ("myCM55"). The valid calls execute and return null; reading
   * the value of the failed call must throw NoSuchElementException. Afterwards all three real
   * caches are verified empty.
   */
  @Test
  public void testCall() throws ExecutionException {
    List<Context> contextList = Arrays.asList(
        Context.empty()
            .with("cacheManagerName", "myCM1")
            .with("cacheName", "aCache1"),
        Context.empty()
            .with("cacheManagerName", "myCM1")
            .with("cacheName", "aCache4"),
        Context.empty()
            .with("cacheManagerName", "myCM2")
            .with("cacheName", "aCache2"),
        Context.empty()
            .with("cacheManagerName", "myCM55")
            .with("cacheName", "aCache55"));

    cacheManager1.getCache("aCache1", Long.class, String.class).put(1L, "1");
    cacheManager2.getCache("aCache2", Long.class, String.class).put(2L, "2");

    assertThat(cacheManager1.getCache("aCache1", Long.class, String.class).get(1L), equalTo("1"));
    assertThat(cacheManager2.getCache("aCache2", Long.class, String.class).get(2L), equalTo("2"));

    // Create aCache4 on the fly to cover a cache added after service registration.
    CacheConfiguration<Long, String> cacheConfiguration = CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class, heap(10))
        .build();

    cacheManager1.createCache("aCache4", cacheConfiguration);
    cacheManager1.getCache("aCache4", Long.class, String.class).put(4L, "4");
    assertThat(cacheManager1.getCache("aCache4", Long.class, String.class).get(4L), equalTo("4"));

    ResultSet<? extends ContextualReturn<?>> results = service.withCapability("ActionsCapability")
        .call("clear")
        .on(contextList)
        .build()
        .execute();

    assertThat(results.size(), Matchers.equalTo(4));

    assertThat(results.getResult(contextList.get(0)).hasExecuted(), is(true));
    assertThat(results.getResult(contextList.get(1)).hasExecuted(), is(true));
    assertThat(results.getResult(contextList.get(2)).hasExecuted(), is(true));
    assertThat(results.getResult(contextList.get(3)).hasExecuted(), is(false));

    assertThat(results.getResult(contextList.get(0)).getValue(), is(nullValue()));
    assertThat(results.getResult(contextList.get(1)).getValue(), is(nullValue()));
    assertThat(results.getResult(contextList.get(2)).getValue(), is(nullValue()));

    try {
      results.getResult(contextList.get(3)).getValue();
      fail();
    } catch (Exception e) {
      assertThat(e, instanceOf(NoSuchElementException.class));
    }

    assertThat(cacheManager1.getCache("aCache1", Long.class, String.class).get(1L), is(Matchers.nullValue()));
    assertThat(cacheManager2.getCache("aCache2", Long.class, String.class).get(2L), is(Matchers.nullValue()));
    assertThat(cacheManager1.getCache("aCache4", Long.class, String.class).get(4L), is(Matchers.nullValue()));
  }

}
/*
 * WSO2 API Manager - Publisher API
 * This specifies a **RESTful API** for WSO2 **API Manager** - Publisher. Please see [full swagger definition](https://raw.githubusercontent.com/wso2/carbon-apimgt/v6.0.4/components/apimgt/org.wso2.carbon.apimgt.rest.api.publisher/src/main/resources/publisher-api.yaml) of the API which is written using [swagger 2.0](http://swagger.io/) specification.
 *
 * OpenAPI spec version: v1.1
 * Contact: architecture@wso2.com
 *
 * NOTE: This class is auto generated by the swagger code generator program.
 * https://github.com/swagger-api/swagger-codegen.git
 * Do not edit the class manually.
 */

package org.wso2.am.integration.clients.publisher.api.v1;

import org.wso2.am.integration.clients.publisher.api.ApiCallback;
import org.wso2.am.integration.clients.publisher.api.ApiClient;
import org.wso2.am.integration.clients.publisher.api.ApiException;
import org.wso2.am.integration.clients.publisher.api.ApiResponse;
import org.wso2.am.integration.clients.publisher.api.Configuration;
import org.wso2.am.integration.clients.publisher.api.Pair;
import org.wso2.am.integration.clients.publisher.api.ProgressRequestBody;
import org.wso2.am.integration.clients.publisher.api.ProgressResponseBody;

import com.google.gson.reflect.TypeToken;

import java.io.IOException;

import org.wso2.am.integration.clients.publisher.api.v1.dto.AlertConfigDTO;
import org.wso2.am.integration.clients.publisher.api.v1.dto.AlertConfigInfoDTO;
import org.wso2.am.integration.clients.publisher.api.v1.dto.AlertConfigListDTO;
import org.wso2.am.integration.clients.publisher.api.v1.dto.ErrorDTO;

import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Swagger-codegen REST client for the publisher alert-configuration endpoints
// (/alerts/{alertType}/configurations[...]). Each operation appears in five
// generated variants: *Call (builds the okhttp Call), *ValidateBeforeCall
// (null-checks required params), the plain sync method, *WithHttpInfo
// (sync with status/headers), and *Async (callback-based).
// NOTE(review): generated code — regenerate from the swagger spec instead of
// editing by hand.
public class AlertConfigurationApi {
    private ApiClient apiClient;

    public AlertConfigurationApi() {
        this(Configuration.getDefaultApiClient());
    }

    public AlertConfigurationApi(ApiClient apiClient) {
        this.apiClient = apiClient;
    }

    public ApiClient getApiClient() {
        return apiClient;
    }

    public void setApiClient(ApiClient apiClient) {
        this.apiClient = apiClient;
    }

    /**
     * Build call for addAlertConfig
     * @param alertType The alert type. (required)
     * @param configurationId The alert configuration id. (required)
     * @param body Configuration for AbnormalRequestCount alert type (required)
     * @param progressListener Progress listener
     * @param progressRequestListener Progress request listener
     * @return Call to execute
     * @throws ApiException If fail to serialize the request body object
     */
    public com.squareup.okhttp.Call addAlertConfigCall(String alertType, String configurationId, AlertConfigInfoDTO body, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        Object localVarPostBody = body;

        // create path and map variables
        String localVarPath = "/alerts/{alertType}/configurations/{configurationId}"
            .replaceAll("\\{" + "alertType" + "\\}", apiClient.escapeString(alertType.toString()))
            .replaceAll("\\{" + "configurationId" + "\\}", apiClient.escapeString(configurationId.toString()));

        List<Pair> localVarQueryParams = new ArrayList<Pair>();
        List<Pair> localVarCollectionQueryParams = new ArrayList<Pair>();

        Map<String, String> localVarHeaderParams = new HashMap<String, String>();

        Map<String, Object> localVarFormParams = new HashMap<String, Object>();

        final String[] localVarAccepts = {
            "application/json"
        };
        final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
        if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept);

        final String[] localVarContentTypes = {
            "application/json"
        };
        final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
        localVarHeaderParams.put("Content-Type", localVarContentType);

        // NOTE(review): this interceptor is added to the shared OkHttp client's
        // networkInterceptors and never removed, so repeated progress-tracked
        // calls accumulate interceptors — known quirk of swagger-codegen output.
        if(progressListener != null) {
            apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
                @Override
                public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
                    com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
                    return originalResponse.newBuilder()
                    .body(new ProgressResponseBody(originalResponse.body(), progressListener))
                    .build();
                }
            });
        }

        String[] localVarAuthNames = new String[] { "OAuth2Security" };
        return apiClient.buildCall(localVarPath, "PUT", localVarQueryParams, localVarCollectionQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
    }

    @SuppressWarnings("rawtypes")
    private com.squareup.okhttp.Call addAlertConfigValidateBeforeCall(String alertType, String configurationId, AlertConfigInfoDTO body, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {

        // verify the required parameter 'alertType' is set
        if (alertType == null) {
            throw new ApiException("Missing the required parameter 'alertType' when calling addAlertConfig(Async)");
        }

        // verify the required parameter 'configurationId' is set
        if (configurationId == null) {
            throw new ApiException("Missing the required parameter 'configurationId' when calling addAlertConfig(Async)");
        }

        // verify the required parameter 'body' is set
        if (body == null) {
            throw new ApiException("Missing the required parameter 'body' when calling addAlertConfig(Async)");
        }

        com.squareup.okhttp.Call call = addAlertConfigCall(alertType, configurationId, body, progressListener, progressRequestListener);
        return call;
    }

    /**
     * Add AbnormalRequestsPerMin alert configurations.
     * This operation is used to add configuration for the AbnormalRequestsPerMin alert type.
     * @param alertType The alert type. (required)
     * @param configurationId The alert configuration id. (required)
     * @param body Configuration for AbnormalRequestCount alert type (required)
     * @return AlertConfigDTO
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public AlertConfigDTO addAlertConfig(String alertType, String configurationId, AlertConfigInfoDTO body) throws ApiException {
        ApiResponse<AlertConfigDTO> resp = addAlertConfigWithHttpInfo(alertType, configurationId, body);
        return resp.getData();
    }

    /**
     * Add AbnormalRequestsPerMin alert configurations.
     * This operation is used to add configuration for the AbnormalRequestsPerMin alert type.
     * @param alertType The alert type. (required)
     * @param configurationId The alert configuration id. (required)
     * @param body Configuration for AbnormalRequestCount alert type (required)
     * @return ApiResponse&lt;AlertConfigDTO&gt;
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public ApiResponse<AlertConfigDTO> addAlertConfigWithHttpInfo(String alertType, String configurationId, AlertConfigInfoDTO body) throws ApiException {
        com.squareup.okhttp.Call call = addAlertConfigValidateBeforeCall(alertType, configurationId, body, null, null);
        Type localVarReturnType = new TypeToken<AlertConfigDTO>(){}.getType();
        return apiClient.execute(call, localVarReturnType);
    }

    /**
     * Add AbnormalRequestsPerMin alert configurations. (asynchronously)
     * This operation is used to add configuration for the AbnormalRequestsPerMin alert type.
     * @param alertType The alert type. (required)
     * @param configurationId The alert configuration id. (required)
     * @param body Configuration for AbnormalRequestCount alert type (required)
     * @param callback The callback to be executed when the API call finishes
     * @return The request call
     * @throws ApiException If fail to process the API call, e.g. serializing the request body object
     */
    public com.squareup.okhttp.Call addAlertConfigAsync(String alertType, String configurationId, AlertConfigInfoDTO body, final ApiCallback<AlertConfigDTO> callback) throws ApiException {

        ProgressResponseBody.ProgressListener progressListener = null;
        ProgressRequestBody.ProgressRequestListener progressRequestListener = null;

        if (callback != null) {
            progressListener = new ProgressResponseBody.ProgressListener() {
                @Override
                public void update(long bytesRead, long contentLength, boolean done) {
                    callback.onDownloadProgress(bytesRead, contentLength, done);
                }
            };

            progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
                @Override
                public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                    callback.onUploadProgress(bytesWritten, contentLength, done);
                }
            };
        }

        com.squareup.okhttp.Call call = addAlertConfigValidateBeforeCall(alertType, configurationId, body, progressListener, progressRequestListener);
        Type localVarReturnType = new TypeToken<AlertConfigDTO>(){}.getType();
        apiClient.executeAsync(call, localVarReturnType, callback);
        return call;
    }

    /**
     * Build call for deleteAlertConfig
     * @param alertType The alert type. (required)
     * @param configurationId The alert configuration id. (required)
     * @param progressListener Progress listener
     * @param progressRequestListener Progress request listener
     * @return Call to execute
     * @throws ApiException If fail to serialize the request body object
     */
    public com.squareup.okhttp.Call deleteAlertConfigCall(String alertType, String configurationId, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        Object localVarPostBody = null;

        // create path and map variables
        String localVarPath = "/alerts/{alertType}/configurations/{configurationId}"
            .replaceAll("\\{" + "alertType" + "\\}", apiClient.escapeString(alertType.toString()))
            .replaceAll("\\{" + "configurationId" + "\\}", apiClient.escapeString(configurationId.toString()));

        List<Pair> localVarQueryParams = new ArrayList<Pair>();
        List<Pair> localVarCollectionQueryParams = new ArrayList<Pair>();

        Map<String, String> localVarHeaderParams = new HashMap<String, String>();

        Map<String, Object> localVarFormParams = new HashMap<String, Object>();

        final String[] localVarAccepts = {
            "application/json"
        };
        final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
        if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept);

        final String[] localVarContentTypes = {
            "application/json"
        };
        final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
        localVarHeaderParams.put("Content-Type", localVarContentType);

        // NOTE(review): same never-removed interceptor pattern as addAlertConfigCall.
        if(progressListener != null) {
            apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
                @Override
                public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
                    com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
                    return originalResponse.newBuilder()
                    .body(new ProgressResponseBody(originalResponse.body(), progressListener))
                    .build();
                }
            });
        }

        String[] localVarAuthNames = new String[] { "OAuth2Security" };
        return apiClient.buildCall(localVarPath, "DELETE", localVarQueryParams, localVarCollectionQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
    }

    @SuppressWarnings("rawtypes")
    private com.squareup.okhttp.Call deleteAlertConfigValidateBeforeCall(String alertType, String configurationId, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {

        // verify the required parameter 'alertType' is set
        if (alertType == null) {
            throw new ApiException("Missing the required parameter 'alertType' when calling deleteAlertConfig(Async)");
        }

        // verify the required parameter 'configurationId' is set
        if (configurationId == null) {
            throw new ApiException("Missing the required parameter 'configurationId' when calling deleteAlertConfig(Async)");
        }

        com.squareup.okhttp.Call call = deleteAlertConfigCall(alertType, configurationId, progressListener, progressRequestListener);
        return call;
    }

    /**
     * Delete the selected configuration from AbnormalRequestsPerMin alert type.
     * This operation is used to delete configuration from the AbnormalRequestsPerMin alert type.
     * @param alertType The alert type. (required)
     * @param configurationId The alert configuration id. (required)
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public void deleteAlertConfig(String alertType, String configurationId) throws ApiException {
        deleteAlertConfigWithHttpInfo(alertType, configurationId);
    }

    /**
     * Delete the selected configuration from AbnormalRequestsPerMin alert type.
     * This operation is used to delete configuration from the AbnormalRequestsPerMin alert type.
     * @param alertType The alert type. (required)
     * @param configurationId The alert configuration id. (required)
     * @return ApiResponse&lt;Void&gt;
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public ApiResponse<Void> deleteAlertConfigWithHttpInfo(String alertType, String configurationId) throws ApiException {
        com.squareup.okhttp.Call call = deleteAlertConfigValidateBeforeCall(alertType, configurationId, null, null);
        return apiClient.execute(call);
    }

    /**
     * Delete the selected configuration from AbnormalRequestsPerMin alert type. (asynchronously)
     * This operation is used to delete configuration from the AbnormalRequestsPerMin alert type.
     * @param alertType The alert type. (required)
     * @param configurationId The alert configuration id. (required)
     * @param callback The callback to be executed when the API call finishes
     * @return The request call
     * @throws ApiException If fail to process the API call, e.g. serializing the request body object
     */
    public com.squareup.okhttp.Call deleteAlertConfigAsync(String alertType, String configurationId, final ApiCallback<Void> callback) throws ApiException {

        ProgressResponseBody.ProgressListener progressListener = null;
        ProgressRequestBody.ProgressRequestListener progressRequestListener = null;

        if (callback != null) {
            progressListener = new ProgressResponseBody.ProgressListener() {
                @Override
                public void update(long bytesRead, long contentLength, boolean done) {
                    callback.onDownloadProgress(bytesRead, contentLength, done);
                }
            };

            progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
                @Override
                public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                    callback.onUploadProgress(bytesWritten, contentLength, done);
                }
            };
        }

        com.squareup.okhttp.Call call = deleteAlertConfigValidateBeforeCall(alertType, configurationId, progressListener, progressRequestListener);
        apiClient.executeAsync(call, callback);
        return call;
    }

    /**
     * Build call for getAllAlertConfigs
     * @param alertType The alert type. (required)
     * @param progressListener Progress listener
     * @param progressRequestListener Progress request listener
     * @return Call to execute
     * @throws ApiException If fail to serialize the request body object
     */
    public com.squareup.okhttp.Call getAllAlertConfigsCall(String alertType, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        Object localVarPostBody = null;

        // create path and map variables
        String localVarPath = "/alerts/{alertType}/configurations"
            .replaceAll("\\{" + "alertType" + "\\}", apiClient.escapeString(alertType.toString()));

        List<Pair> localVarQueryParams = new ArrayList<Pair>();
        List<Pair> localVarCollectionQueryParams = new ArrayList<Pair>();

        Map<String, String> localVarHeaderParams = new HashMap<String, String>();

        Map<String, Object> localVarFormParams = new HashMap<String, Object>();

        final String[] localVarAccepts = {
            "application/json"
        };
        final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
        if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept);

        final String[] localVarContentTypes = {
            "application/json"
        };
        final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
        localVarHeaderParams.put("Content-Type", localVarContentType);

        // NOTE(review): same never-removed interceptor pattern as addAlertConfigCall.
        if(progressListener != null) {
            apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
                @Override
                public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
                    com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
                    return originalResponse.newBuilder()
                    .body(new ProgressResponseBody(originalResponse.body(), progressListener))
                    .build();
                }
            });
        }

        String[] localVarAuthNames = new String[] { "OAuth2Security" };
        return apiClient.buildCall(localVarPath, "GET", localVarQueryParams, localVarCollectionQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
    }

    @SuppressWarnings("rawtypes")
    private com.squareup.okhttp.Call getAllAlertConfigsValidateBeforeCall(String alertType, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {

        // verify the required parameter 'alertType' is set
        if (alertType == null) {
            throw new ApiException("Missing the required parameter 'alertType' when calling getAllAlertConfigs(Async)");
        }

        com.squareup.okhttp.Call call = getAllAlertConfigsCall(alertType, progressListener, progressRequestListener);
        return call;
    }

    /**
     * Get all AbnormalRequestsPerMin alert configurations
     * This operation is used to get all configurations of the AbnormalRequestsPerMin alert type.
     * @param alertType The alert type. (required)
     * @return AlertConfigListDTO
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public AlertConfigListDTO getAllAlertConfigs(String alertType) throws ApiException {
        ApiResponse<AlertConfigListDTO> resp = getAllAlertConfigsWithHttpInfo(alertType);
        return resp.getData();
    }

    /**
     * Get all AbnormalRequestsPerMin alert configurations
     * This operation is used to get all configurations of the AbnormalRequestsPerMin alert type.
     * @param alertType The alert type. (required)
     * @return ApiResponse&lt;AlertConfigListDTO&gt;
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public ApiResponse<AlertConfigListDTO> getAllAlertConfigsWithHttpInfo(String alertType) throws ApiException {
        com.squareup.okhttp.Call call = getAllAlertConfigsValidateBeforeCall(alertType, null, null);
        Type localVarReturnType = new TypeToken<AlertConfigListDTO>(){}.getType();
        return apiClient.execute(call, localVarReturnType);
    }

    /**
     * Get all AbnormalRequestsPerMin alert configurations (asynchronously)
     * This operation is used to get all configurations of the AbnormalRequestsPerMin alert type.
     * @param alertType The alert type. (required)
     * @param callback The callback to be executed when the API call finishes
     * @return The request call
     * @throws ApiException If fail to process the API call, e.g. serializing the request body object
     */
    public com.squareup.okhttp.Call getAllAlertConfigsAsync(String alertType, final ApiCallback<AlertConfigListDTO> callback) throws ApiException {

        ProgressResponseBody.ProgressListener progressListener = null;
        ProgressRequestBody.ProgressRequestListener progressRequestListener = null;

        if (callback != null) {
            progressListener = new ProgressResponseBody.ProgressListener() {
                @Override
                public void update(long bytesRead, long contentLength, boolean done) {
                    callback.onDownloadProgress(bytesRead, contentLength, done);
                }
            };

            progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
                @Override
                public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                    callback.onUploadProgress(bytesWritten, contentLength, done);
                }
            };
        }

        com.squareup.okhttp.Call call = getAllAlertConfigsValidateBeforeCall(alertType, progressListener, progressRequestListener);
        Type localVarReturnType = new TypeToken<AlertConfigListDTO>(){}.getType();
        apiClient.executeAsync(call, localVarReturnType, callback);
        return call;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.test.junit4;

import java.io.File;
import java.util.Collection;
import java.util.List;
import java.util.Locale;

import org.apache.camel.CamelContext;
import org.apache.camel.Channel;
import org.apache.camel.Endpoint;
import org.apache.camel.Exchange;
import org.apache.camel.Expression;
import org.apache.camel.InvalidPayloadException;
import org.apache.camel.Message;
import org.apache.camel.Predicate;
import org.apache.camel.Processor;
import org.apache.camel.Route;
import org.apache.camel.builder.Builder;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.builder.ValueBuilder;
import org.apache.camel.impl.DefaultCamelContext;
import org.apache.camel.impl.DefaultExchange;
import org.apache.camel.processor.DelegateProcessor;
import org.apache.camel.util.PredicateAssertHelper;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A bunch of useful testing methods
 *
 * @version
 */
public abstract class TestSupport extends Assert {

    // platform-specific line separator, handy for multi-line message assertions
    protected static final String LS = System.getProperty("line.separator");
    private static final Logger LOG = LoggerFactory.getLogger(TestSupport.class);

    // per-subclass logger, so log output is attributed to the concrete test class
    protected Logger log = LoggerFactory.getLogger(getClass());

    private TestName testName = new TestName();

    // Builder methods for expressions used when testing
    // -------------------------------------------------------------------------

    /**
     * Returns a value builder for the given header
     */
    public static ValueBuilder header(String name) {
        return Builder.header(name);
    }

    /**
     * Returns a value builder for the given exchange property
     */
    public static ValueBuilder property(String name) {
        return Builder.exchangeProperty(name);
    }

    /**
     * Returns a predicate and value builder for the inbound body on an exchange
     */
    public static ValueBuilder body() {
        return Builder.body();
    }

    /**
     * Returns a predicate and value builder for the inbound message body as a
     * specific type
     */
    public static <T> ValueBuilder bodyAs(Class<T> type) {
        return Builder.bodyAs(type);
    }

    /**
     * Returns a predicate and value builder for the outbound body on an
     * exchange
     */
    public static ValueBuilder outBody() {
        return Builder.outBody();
    }

    /**
     * Returns a predicate and value builder for the outbound message body as a
     * specific type
     */
    public static <T> ValueBuilder outBodyAs(Class<T> type) {
        return Builder.outBodyAs(type);
    }

    /**
     * Returns a predicate and value builder for the fault body on an
     * exchange
     */
    public static ValueBuilder faultBody() {
        return Builder.faultBody();
    }

    /**
     * Returns a predicate and value builder for the fault message body as a
     * specific type
     */
    public static <T> ValueBuilder faultBodyAs(Class<T> type) {
        return Builder.faultBodyAs(type);
    }

    /**
     * Returns a value builder for the given system property
     */
    public static ValueBuilder systemProperty(String name) {
        return Builder.systemProperty(name);
    }

    /**
     * Returns a value builder for the given system property, with a default
     * value used when the property is not set
     */
    public static ValueBuilder systemProperty(String name, String defaultValue) {
        return Builder.systemProperty(name, defaultValue);
    }

    // Assertions
    // -----------------------------------------------------------------------

    /**
     * Asserts the value is non-null and an instance of the expected type,
     * returning it cast to that type.
     */
    public static <T> T assertIsInstanceOf(Class<T> expectedType, Object value) {
        assertNotNull("Expected an instance of type: " + expectedType.getName() + " but was null", value);
        assertTrue("Object should be of type " + expectedType.getName() + " but was: " + value + " with the type: "
                   + value.getClass().getName(), expectedType.isInstance(value));
        return expectedType.cast(value);
    }

    /**
     * Asserts the endpoint is non-null and has the expected URI.
     */
    public static void assertEndpointUri(Endpoint endpoint, String uri) {
        assertNotNull("Endpoint is null when expecting endpoint for: " + uri, endpoint);
        assertEquals("Endpoint uri for: " + endpoint, uri, endpoint.getEndpointUri());
    }

    /**
     * Asserts the In message on the exchange contains the expected value
     */
    public static Object assertInMessageHeader(Exchange exchange, String name, Object expected) {
        return assertMessageHeader(exchange.getIn(), name, expected);
    }

    /**
     * Asserts the Out message on the exchange contains the expected value
     */
    public static Object assertOutMessageHeader(Exchange exchange, String name, Object expected) {
        return assertMessageHeader(exchange.getOut(), name, expected);
    }

    /**
     * Asserts that the given exchange has an IN message of the given body value
     *
     * @param exchange the exchange which should have an IN message
     * @param expected the expected value of the IN message
     * @throws InvalidPayloadException is thrown if the payload is not the expected class type
     */
    public static void assertInMessageBodyEquals(Exchange exchange, Object expected) throws InvalidPayloadException {
        assertNotNull("Should have a response exchange!", exchange);

        Object actual;
        if (expected == null) {
            actual = exchange.getIn().getMandatoryBody();
            assertEquals("in body of: " + exchange, expected, actual);
        } else {
            // convert to the expected type before comparing
            actual = exchange.getIn().getMandatoryBody(expected.getClass());
        }
        assertEquals("in body of: " + exchange, expected, actual);

        LOG.debug("Received response: " + exchange + " with in: " + exchange.getIn());
    }

    /**
     * Asserts that the given exchange has an OUT message of the given body value
     *
     * @param exchange the exchange which should have an OUT message
     * @param expected the expected value of the OUT message
     * @throws InvalidPayloadException is thrown if the payload is not the expected class type
     */
    public static void assertOutMessageBodyEquals(Exchange exchange, Object expected) throws InvalidPayloadException {
        assertNotNull("Should have a response exchange!", exchange);

        Object actual;
        if (expected == null) {
            actual = exchange.getOut().getMandatoryBody();
            assertEquals("output body of: " + exchange, expected, actual);
        } else {
            // convert to the expected type before comparing
            actual = exchange.getOut().getMandatoryBody(expected.getClass());
        }
        assertEquals("output body of: " + exchange, expected, actual);

        LOG.debug("Received response: " + exchange + " with out: " + exchange.getOut());
    }

    /**
     * Asserts the named header on the message equals the expected value and
     * returns the actual header value.
     */
    public static Object assertMessageHeader(Message message, String name, Object expected) {
        Object value = message.getHeader(name);
        assertEquals("Header: " + name + " on Message: " + message, expected, value);
        return value;
    }

    /**
     * Asserts that the given expression when evaluated returns the given answer
     */
    public static Object assertExpression(Expression expression, Exchange exchange, Object expected) {
        Object value;
        if (expected != null) {
            // evaluate with type coercion towards the expected type
            value = expression.evaluate(exchange, expected.getClass());
        } else {
            value = expression.evaluate(exchange, Object.class);
        }

        LOG.debug("Evaluated expression: " + expression + " on exchange: " + exchange + " result: " + value);

        assertEquals("Expression: " + expression + " on Exchange: " + exchange, expected, value);
        return value;
    }

    /**
     * Asserts that the predicate matches on the exchange
     */
    public static void assertPredicateMatches(Predicate predicate, Exchange exchange) {
        assertPredicate(predicate, exchange, true);
    }

    /**
     * Asserts that the predicate does <b>not</b> match on the exchange
     */
    public static void assertPredicateDoesNotMatch(Predicate predicate, Exchange exchange) {
        try {
            PredicateAssertHelper.assertMatches(predicate, "Predicate should match: ", exchange);
        } catch (AssertionError e) {
            // the assertion error is expected here since the predicate should not match
            LOG.debug("Caught expected assertion error: " + e);
        }
        assertPredicate(predicate, exchange, false);
    }

    /**
     * Asserts that the predicate returns the expected value on the exchange
     */
    public static boolean assertPredicate(final Predicate predicate, Exchange exchange, boolean expected) {
        if (expected) {
            PredicateAssertHelper.assertMatches(predicate, "Predicate failed: ", exchange);
        }
        boolean value = predicate.matches(exchange);

        LOG.debug("Evaluated predicate: " + predicate + " on exchange: " + exchange + " result: " + value);

        assertEquals("Predicate: " + predicate + " on Exchange: " + exchange, expected, value);
        return value;
    }

    /**
     * Resolves an endpoint and asserts that it is found
     */
    public static Endpoint resolveMandatoryEndpoint(CamelContext context, String uri) {
        Endpoint endpoint = context.getEndpoint(uri);

        assertNotNull("No endpoint found for URI: " + uri, endpoint);

        return endpoint;
    }

    /**
     * Resolves an endpoint and asserts that it is found
     */
    public static <T extends Endpoint> T resolveMandatoryEndpoint(CamelContext context, String uri,
                                                                  Class<T> endpointType) {
        T endpoint = context.getEndpoint(uri, endpointType);

        assertNotNull("No endpoint found for URI: " + uri, endpoint);

        return endpoint;
    }

    /**
     * Creates an exchange with the given body
     */
    protected Exchange createExchangeWithBody(CamelContext camelContext, Object body) {
        Exchange exchange = new DefaultExchange(camelContext);
        Message message = exchange.getIn();
        // tag the message with the creating test class for easier debugging
        message.setHeader("testClass", getClass().getName());
        message.setBody(body);
        return exchange;
    }

    /**
     * Asserts the list has exactly one element and returns it.
     */
    public static <T> T assertOneElement(List<T> list) {
        assertEquals("Size of list should be 1: " + list, 1, list.size());
        return list.get(0);
    }

    /**
     * Asserts that a list is of the given size
     */
    public static <T> List<T> assertListSize(List<T> list, int size) {
        return assertListSize("List", list, size);
    }

    /**
     * Asserts that a list is of the given size
     */
    public static <T> List<T> assertListSize(String message, List<T> list, int size) {
        assertEquals(message + " should be of size: " + size + " but is: " + list, size, list.size());
        return list;
    }

    /**
     * Asserts that a collection is of the given size
     */
    public static <T> Collection<T> assertCollectionSize(Collection<T> list, int size) {
        return assertCollectionSize("List", list, size);
    }

    /**
     * Asserts that a collection is of the given size
     */
    public static <T> Collection<T> assertCollectionSize(String message, Collection<T> list, int size) {
        assertEquals(message + " should be of size: " + size + " but is: " + list, size, list.size());
        return list;
    }

    /**
     * A helper method to create a list of Route objects for a given route builder
     */
    public static List<Route> getRouteList(RouteBuilder builder) throws Exception {
        // spins up a throwaway context just long enough to materialize the routes
        CamelContext context = new DefaultCamelContext();
        context.addRoutes(builder);
        context.start();
        List<Route> answer = context.getRoutes();
        context.stop();
        return answer;
    }

    /**
     * Asserts that the text contains the given string
     *
     * @param text the text to compare
     * @param containedText the text which must be contained inside the other text parameter
     */
    public static void assertStringContains(String text, String containedText) {
        assertNotNull("Text should not be null!", text);
        assertTrue("Text: " + text + " does not contain: " + containedText, text.contains(containedText));
    }

    /**
     * If a processor is wrapped with a bunch of DelegateProcessor or DelegateAsyncProcessor objects
     * this call will drill through them and return the wrapped Processor.
     */
    @Deprecated
    public static Processor unwrap(Processor processor) {
        while (true) {
            if (processor instanceof DelegateProcessor) {
                processor = ((DelegateProcessor)processor).getProcessor();
            } else {
                return processor;
            }
        }
    }

    /**
     * If a processor is wrapped with a bunch of DelegateProcessor or DelegateAsyncProcessor objects
     * this call will drill through them and return the Channel.
     * <p/>
     * Returns null if no channel is found.
     */
    @Deprecated
    public static Channel unwrapChannel(Processor processor) {
        while (true) {
            if (processor instanceof Channel) {
                return (Channel) processor;
            } else if (processor instanceof DelegateProcessor) {
                processor = ((DelegateProcessor)processor).getProcessor();
            } else {
                return null;
            }
        }
    }

    /**
     * Recursively delete a directory, useful for zapping test data
     *
     * @param file the directory to be deleted
     * @return <tt>false</tt> if error deleting directory
     */
    public static boolean deleteDirectory(String file) {
        return deleteDirectory(new File(file));
    }

    /**
     * Recursively delete a directory, useful for zapping test data
     *
     * @param file the directory to be deleted
     * @return <tt>false</tt> if error deleting directory
     */
    public static boolean deleteDirectory(File file) {
        // retry a few times: on some platforms (notably Windows) file handles
        // may be released slightly after the owning process lets go of them
        int tries = 0;
        int maxTries = 5;
        boolean exists = true;
        while (exists && (tries < maxTries)) {
            recursivelyDeleteDirectory(file);
            tries++;
            exists = file.exists();
            if (exists) {
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    // Ignore
                    // NOTE(review): interrupt flag is swallowed here, not restored
                }
            }
        }
        return !exists;
    }

    private static void recursivelyDeleteDirectory(File file) {
        if (!file.exists()) {
            return;
        }
        if (file.isDirectory()) {
            // NOTE(review): listFiles() can return null on I/O error — would NPE here
            File[] files = file.listFiles();
            for (File child : files) {
                recursivelyDeleteDirectory(child);
            }
        }
        boolean success = file.delete();
        if (!success) {
            LOG.warn("Deletion of file: " + file.getAbsolutePath() + " failed");
        }
    }

    /**
     * create the directory
     *
     * @param file the directory to be created
     */
    public static void createDirectory(String file) {
        File dir = new File(file);
        dir.mkdirs();
    }

    /**
     * To be used for folder/directory comparison that works across different platforms such
     * as Window, Mac and Linux.
     */
    public static void assertDirectoryEquals(String expected, String actual) {
        assertDirectoryEquals(null, expected, actual);
    }

    /**
     * To be used for folder/directory comparison that works across different platforms such
     * as Window, Mac and Linux.
     */
    public static void assertDirectoryEquals(String message, String expected, String actual) {
        // must use single / as path separators
        String expectedPath = expected.replace('\\', '/');
        String actualPath = actual.replace('\\', '/');

        if (message != null) {
            assertEquals(message, expectedPath, actualPath);
        } else {
            assertEquals(expectedPath, actualPath);
        }
    }

    /**
     * To be used to check if a file is found in the file system
     */
    public static void assertFileExists(String filename) {
        File file = new File(filename);
        assertTrue("File " + filename + " should exist", file.exists());
    }

    /**
     * To be used to check if a file is <b>not</b> found in the file system
     */
    public static void assertFileNotExists(String filename) {
        File file = new File(filename);
        assertFalse("File " + filename + " should not exist", file.exists());
    }

    /**
     * Is this OS the given platform.
     * <p/>
     * Uses <tt>os.name</tt> from the system properties to determine the OS.
     *
     * @param platform such as Windows
     * @return <tt>true</tt> if its that platform.
     */
    public static boolean isPlatform(String platform) {
        String osName = System.getProperty("os.name").toLowerCase(Locale.US);
        return osName.indexOf(platform.toLowerCase(Locale.US)) > -1;
    }

    /**
     * Is this Java by the given vendor.
     * <p/>
     * Uses <tt>java.vendor</tt> from the system properties to determine the vendor.
     *
     * @param vendor such as IBM
     * @return <tt>true</tt> if its that vendor.
*/ public static boolean isJavaVendor(String vendor) { String javaVendor = System.getProperty("java.vendor").toLowerCase(Locale.US); return javaVendor.indexOf(vendor.toLowerCase(Locale.US)) > -1; } /** * Is this Java 1.5 * * @return <tt>true</tt> if its Java 1.5, <tt>false</tt> if its not (for example Java 1.6 or better) * @deprecated will be removed in the near future as Camel now requires JDK1.6+ */ @Deprecated public static boolean isJava15() { String javaVersion = System.getProperty("java.version").toLowerCase(Locale.US); return javaVersion.startsWith("1.5"); } /** * Is this Java 1.6 * * @return <tt>true</tt> if its Java 1.6, <tt>false</tt> if its not (for example Java 1.7 or better) */ public static boolean isJava16() { String javaVersion = System.getProperty("java.version").toLowerCase(Locale.US); return javaVersion.startsWith("1.6"); } /** * Is this Java 1.7 * * @return <tt>true</tt> if its Java 1.7, <tt>false</tt> if its not (for example Java 1.6 or older) */ public static boolean isJava17() { String javaVersion = System.getProperty("java.version").toLowerCase(Locale.US); return javaVersion.startsWith("1.7"); } /** * Is this Java 1.8 * * @return <tt>true</tt> if its Java 1.8, <tt>false</tt> if its not (for example Java 1.7 or older) */ public static boolean isJava18() { String javaVersion = System.getProperty("java.version").toLowerCase(Locale.US); return javaVersion.startsWith("1.8"); } /** * Gets the current test name * * @return the test name */ @Rule public TestName getTestName() { return testName; } /** * Gets the current test method name * * @return the method name */ public String getTestMethodName() { return testName.getMethodName(); } }
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.server;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.net.HostAndPort;
import com.google.inject.Binder;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Module;
import com.google.inject.servlet.GuiceFilter;
import io.druid.common.utils.SocketUtil;
import io.druid.guice.GuiceInjectors;
import io.druid.guice.Jerseys;
import io.druid.guice.JsonConfigProvider;
import io.druid.guice.LazySingleton;
import io.druid.guice.LifecycleModule;
import io.druid.guice.annotations.Self;
import io.druid.guice.annotations.Smile;
import io.druid.guice.http.DruidHttpClientConfig;
import io.druid.initialization.Initialization;
import io.druid.java.util.common.lifecycle.Lifecycle;
import io.druid.query.DefaultGenericQueryMetricsFactory;
import io.druid.query.MapQueryToolChestWarehouse;
import io.druid.query.Query;
import io.druid.query.QueryToolChest;
import io.druid.server.initialization.BaseJettyTest;
import io.druid.server.initialization.jetty.JettyServerInitUtils;
import io.druid.server.initialization.jetty.JettyServerInitializer;
import io.druid.server.log.RequestLogger;
import io.druid.server.metrics.NoopServiceEmitter;
import io.druid.server.router.QueryHostFinder;
import io.druid.server.router.RendezvousHashAvaticaConnectionBalancer;
import io.druid.server.security.AllowAllAuthorizer;
import io.druid.server.security.NoopEscalator;
import io.druid.server.security.Authorizer;
import io.druid.server.security.AuthorizerMapper;
import org.eclipse.jetty.client.HttpClient;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.HandlerList;
import org.eclipse.jetty.servlet.DefaultServlet;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URL;
import java.util.Collection;
import java.util.concurrent.CountDownLatch;

/**
 * Integration-style test for {@code AsyncQueryForwardingServlet}: boots a Guice-wired
 * Jetty server that proxies requests, and verifies gzip pass-through, DELETE
 * broadcasting to all known servers, and URI rewriting.
 */
public class AsyncQueryForwardingServletTest extends BaseJettyTest
{
  // Ports of the two auxiliary "backend" servers used by the DELETE-broadcast test;
  // chosen at setup time to be free ports above the proxy's own port.
  private static int port1;
  private static int port2;

  @Override
  @Before
  public void setup() throws Exception
  {
    setProperties();
    Injector injector = setupInjector();
    final DruidNode node = injector.getInstance(Key.get(DruidNode.class, Self.class));
    port = node.getPlaintextPort();
    // pick two more free ports for the broadcast targets
    port1 = SocketUtil.findOpenPortFrom(port + 1);
    port2 = SocketUtil.findOpenPortFrom(port1 + 1);
    lifecycle = injector.getInstance(Lifecycle.class);
    lifecycle.start();
    ClientHolder holder = injector.getInstance(ClientHolder.class);
    client = holder.getClient();
  }

  @Override
  protected Injector setupInjector()
  {
    return Initialization.makeInjectorWithModules(
        GuiceInjectors.makeStartupInjector(),
        ImmutableList.<Module>of(
            new Module()
            {
              @Override
              public void configure(Binder binder)
              {
                JsonConfigProvider.bindInstance(
                    binder, Key.get(DruidNode.class, Self.class), new DruidNode("test", "localhost", null, null, true, false)
                );
                // use the proxying Jetty initializer defined below instead of the default one
                binder.bind(JettyServerInitializer.class).to(ProxyJettyServerInit.class).in(LazySingleton.class);
                // allow-all authorization so the test requests are never rejected
                binder.bind(AuthorizerMapper.class).toInstance(
                    new AuthorizerMapper(null)
                    {
                      @Override
                      public Authorizer getAuthorizer(String name)
                      {
                        return new AllowAllAuthorizer();
                      }
                    }
                );
                Jerseys.addResource(binder, SlowResource.class);
                Jerseys.addResource(binder, ExceptionResource.class);
                Jerseys.addResource(binder, DefaultResource.class);
                LifecycleModule.register(binder, Server.class);
              }
            }
        )
    );
  }

  @Test
  public void testProxyGzipCompression() throws Exception
  {
    final URL url = new URL("http://localhost:" + port + "/proxy/default");

    // GET and POST with Accept-Encoding: gzip must come back gzip-encoded...
    final HttpURLConnection get = (HttpURLConnection) url.openConnection();
    get.setRequestProperty("Accept-Encoding", "gzip");
    Assert.assertEquals("gzip", get.getContentEncoding());

    final HttpURLConnection post = (HttpURLConnection) url.openConnection();
    post.setRequestProperty("Accept-Encoding", "gzip");
    post.setRequestMethod("POST");
    Assert.assertEquals("gzip", post.getContentEncoding());

    // ...while requests without the header must not be gzip-encoded
    final HttpURLConnection getNoGzip = (HttpURLConnection) url.openConnection();
    Assert.assertNotEquals("gzip", getNoGzip.getContentEncoding());

    final HttpURLConnection postNoGzip = (HttpURLConnection) url.openConnection();
    postNoGzip.setRequestMethod("POST");
    Assert.assertNotEquals("gzip", postNoGzip.getContentEncoding());
  }

  @Test(timeout = 60_000)
  public void testDeleteBroadcast() throws Exception
  {
    // a DELETE of a query id must be forwarded to BOTH backend servers
    CountDownLatch latch = new CountDownLatch(2);
    makeTestDeleteServer(port1, latch).start();
    makeTestDeleteServer(port2, latch).start();

    final URL url = new URL("http://localhost:" + port + "/druid/v2/abc123");
    final HttpURLConnection post = (HttpURLConnection) url.openConnection();
    post.setRequestMethod("DELETE");
    int code = post.getResponseCode();
    Assert.assertEquals(200, code);

    latch.await();
  }

  /** Builds a minimal Jetty server whose DELETE handler counts down the given latch. */
  private static Server makeTestDeleteServer(int port, final CountDownLatch latch)
  {
    Server server = new Server(port);
    ServletHandler handler = new ServletHandler();
    handler.addServletWithMapping(new ServletHolder(new HttpServlet()
    {
      @Override
      protected void doDelete(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException
      {
        latch.countDown();
        resp.setStatus(200);
      }
    }), "/default/*");

    server.setHandler(handler);
    return server;
  }

  /**
   * Jetty initializer that installs an {@code AsyncQueryForwardingServlet} whose
   * host finder always routes back to this test's own server (or the two broadcast
   * ports), and whose URI rewriting maps /druid/v2 to /default and strips /proxy.
   */
  public static class ProxyJettyServerInit implements JettyServerInitializer
  {
    private final DruidNode node;

    @Inject
    public ProxyJettyServerInit(@Self DruidNode node)
    {
      this.node = node;
    }

    @Override
    public void initialize(Server server, Injector injector)
    {
      final ServletContextHandler root = new ServletContextHandler(ServletContextHandler.SESSIONS);
      root.addServlet(new ServletHolder(new DefaultServlet()), "/*");

      // host finder pinned to localhost: single-query routing goes to this server,
      // getAllServers() additionally exposes the two broadcast ports
      final QueryHostFinder hostFinder = new QueryHostFinder(null, new RendezvousHashAvaticaConnectionBalancer())
      {
        @Override
        public io.druid.client.selector.Server getServer(Query query)
        {
          return new TestServer("http", "localhost", node.getPlaintextPort());
        }

        @Override
        public io.druid.client.selector.Server getDefaultServer()
        {
          return new TestServer("http", "localhost", node.getPlaintextPort());
        }

        @Override
        public Collection<io.druid.client.selector.Server> getAllServers()
        {
          return ImmutableList.of(
              new TestServer("http", "localhost", node.getPlaintextPort()),
              new TestServer("http", "localhost", port1),
              new TestServer("http", "localhost", port2)
          );
        }
      };

      ObjectMapper jsonMapper = injector.getInstance(ObjectMapper.class);
      ServletHolder holder = new ServletHolder(
          new AsyncQueryForwardingServlet(
              new MapQueryToolChestWarehouse(ImmutableMap.<Class<? extends Query>, QueryToolChest>of()),
              jsonMapper,
              injector.getInstance(Key.get(ObjectMapper.class, Smile.class)),
              hostFinder,
              injector.getProvider(HttpClient.class),
              injector.getInstance(DruidHttpClientConfig.class),
              new NoopServiceEmitter(),
              new RequestLogger()
              {
                @Override
                public void log(RequestLogLine requestLogLine) throws IOException
                {
                  // noop
                }
              },
              new DefaultGenericQueryMetricsFactory(jsonMapper),
              new NoopEscalator()
          )
          {
            @Override
            protected URI rewriteURI(HttpServletRequest request, String scheme, String host)
            {
              // map Druid query paths onto the test resources; strip the /proxy prefix
              String uri = super.rewriteURI(request, scheme, host).toString();
              if (uri.contains("/druid/v2")) {
                return URI.create(uri.replace("/druid/v2", "/default"));
              }
              return URI.create(uri.replace("/proxy", ""));
            }
          });
      //NOTE: explicit maxThreads to workaround https://tickets.puppetlabs.com/browse/TK-152
      holder.setInitParameter("maxThreads", "256");
      root.addServlet(holder, "/proxy/*");
      root.addServlet(holder, "/druid/v2/*");

      JettyServerInitUtils.addExtensionFilters(root, injector);
      root.addFilter(GuiceFilter.class, "/slow/*", null);
      root.addFilter(GuiceFilter.class, "/default/*", null);
      root.addFilter(GuiceFilter.class, "/exception/*", null);

      final HandlerList handlerList = new HandlerList();
      handlerList.setHandlers(new Handler[]{JettyServerInitUtils.wrapWithDefaultGzipHandler(root)});
      server.setHandler(handlerList);
    }
  }

  @Test
  public void testRewriteURI() throws Exception
  {
    // test params
    Assert.assertEquals(
        new URI("http://localhost:1234/some/path?param=1"),
        AsyncQueryForwardingServlet.makeURI("http", "localhost:1234", "/some/path", "param=1")
    );

    // HttpServletRequest.getQueryString returns encoded form
    // use ascii representation in case URI is using non-ascii characters
    Assert.assertEquals(
        "http://[2a00:1450:4007:805::1007]:1234/some/path?param=1&param2=%E2%82%AC",
        AsyncQueryForwardingServlet.makeURI(
            "http",
            HostAndPort.fromParts("2a00:1450:4007:805::1007", 1234).toString(),
            "/some/path",
            "param=1&param2=%E2%82%AC"
        ).toASCIIString()
    );

    // test null query
    Assert.assertEquals(
        new URI("http://localhost/"),
        AsyncQueryForwardingServlet.makeURI("http", "localhost", "/", null)
    );
  }

  /** Trivial fixed scheme/address/port implementation of the selector Server interface. */
  private static class TestServer implements io.druid.client.selector.Server
  {
    private final String scheme;
    private final String address;
    private final int port;

    public TestServer(String scheme, String address, int port)
    {
      this.scheme = scheme;
      this.address = address;
      this.port = port;
    }

    @Override
    public String getScheme()
    {
      return scheme;
    }

    @Override
    public String getHost()
    {
      return address + ":" + port;
    }

    @Override
    public String getAddress()
    {
      return address;
    }

    @Override
    public int getPort()
    {
      return port;
    }
  }
}
/*
 * Copyright 2015 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.channel.pool;

import io.netty.bootstrap.Bootstrap;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.local.LocalAddress;
import io.netty.channel.local.LocalChannel;
import io.netty.channel.local.LocalEventLoopGroup;
import io.netty.channel.local.LocalServerChannel;
import io.netty.channel.pool.FixedChannelPool.AcquireTimeoutAction;
import io.netty.util.concurrent.Future;
import org.junit.Test;

import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import static org.junit.Assert.*;

/**
 * Tests for {@link FixedChannelPool} using in-JVM "local" channels, covering
 * acquire/release under the connection limit, acquire timeouts, the NEW-connection
 * timeout action, pending-acquire queue limits and cross-pool release rejection.
 */
public class FixedChannelPoolTest {
    private static final String LOCAL_ADDR_ID = "test.id";

    @Test
    public void testAcquire() throws Exception {
        EventLoopGroup group = new LocalEventLoopGroup();
        LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
        Bootstrap cb = new Bootstrap();
        cb.remoteAddress(addr);
        cb.group(group)
          .channel(LocalChannel.class);

        ServerBootstrap sb = new ServerBootstrap();
        sb.group(group)
          .channel(LocalServerChannel.class)
          .childHandler(new ChannelInitializer<LocalChannel>() {
              @Override
              public void initChannel(LocalChannel ch) throws Exception {
                  ch.pipeline().addLast(new ChannelInboundHandlerAdapter());
              }
          });

        // Start server
        Channel sc = sb.bind(addr).syncUninterruptibly().channel();
        CountingChannelPoolHandler handler = new CountingChannelPoolHandler();

        // maxConnections = 1: the second acquire must park until the first channel is released
        ChannelPool pool = new FixedChannelPool(cb, handler, 1, Integer.MAX_VALUE);

        Channel channel = pool.acquire().syncUninterruptibly().getNow();
        Future<Channel> future = pool.acquire();
        assertFalse(future.isDone());

        pool.release(channel).syncUninterruptibly();
        assertTrue(future.await(1, TimeUnit.SECONDS));

        // the released channel is handed to the pending acquire, not a new connection
        Channel channel2 = future.getNow();
        assertSame(channel, channel2);
        assertEquals(1, handler.channelCount());
        assertEquals(1, handler.acquiredCount());
        assertEquals(1, handler.releasedCount());

        sc.close().syncUninterruptibly();
        channel2.close().syncUninterruptibly();
        group.shutdownGracefully();
    }

    @Test(expected = TimeoutException.class)
    public void testAcquireTimeout() throws Exception {
        EventLoopGroup group = new LocalEventLoopGroup();
        LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
        Bootstrap cb = new Bootstrap();
        cb.remoteAddress(addr);
        cb.group(group)
          .channel(LocalChannel.class);

        ServerBootstrap sb = new ServerBootstrap();
        sb.group(group)
          .channel(LocalServerChannel.class)
          .childHandler(new ChannelInitializer<LocalChannel>() {
              @Override
              public void initChannel(LocalChannel ch) throws Exception {
                  ch.pipeline().addLast(new ChannelInboundHandlerAdapter());
              }
          });

        // Start server
        Channel sc = sb.bind(addr).syncUninterruptibly().channel();
        ChannelPoolHandler handler = new TestChannelPoolHandler();

        // AcquireTimeoutAction.FAIL + 500ms: a second acquire over the limit must
        // fail with TimeoutException (the @Test(expected = ...) above)
        ChannelPool pool = new FixedChannelPool(cb, handler, ChannelHealthChecker.ACTIVE,
                                                AcquireTimeoutAction.FAIL, 500, 1, Integer.MAX_VALUE);

        Channel channel = pool.acquire().syncUninterruptibly().getNow();
        Future<Channel> future = pool.acquire();
        try {
            future.syncUninterruptibly();
        } finally {
            sc.close().syncUninterruptibly();
            channel.close().syncUninterruptibly();
            group.shutdownGracefully();
        }
    }

    @Test
    public void testAcquireNewConnection() throws Exception {
        EventLoopGroup group = new LocalEventLoopGroup();
        LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
        Bootstrap cb = new Bootstrap();
        cb.remoteAddress(addr);
        cb.group(group)
          .channel(LocalChannel.class);

        ServerBootstrap sb = new ServerBootstrap();
        sb.group(group)
          .channel(LocalServerChannel.class)
          .childHandler(new ChannelInitializer<LocalChannel>() {
              @Override
              public void initChannel(LocalChannel ch) throws Exception {
                  ch.pipeline().addLast(new ChannelInboundHandlerAdapter());
              }
          });

        // Start server
        Channel sc = sb.bind(addr).syncUninterruptibly().channel();
        ChannelPoolHandler handler = new TestChannelPoolHandler();

        // AcquireTimeoutAction.NEW: on timeout the pool opens a fresh connection
        // instead of failing, so the two acquired channels must differ
        ChannelPool pool = new FixedChannelPool(cb, handler, ChannelHealthChecker.ACTIVE,
                                                AcquireTimeoutAction.NEW, 500, 1, Integer.MAX_VALUE);

        Channel channel = pool.acquire().syncUninterruptibly().getNow();
        Channel channel2 = pool.acquire().syncUninterruptibly().getNow();
        assertNotSame(channel, channel2);
        sc.close().syncUninterruptibly();
        channel.close().syncUninterruptibly();
        channel2.close().syncUninterruptibly();
        group.shutdownGracefully();
    }

    @Test(expected = IllegalStateException.class)
    public void testAcquireBoundQueue() throws Exception {
        EventLoopGroup group = new LocalEventLoopGroup();
        LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
        Bootstrap cb = new Bootstrap();
        cb.remoteAddress(addr);
        cb.group(group)
          .channel(LocalChannel.class);

        ServerBootstrap sb = new ServerBootstrap();
        sb.group(group)
          .channel(LocalServerChannel.class)
          .childHandler(new ChannelInitializer<LocalChannel>() {
              @Override
              public void initChannel(LocalChannel ch) throws Exception {
                  ch.pipeline().addLast(new ChannelInboundHandlerAdapter());
              }
          });

        // Start server
        Channel sc = sb.bind(addr).syncUninterruptibly().channel();
        ChannelPoolHandler handler = new TestChannelPoolHandler();

        // maxConnections = 1 AND maxPendingAcquires = 1: a third acquire exceeds
        // the pending queue and must fail with IllegalStateException
        ChannelPool pool = new FixedChannelPool(cb, handler, 1, 1);

        Channel channel = pool.acquire().syncUninterruptibly().getNow();
        Future<Channel> future = pool.acquire();
        assertFalse(future.isDone());

        try {
            pool.acquire().syncUninterruptibly();
        } finally {
            sc.close().syncUninterruptibly();
            channel.close().syncUninterruptibly();
            group.shutdownGracefully();
        }
    }

    @Test(expected = IllegalArgumentException.class)
    public void testReleaseDifferentPool() throws Exception {
        EventLoopGroup group = new LocalEventLoopGroup();
        LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
        Bootstrap cb = new Bootstrap();
        cb.remoteAddress(addr);
        cb.group(group)
          .channel(LocalChannel.class);

        ServerBootstrap sb = new ServerBootstrap();
        sb.group(group)
          .channel(LocalServerChannel.class)
          .childHandler(new ChannelInitializer<LocalChannel>() {
              @Override
              public void initChannel(LocalChannel ch) throws Exception {
                  ch.pipeline().addLast(new ChannelInboundHandlerAdapter());
              }
          });

        // Start server
        Channel sc = sb.bind(addr).syncUninterruptibly().channel();
        ChannelPoolHandler handler = new TestChannelPoolHandler();

        // releasing a channel to a pool that did not acquire it must be rejected
        ChannelPool pool = new FixedChannelPool(cb, handler, 1, 1);
        ChannelPool pool2 = new FixedChannelPool(cb, handler, 1, 1);

        Channel channel = pool.acquire().syncUninterruptibly().getNow();

        try {
            pool2.release(channel).syncUninterruptibly();
        } finally {
            sc.close().syncUninterruptibly();
            channel.close().syncUninterruptibly();
            group.shutdownGracefully();
        }
    }

    /** Pool handler stub for tests that do not need to count callbacks. */
    private static final class TestChannelPoolHandler extends AbstractChannelPoolHandler {
        @Override
        public void channelCreated(Channel ch) throws Exception {
            // NOOP
        }
    }
}
/** * The MIT License (MIT) * * Copyright (c) 2011-2016 Incapture Technologies LLC * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ /* * KEEP LICENSE */ /* * Copyright (C) 2012 Frode Carlsen. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package fc.cron; import static org.joda.time.DateTimeConstants.DAYS_PER_WEEK; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Locale; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.joda.time.DateTime; import org.joda.time.LocalDate; import org.joda.time.MutableDateTime; /** * Parser for unix-like cron expressions: Cron expressions allow specifying combinations of criteria for time * such as: &quot;Each Monday-Friday at 08:00&quot; or &quot;Every last friday of the month at 01:30&quot; * <p> * A cron expressions consists of 5 or 6 mandatory fields (seconds may be omitted) separated by space. <br> * These are: * * <table cellspacing="8"> * <tr> * <th align="left">Field</th> * <th align="left">&nbsp;</th> * <th align="left">Allowable values</th> * <th align="left">&nbsp;</th> * <th align="left">Special Characters</th> * </tr> * <tr> * <td align="left"><code>Seconds (may be omitted)</code></td> * <td align="left">&nbsp;</th> * <td align="left"><code>0-59</code></td> * <td align="left">&nbsp;</th> * <td align="left"><code>, - * /</code></td> * </tr> * <tr> * <td align="left"><code>Minutes</code></td> * <td align="left">&nbsp;</th> * <td align="left"><code>0-59</code></td> * <td align="left">&nbsp;</th> * <td align="left"><code>, - * /</code></td> * </tr> * <tr> * <td align="left"><code>Hours</code></td> * <td align="left">&nbsp;</th> * <td align="left"><code>0-23</code></td> * <td align="left">&nbsp;</th> * <td align="left"><code>, - * /</code></td> * </tr> * <tr> * <td align="left"><code>Day of month</code></td> * <td align="left">&nbsp;</th> * <td align="left"><code>1-31</code></td> * <td align="left">&nbsp;</th> * <td align="left"><code>, - * ? 
/ L W</code></td> * </tr> * <tr> * <td align="left"><code>Month</code></td> * <td align="left">&nbsp;</th> * <td align="left"><code>1-12 or JAN-DEC (note: english abbreviations)</code></td> * <td align="left">&nbsp;</th> * <td align="left"><code>, - * /</code></td> * </tr> * <tr> * <td align="left"><code>Day of week</code></td> * <td align="left">&nbsp;</th> * <td align="left"><code>1-7 or MON-SUN (note: english abbreviations)</code></td> * <td align="left">&nbsp;</th> * <td align="left"><code>, - * ? / L #</code></td> * </tr> * </table> * * <P> * '*' Can be used in all fields and means 'for all values'. E.g. &quot;*&quot; in minutes, means 'for all minutes' * <P> * '?' Ca be used in Day-of-month and Day-of-week fields. Used to signify 'no special value'. It is used when one want * to specify something for one of those two fields, but not the other. * <P> * '-' Used to specify a time interval. E.g. &quot;10-12&quot; in Hours field means 'for hours 10, 11 and 12' * <P> * ',' Used to specify multiple values for a field. E.g. &quot;MON,WED,FRI&quot; in Day-of-week field means &quot;for * monday, wednesday and friday&quot; * <P> * '/' Used to specify increments. E.g. &quot;0/15&quot; in Seconds field means &quot;for seconds 0, 15, 30, ad * 45&quot;. And &quot;5/15&quot; in seconds field means &quot;for seconds 5, 20, 35, and 50&quot;. If '*' s specified * before '/' it is the same as saying it starts at 0. For every field there's a list of values that can be turned on or * off. For Seconds and Minutes these range from 0-59. For Hours from 0 to 23, For Day-of-month it's 1 to 31, For Months * 1 to 12. &quot;/&quot; character helsp turn some of these values back on. Thus &quot;7/6&quot; in Months field * specify just Month 7. It doesn't turn on every 6 month following, since cron fields never roll over * <P> * 'L' Can be used on Day-of-month and Day-of-week fields. It signifies last day of the set of allowed values. 
In * Day-of-month field it's the last day of the month (e.g.. 31 jan, 28 feb (29 in leap years), 31 march, etc.). In * Day-of-week field it's Sunday. If there's a prefix, this will be subtracted (5L in Day-of-month means 5 days before * last day of Month: 26 jan, 23 feb, etc.) * <P> * 'W' Can be specified in Day-of-Month field. It specifies closest weekday (monday-friday). Holidays are not accounted * for. &quot;15W&quot; in Day-of-Month field means 'closest weekday to 15 i in given month'. If the 15th is a Saturday, * it gives Friday. If 15th is a Sunday, the it gives following Monday. * <P> * '#' Can be used in Day-of-Week field. For example: &quot;5#3&quot; means 'third friday in month' (day 5 = friday, #3 * - the third). If the day does not exist (e.g. &quot;5#5&quot; - 5th friday of month) and there aren't 5 fridays in * the month, then it won't match until the next month with 5 fridays. * <P> * <b>Case-sensitivt</b> No fields are case-sensitive * <P> * <b>Dependencies between fields</b> Fields are always evaluated independently, but the expression doesn't match until * the constraints of each field are met.Feltene evalueres Overlap of intervals are not allowed. 
That is: for * Day-of-week field &quot;FRI-MON&quot; is invalid,but &quot;FRI-SUN,MON&quot; is valid * */ public class CronExpression { enum CronFieldType { SECOND(0, 59, null), MINUTE(0, 59, null), HOUR(0, 23, null), DAY_OF_MONTH(1, 31, null), MONTH(1, 12, Arrays.asList("JAN", "FEB", "MAR", "APR", "MAY", "JUN", "JUL", "AUG", "SEP", "OCT", "NOV", "DEC")), DAY_OF_WEEK(1, 7, Arrays.asList("MON", "TUE", "WED", "THU", "FRI", "SAT", "SUN")); final int from, to; final List<String> names; CronFieldType(int from, int to, List<String> names) { this.from = from; this.to = to; this.names = names; } } private final String expr; private final SimpleField secondField; private final SimpleField minuteField; private final SimpleField hourField; private final DayOfWeekField dayOfWeekField; private final SimpleField monthField; private final DayOfMonthField dayOfMonthField; public CronExpression(final String expr) { this(expr, true); } public CronExpression(final String expr, final boolean withSeconds) { if (expr == null) { throw new IllegalArgumentException("expr is null"); //$NON-NLS-1$ } this.expr = expr; final int expectedParts = withSeconds ? 6 : 5; final String[] parts = expr.split("\\s+"); //$NON-NLS-1$ if (parts.length != expectedParts) { throw new IllegalArgumentException(String.format("Invalid cron expression [%s], expected %s felt, got %s" , expr, expectedParts, parts.length)); } int ix = withSeconds ? 1 : 0; this.secondField = new SimpleField(CronFieldType.SECOND, withSeconds ? 
parts[0] : "0"); this.minuteField = new SimpleField(CronFieldType.MINUTE, parts[ix++]); this.hourField = new SimpleField(CronFieldType.HOUR, parts[ix++]); this.dayOfMonthField = new DayOfMonthField(parts[ix++]); this.monthField = new SimpleField(CronFieldType.MONTH, parts[ix++]); this.dayOfWeekField = new DayOfWeekField(parts[ix++]); } public static CronExpression create(final String expr) { return new CronExpression(expr, true); } public static CronExpression createWithoutSeconds(final String expr) { return new CronExpression(expr, false); } public DateTime nextTimeAfter(DateTime afterTime) { // will search for the next time within the next 4 years. If there is no // time matching, an InvalidArgumentException will be thrown (it is very // likely that the cron expression is invalid, like the February 30th). return nextTimeAfter(afterTime, afterTime.plusYears(4)); } public DateTime nextTimeAfter(DateTime afterTime, long durationInMillis) { // will search for the next time within the next durationInMillis // millisecond. Be aware that the duration is specified in millis, // but in fact the limit is checked on a day-to-day basis. 
        // Tail of the one-argument overload: delegate to the barrier variant,
        // using afterTime + durationInMillis as the search cut-off.
        return nextTimeAfter(afterTime, afterTime.plus(durationInMillis));
    }

    /**
     * Finds the next date-time strictly after {@code afterTime} that matches this cron expression.
     * <p>
     * The search works on a mutable copy of {@code afterTime}: milliseconds are zeroed, the time is
     * bumped by one second (so the result is strictly after the input), and then each field is
     * advanced from the innermost (second) outwards (minute, hour, day-of-month, month, day-of-week)
     * until every field matches. Whenever an outer field is bumped, all finer-grained fields are
     * reset to their minimum, so no candidate is skipped.
     *
     * @param afterTime       the exclusive lower bound for the returned time
     * @param dateTimeBarrier the exclusive upper bound; used to abort searches that would otherwise
     *                        run forever (e.g. "Feb 30")
     * @return the first matching time after {@code afterTime}
     * @throws IllegalArgumentException if no match is found at or before {@code dateTimeBarrier}
     */
    public DateTime nextTimeAfter(DateTime afterTime, DateTime dateTimeBarrier) {
        MutableDateTime nextTime = new MutableDateTime(afterTime);
        nextTime.setMillisOfSecond(0);
        nextTime.secondOfDay().add(1);

        while (true) { // day of week
            while (true) { // month
                while (true) { // day of month
                    while (true) { // hour
                        while (true) { // minute
                            while (true) { // second
                                if (secondField.matches(nextTime.getSecondOfMinute())) {
                                    break;
                                }
                                // add on secondOfDay (not secondOfMinute) so the carry rolls into
                                // minutes/hours instead of wrapping within the minute
                                nextTime.secondOfDay().add(1);
                            }
                            if (minuteField.matches(nextTime.getMinuteOfHour())) {
                                break;
                            }
                            nextTime.minuteOfDay().add(1);
                            nextTime.secondOfMinute().set(0);
                        }
                        if (hourField.matches(nextTime.getHourOfDay())) {
                            break;
                        }
                        nextTime.hourOfDay().add(1);
                        nextTime.minuteOfHour().set(0);
                        nextTime.secondOfMinute().set(0);
                    }
                    // day-of-month matching needs the full date (for L/W modifiers), not just the day number
                    if (dayOfMonthField.matches(new LocalDate(nextTime))) {
                        break;
                    }
                    nextTime.addDays(1);
                    nextTime.setTime(0, 0, 0, 0);
                    checkIfDateTimeBarrierIsReached(nextTime, dateTimeBarrier);
                }
                if (monthField.matches(nextTime.getMonthOfYear())) {
                    break;
                }
                nextTime.addMonths(1);
                nextTime.setDayOfMonth(1);
                nextTime.setTime(0, 0, 0, 0);
                checkIfDateTimeBarrierIsReached(nextTime, dateTimeBarrier);
            }
            // day-of-week is checked last; a mismatch restarts the whole cascade on the next day
            if (dayOfWeekField.matches(new LocalDate(nextTime))) {
                break;
            }
            nextTime.addDays(1);
            nextTime.setTime(0, 0, 0, 0);
            checkIfDateTimeBarrierIsReached(nextTime, dateTimeBarrier);
        }

        return nextTime.toDateTime();
    }

    /**
     * Aborts the search once the candidate time has passed the barrier.
     *
     * @throws IllegalArgumentException if {@code nextTime} is after {@code dateTimeBarrier}
     */
    private static void checkIfDateTimeBarrierIsReached(MutableDateTime nextTime, DateTime dateTimeBarrier) {
        if (nextTime.isAfter(dateTimeBarrier)) {
            throw new IllegalArgumentException("No next execution time could be determined that is before the limit of " + dateTimeBarrier);
        }
    }

    @Override
    public String toString() {
        return getClass().getSimpleName() + "<" + expr + ">";
    }

    /**
     * One parsed alternative of a cron field (cron fields are comma-separated lists of these).
     * All members may be null when not present in the expression.
     */
    static class FieldPart {
        private Integer from, to, increment;     // numeric range and step
        private String modifier, incrementModifier; // L/W/? and "/" or "#"
    }

    /** Base parser/matcher for a single cron field. Subclasses add field-specific semantics. */
    abstract static class BasicField {
        // Pattern for one comma-separated part of a cron field, using COMMENTS mode
        // (whitespace and # comments inside the pattern are ignored by the engine).
        // Group names: all=*, ignorer=?, last=L, start/end=range bounds,
        // mod=L/W suffix, inkmod=/ or #, ink=increment value.
        private static final Pattern CRON_FELT_REGEXP = Pattern
                .compile("(?: # start of group 1\n"
                        + " (?:(?<all>\\*)|(?<ignorer>\\?)|(?<last>L)) # globalt flag (L, ?, *)\n"
                        + " | (?<start>[0-9]{1,2}|[a-z]{3,3}) # or start number or symbol\n"
                        + " (?: # start of group 2\n"
                        + " (?<mod>L|W) # modifier (L,W)\n"
                        + " | -(?<end>[0-9]{1,2}|[a-z]{3,3}) # or end nummer or symbol (in range)\n"
                        + " )? # end of group 2\n"
                        + ") # end of group 1\n"
                        + "(?:(?<inkmod>/|\\#)(?<ink>[0-9]{1,7}))? # increment and increment modifier (/ or \\#)\n",
                        Pattern.CASE_INSENSITIVE | Pattern.COMMENTS);

        final CronFieldType fieldType;
        final List<FieldPart> parts = new ArrayList<>();

        private BasicField(CronFieldType fieldType, String fieldExpr) {
            this.fieldType = fieldType;
            parse(fieldExpr);
        }

        /**
         * Parses a full cron field expression (e.g. "1-5,10/2") into {@link FieldPart}s.
         *
         * @throws IllegalArgumentException if any comma-separated part does not match the grammar
         */
        private void parse(String fieldExpr) { // NOSONAR
            String[] rangeParts = fieldExpr.split(",");
            for (String rangePart : rangeParts) {
                Matcher m = CRON_FELT_REGEXP.matcher(rangePart);
                if (!m.matches()) {
                    throw new IllegalArgumentException("Invalid cron field '" + rangePart + "' for field [" + fieldType + "]");
                }
                String startNummer = m.group("start");
                String modifier = m.group("mod");
                String sluttNummer = m.group("end");
                String inkrementModifier = m.group("inkmod");
                String inkrement = m.group("ink");
                FieldPart part = new FieldPart();
                // Sentinel: 999 means "no increment"; overwritten below whenever one applies.
                part.increment = 999;
                if (startNummer != null) {
                    part.from = mapValue(startNummer);
                    part.modifier = modifier;
                    if (sluttNummer != null) {
                        // explicit range a-b: step defaults to 1 (may be overwritten below)
                        part.to = mapValue(sluttNummer);
                        part.increment = 1;
                    } else if (inkrement != null) {
                        // "a/n": open-ended range up to the field maximum
                        part.to = fieldType.to;
                    } else {
                        // single value: degenerate range a-a
                        part.to = part.from;
                    }
                } else if (m.group("all") != null) {
                    // "*": full field range, step 1
                    part.from = fieldType.from;
                    part.to = fieldType.to;
                    part.increment = 1;
                } else if (m.group("ignorer") != null) {
                    part.modifier = m.group("ignorer");
                } else if (m.group("last") != null) {
                    part.modifier = m.group("last");
                } else {
                    throw new IllegalArgumentException("Invalid cron part: " + rangePart);
                }
                if (inkrement != null) {
                    part.incrementModifier = inkrementModifier;
                    part.increment = Integer.valueOf(inkrement);
                }
                validateRange(part);
                validatePart(part);
                parts.add(part);
            }
        }

        /**
         * Rejects modifiers this field type does not support. The base field allows no
         * modifier at all and only "/" as increment modifier; subclasses relax this.
         */
        protected void validatePart(FieldPart part) {
            if (part.modifier != null) {
                throw new IllegalArgumentException(String.format("Invalid modifier [%s]", part.modifier));
            } else if (part.incrementModifier != null && !"/".equals(part.incrementModifier)) {
                throw new IllegalArgumentException(String.format("Invalid increment modifier [%s]", part.incrementModifier));
            }
        }

        /** Checks the part's bounds against the field type and rejects rolling (wrapped) ranges. */
        private void validateRange(FieldPart part) {
            if ((part.from != null && part.from < fieldType.from) || (part.to != null && part.to > fieldType.to)) {
                throw new IllegalArgumentException(String.format("Invalid interval [%s-%s], must be %s<=_<=%s", part.from, part.to,
                        fieldType.from, fieldType.to));
            } else if (part.from != null && part.to != null && part.from > part.to) {
                throw new IllegalArgumentException(
                        String.format(
                                "Invalid interval [%s-%s]. Rolling periods are not supported (ex. 5-1, only 1-5) since this won't give a deterministic result. Must be %s<=_<=%s",
                                part.from, part.to, fieldType.from, fieldType.to));
            }
        }

        /**
         * Maps a symbolic name (e.g. "MON", "JAN") to its 1-based position in the field's
         * name table, or parses a plain number.
         * NOTE(review): uses Locale.getDefault() for upper-casing — in locales with
         * non-ASCII casing rules (e.g. Turkish dotless i) symbolic names like "fri"
         * could fail to match; Locale.ROOT would be safer. Confirm before changing.
         */
        protected Integer mapValue(String value) {
            Integer idx;
            if (fieldType.names != null && (idx = fieldType.names.indexOf(value.toUpperCase(Locale.getDefault()))) >= 0) {
                return idx + 1;
            }
            return Integer.valueOf(value);
        }

        /** True when val lies in [from, to] and sits on the increment grid anchored at from. */
        protected boolean matches(int val, FieldPart part) {
            if (val >= part.from && val <= part.to && (val - part.from) % part.increment == 0) {
                return true;
            }
            return false;
        }
    }

    /** Field matched purely by numeric value (seconds, minutes, hours, months). */
    static class SimpleField extends BasicField {
        SimpleField(CronFieldType fieldType, String fieldExpr) {
            super(fieldType, fieldExpr);
        }

        /** True if any parsed part matches the value (short-circuits on out-of-range values). */
        public boolean matches(int val) {
            if (val >= fieldType.from && val <= fieldType.to) {
                for (FieldPart part : parts) {
                    if (matches(val, part)) {
                        return true;
                    }
                }
            }
            return false;
        }
    }

    /** Day-of-week field; supports L (last weekday-of-month) and # (nth weekday-of-month). */
    static class DayOfWeekField extends BasicField {
        DayOfWeekField(String fieldExpr) {
            super(CronFieldType.DAY_OF_WEEK, fieldExpr);
        }

        boolean matches(LocalDate dato) {
            for (FieldPart part : parts) {
                if ("L".equals(part.modifier)) {
                    // "<day>L": that weekday, but only within the last 7 days of the month
                    return dato.getDayOfWeek() == part.from
                            && dato.getDayOfMonth() > (dato.dayOfMonth().getMaximumValue() - DAYS_PER_WEEK);
                } else if ("#".equals(part.incrementModifier)) {
                    // "<day>#<n>": the n-th occurrence of that weekday in the month
                    if (dato.getDayOfWeek() == part.from) {
                        int num = dato.getDayOfMonth() / 7;
                        return part.increment == (dato.getDayOfMonth() % 7 == 0 ? num : num + 1);
                    }
                    return false;
                } else if (matches(dato.getDayOfWeek(), part)) {
                    return true;
                }
            }
            return false;
        }

        @Override
        protected Integer mapValue(String value) {
            // Use 1-7 for weekdays, but 0 will also represent sunday (linux practice)
            return "0".equals(value) ? Integer.valueOf(7) : super.mapValue(value);
        }

        @Override
        protected boolean matches(int val, FieldPart part) {
            // "?" means "any day of week"
            return "?".equals(part.modifier) || super.matches(val, part);
        }

        @Override
        protected void validatePart(FieldPart part) {
            // this field additionally allows the L and ? modifiers and the # increment modifier
            if (part.modifier != null && Arrays.asList("L", "?").indexOf(part.modifier) == -1) {
                throw new IllegalArgumentException(String.format("Invalid modifier [%s]", part.modifier));
            } else if (part.incrementModifier != null && Arrays.asList("/", "#").indexOf(part.incrementModifier) == -1) {
                throw new IllegalArgumentException(String.format("Invalid increment modifier [%s]", part.incrementModifier));
            }
        }
    }

    /** Day-of-month field; supports L (last day, optionally offset) and W (nearest weekday). */
    static class DayOfMonthField extends BasicField {
        DayOfMonthField(String fieldExpr) {
            super(CronFieldType.DAY_OF_MONTH, fieldExpr);
        }

        boolean matches(LocalDate dato) {
            for (FieldPart part : parts) {
                if ("L".equals(part.modifier)) {
                    // "L" or "L-n": last day of month, minus an optional offset
                    return dato.getDayOfMonth() == (dato.dayOfMonth().getMaximumValue() - (part.from == null ? 0 : part.from));
                } else if ("W".equals(part.modifier)) {
                    // "<n>W": the weekday (Mon-Fri) nearest to day n within the same month —
                    // Friday also matches if n falls on Saturday, Monday if n falls on Sunday.
                    if (dato.getDayOfWeek() <= 5) {
                        if (dato.getDayOfMonth() == part.from) {
                            return true;
                        } else if (dato.getDayOfWeek() == 5) {
                            return dato.plusDays(1).getDayOfMonth() == part.from;
                        } else if (dato.getDayOfWeek() == 1) {
                            return dato.minusDays(1).getDayOfMonth() == part.from;
                        }
                    }
                } else if (matches(dato.getDayOfMonth(), part)) {
                    return true;
                }
            }
            return false;
        }

        @Override
        protected void validatePart(FieldPart part) {
            // this field additionally allows the L, W and ? modifiers
            if (part.modifier != null && Arrays.asList("L", "W", "?").indexOf(part.modifier) == -1) {
                throw new IllegalArgumentException(String.format("Invalid modifier [%s]", part.modifier));
            } else if (part.incrementModifier != null && !"/".equals(part.incrementModifier)) {
                throw new IllegalArgumentException(String.format("Invalid increment modifier [%s]", part.incrementModifier));
            }
        }

        @Override
        protected boolean matches(int val, FieldPart part) {
            // "?" means "any day of month"
            return "?".equals(part.modifier) || super.matches(val, part);
        }
    }
}
package com.docusign.esign.model; import java.util.Objects; import java.util.Arrays; import com.docusign.esign.model.NotaryJournalCredibleWitness; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonValue; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; /** * NotaryJournalMetaData. * */ public class NotaryJournalMetaData { @JsonProperty("comment") private String comment = null; @JsonProperty("credibleWitnesses") private java.util.List<NotaryJournalCredibleWitness> credibleWitnesses = null; @JsonProperty("signatureImage") private String signatureImage = null; @JsonProperty("signerIdType") private String signerIdType = null; /** * comment. * * @return NotaryJournalMetaData **/ public NotaryJournalMetaData comment(String comment) { this.comment = comment; return this; } /** * . * @return comment **/ @ApiModelProperty(value = "") public String getComment() { return comment; } /** * setComment. **/ public void setComment(String comment) { this.comment = comment; } /** * credibleWitnesses. * * @return NotaryJournalMetaData **/ public NotaryJournalMetaData credibleWitnesses(java.util.List<NotaryJournalCredibleWitness> credibleWitnesses) { this.credibleWitnesses = credibleWitnesses; return this; } /** * addCredibleWitnessesItem. * * @return NotaryJournalMetaData **/ public NotaryJournalMetaData addCredibleWitnessesItem(NotaryJournalCredibleWitness credibleWitnessesItem) { if (this.credibleWitnesses == null) { this.credibleWitnesses = new java.util.ArrayList<NotaryJournalCredibleWitness>(); } this.credibleWitnesses.add(credibleWitnessesItem); return this; } /** * . * @return credibleWitnesses **/ @ApiModelProperty(value = "") public java.util.List<NotaryJournalCredibleWitness> getCredibleWitnesses() { return credibleWitnesses; } /** * setCredibleWitnesses. 
**/ public void setCredibleWitnesses(java.util.List<NotaryJournalCredibleWitness> credibleWitnesses) { this.credibleWitnesses = credibleWitnesses; } /** * signatureImage. * * @return NotaryJournalMetaData **/ public NotaryJournalMetaData signatureImage(String signatureImage) { this.signatureImage = signatureImage; return this; } /** * . * @return signatureImage **/ @ApiModelProperty(value = "") public String getSignatureImage() { return signatureImage; } /** * setSignatureImage. **/ public void setSignatureImage(String signatureImage) { this.signatureImage = signatureImage; } /** * signerIdType. * * @return NotaryJournalMetaData **/ public NotaryJournalMetaData signerIdType(String signerIdType) { this.signerIdType = signerIdType; return this; } /** * . * @return signerIdType **/ @ApiModelProperty(value = "") public String getSignerIdType() { return signerIdType; } /** * setSignerIdType. **/ public void setSignerIdType(String signerIdType) { this.signerIdType = signerIdType; } /** * Compares objects. * * @return true or false depending on comparison result. */ @Override public boolean equals(java.lang.Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } NotaryJournalMetaData notaryJournalMetaData = (NotaryJournalMetaData) o; return Objects.equals(this.comment, notaryJournalMetaData.comment) && Objects.equals(this.credibleWitnesses, notaryJournalMetaData.credibleWitnesses) && Objects.equals(this.signatureImage, notaryJournalMetaData.signatureImage) && Objects.equals(this.signerIdType, notaryJournalMetaData.signerIdType); } /** * Returns the HashCode. */ @Override public int hashCode() { return Objects.hash(comment, credibleWitnesses, signatureImage, signerIdType); } /** * Converts the given object to string. 
*/ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class NotaryJournalMetaData {\n"); sb.append(" comment: ").append(toIndentedString(comment)).append("\n"); sb.append(" credibleWitnesses: ").append(toIndentedString(credibleWitnesses)).append("\n"); sb.append(" signatureImage: ").append(toIndentedString(signatureImage)).append("\n"); sb.append(" signerIdType: ").append(toIndentedString(signerIdType)).append("\n"); sb.append("}"); return sb.toString(); } /** * Convert the given object to string with each line indented by 4 spaces * (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { return "null"; } return o.toString().replace("\n", "\n "); } }
/*
 * Copyright (C) 2013 The Libphonenumber Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.i18n.phonenumbers;

import com.google.i18n.phonenumbers.Phonenumber.PhoneNumber;

/**
 * Unit tests for ShortNumberInfo.java
 *
 * NOTE(review): all expected values below are tied to the TEST metadata loaded by
 * TestMetadataTestCase, not to real-world numbering plans — do not "correct" them
 * against production data.
 *
 * @author Shaopeng Jia
 */
public class ShortNumberInfoTest extends TestMetadataTestCase {
  private static final ShortNumberInfo shortInfo = ShortNumberInfo.getInstance();

  public void testIsPossibleShortNumber() {
    PhoneNumber possibleNumber = new PhoneNumber();
    possibleNumber.setCountryCode(33).setNationalNumber(123456L);
    assertTrue(shortInfo.isPossibleShortNumber(possibleNumber));
    assertTrue(
        shortInfo.isPossibleShortNumberForRegion(parse("123456", RegionCode.FR), RegionCode.FR));

    PhoneNumber impossibleNumber = new PhoneNumber();
    impossibleNumber.setCountryCode(33).setNationalNumber(9L);
    assertFalse(shortInfo.isPossibleShortNumber(impossibleNumber));

    // Note that GB and GG share the country calling code 44, and that this number is possible but
    // not valid.
    assertTrue(shortInfo.isPossibleShortNumber(
        new PhoneNumber().setCountryCode(44).setNationalNumber(11001L)));
  }

  public void testIsValidShortNumber() {
    assertTrue(shortInfo.isValidShortNumber(
        new PhoneNumber().setCountryCode(33).setNationalNumber(1010L)));
    assertTrue(shortInfo.isValidShortNumberForRegion(parse("1010", RegionCode.FR), RegionCode.FR));
    assertFalse(shortInfo.isValidShortNumber(
        new PhoneNumber().setCountryCode(33).setNationalNumber(123456L)));
    assertFalse(
        shortInfo.isValidShortNumberForRegion(parse("123456", RegionCode.FR), RegionCode.FR));

    // Note that GB and GG share the country calling code 44.
    assertTrue(shortInfo.isValidShortNumber(
        new PhoneNumber().setCountryCode(44).setNationalNumber(18001L)));
  }

  public void testIsCarrierSpecific() {
    PhoneNumber carrierSpecificNumber = new PhoneNumber();
    carrierSpecificNumber.setCountryCode(1).setNationalNumber(33669L);
    assertTrue(shortInfo.isCarrierSpecific(carrierSpecificNumber));
    assertTrue(
        shortInfo.isCarrierSpecificForRegion(parse("33669", RegionCode.US), RegionCode.US));

    PhoneNumber notCarrierSpecificNumber = new PhoneNumber();
    notCarrierSpecificNumber.setCountryCode(1).setNationalNumber(911L);
    assertFalse(shortInfo.isCarrierSpecific(notCarrierSpecificNumber));
    assertFalse(
        shortInfo.isCarrierSpecificForRegion(parse("911", RegionCode.US), RegionCode.US));

    // 211 is carrier-specific in the US but not in Barbados, despite the shared calling code 1.
    PhoneNumber carrierSpecificNumberForSomeRegion = new PhoneNumber();
    carrierSpecificNumberForSomeRegion.setCountryCode(1).setNationalNumber(211L);
    assertTrue(shortInfo.isCarrierSpecific(carrierSpecificNumberForSomeRegion));
    assertTrue(
        shortInfo.isCarrierSpecificForRegion(carrierSpecificNumberForSomeRegion, RegionCode.US));
    assertFalse(
        shortInfo.isCarrierSpecificForRegion(carrierSpecificNumberForSomeRegion, RegionCode.BB));
  }

  public void testIsSmsService() {
    // SMS-service classification is region-dependent for numbers under a shared calling code.
    PhoneNumber smsServiceNumberForSomeRegion = new PhoneNumber();
    smsServiceNumberForSomeRegion.setCountryCode(1).setNationalNumber(21234L);
    assertTrue(shortInfo.isSmsServiceForRegion(smsServiceNumberForSomeRegion, RegionCode.US));
    assertFalse(shortInfo.isSmsServiceForRegion(smsServiceNumberForSomeRegion, RegionCode.BB));
  }

  public void testGetExpectedCost() {
    // For each cost category, look up the example number from the metadata and verify that
    // both the region-scoped and the region-free lookups classify it the same way.
    String premiumRateExample = shortInfo.getExampleShortNumberForCost(RegionCode.FR,
        ShortNumberInfo.ShortNumberCost.PREMIUM_RATE);
    assertEquals(ShortNumberInfo.ShortNumberCost.PREMIUM_RATE, shortInfo.getExpectedCostForRegion(
        parse(premiumRateExample, RegionCode.FR), RegionCode.FR));
    PhoneNumber premiumRateNumber = new PhoneNumber();
    premiumRateNumber.setCountryCode(33).setNationalNumber(Integer.parseInt(premiumRateExample));
    assertEquals(ShortNumberInfo.ShortNumberCost.PREMIUM_RATE,
        shortInfo.getExpectedCost(premiumRateNumber));

    String standardRateExample = shortInfo.getExampleShortNumberForCost(RegionCode.FR,
        ShortNumberInfo.ShortNumberCost.STANDARD_RATE);
    assertEquals(ShortNumberInfo.ShortNumberCost.STANDARD_RATE, shortInfo.getExpectedCostForRegion(
        parse(standardRateExample, RegionCode.FR), RegionCode.FR));
    PhoneNumber standardRateNumber = new PhoneNumber();
    standardRateNumber.setCountryCode(33).setNationalNumber(Integer.parseInt(standardRateExample));
    assertEquals(ShortNumberInfo.ShortNumberCost.STANDARD_RATE,
        shortInfo.getExpectedCost(standardRateNumber));

    String tollFreeExample = shortInfo.getExampleShortNumberForCost(RegionCode.FR,
        ShortNumberInfo.ShortNumberCost.TOLL_FREE);
    assertEquals(ShortNumberInfo.ShortNumberCost.TOLL_FREE,
        shortInfo.getExpectedCostForRegion(parse(tollFreeExample, RegionCode.FR), RegionCode.FR));
    PhoneNumber tollFreeNumber = new PhoneNumber();
    tollFreeNumber.setCountryCode(33).setNationalNumber(Integer.parseInt(tollFreeExample));
    assertEquals(ShortNumberInfo.ShortNumberCost.TOLL_FREE,
        shortInfo.getExpectedCost(tollFreeNumber));

    assertEquals(ShortNumberInfo.ShortNumberCost.UNKNOWN_COST,
        shortInfo.getExpectedCostForRegion(parse("12345", RegionCode.FR), RegionCode.FR));
    PhoneNumber unknownCostNumber = new PhoneNumber();
    unknownCostNumber.setCountryCode(33).setNationalNumber(12345L);
    assertEquals(ShortNumberInfo.ShortNumberCost.UNKNOWN_COST,
        shortInfo.getExpectedCost(unknownCostNumber));

    // Test that an invalid number may nevertheless have a cost other than UNKNOWN_COST.
    assertFalse(
        shortInfo.isValidShortNumberForRegion(parse("116123", RegionCode.FR), RegionCode.FR));
    assertEquals(ShortNumberInfo.ShortNumberCost.TOLL_FREE,
        shortInfo.getExpectedCostForRegion(parse("116123", RegionCode.FR), RegionCode.FR));
    PhoneNumber invalidNumber = new PhoneNumber();
    invalidNumber.setCountryCode(33).setNationalNumber(116123L);
    assertFalse(shortInfo.isValidShortNumber(invalidNumber));
    assertEquals(ShortNumberInfo.ShortNumberCost.TOLL_FREE,
        shortInfo.getExpectedCost(invalidNumber));

    // Test a nonexistent country code.
    assertEquals(ShortNumberInfo.ShortNumberCost.UNKNOWN_COST,
        shortInfo.getExpectedCostForRegion(parse("911", RegionCode.US), RegionCode.ZZ));
    unknownCostNumber.clear();
    unknownCostNumber.setCountryCode(123).setNationalNumber(911L);
    assertEquals(ShortNumberInfo.ShortNumberCost.UNKNOWN_COST,
        shortInfo.getExpectedCost(unknownCostNumber));
  }

  public void testGetExpectedCostForSharedCountryCallingCode() {
    // Test some numbers which have different costs in countries sharing the same country calling
    // code. In Australia, 1234 is premium-rate, 1194 is standard-rate, and 733 is toll-free. These
    // are not known to be valid numbers in the Christmas Islands.
    String ambiguousPremiumRateString = "1234";
    PhoneNumber ambiguousPremiumRateNumber =
        new PhoneNumber().setCountryCode(61).setNationalNumber(1234L);
    String ambiguousStandardRateString = "1194";
    PhoneNumber ambiguousStandardRateNumber =
        new PhoneNumber().setCountryCode(61).setNationalNumber(1194L);
    String ambiguousTollFreeString = "733";
    PhoneNumber ambiguousTollFreeNumber =
        new PhoneNumber().setCountryCode(61).setNationalNumber(733L);

    assertTrue(shortInfo.isValidShortNumber(ambiguousPremiumRateNumber));
    assertTrue(shortInfo.isValidShortNumber(ambiguousStandardRateNumber));
    assertTrue(shortInfo.isValidShortNumber(ambiguousTollFreeNumber));

    assertTrue(shortInfo.isValidShortNumberForRegion(
        parse(ambiguousPremiumRateString, RegionCode.AU), RegionCode.AU));
    assertEquals(ShortNumberInfo.ShortNumberCost.PREMIUM_RATE, shortInfo.getExpectedCostForRegion(
        parse(ambiguousPremiumRateString, RegionCode.AU), RegionCode.AU));
    assertFalse(shortInfo.isValidShortNumberForRegion(
        parse(ambiguousPremiumRateString, RegionCode.CX), RegionCode.CX));
    assertEquals(ShortNumberInfo.ShortNumberCost.UNKNOWN_COST, shortInfo.getExpectedCostForRegion(
        parse(ambiguousPremiumRateString, RegionCode.CX), RegionCode.CX));
    // PREMIUM_RATE takes precedence over UNKNOWN_COST.
    assertEquals(ShortNumberInfo.ShortNumberCost.PREMIUM_RATE,
        shortInfo.getExpectedCost(ambiguousPremiumRateNumber));

    assertTrue(shortInfo.isValidShortNumberForRegion(
        parse(ambiguousStandardRateString, RegionCode.AU), RegionCode.AU));
    assertEquals(ShortNumberInfo.ShortNumberCost.STANDARD_RATE, shortInfo.getExpectedCostForRegion(
        parse(ambiguousStandardRateString, RegionCode.AU), RegionCode.AU));
    assertFalse(shortInfo.isValidShortNumberForRegion(
        parse(ambiguousStandardRateString, RegionCode.CX), RegionCode.CX));
    assertEquals(ShortNumberInfo.ShortNumberCost.UNKNOWN_COST, shortInfo.getExpectedCostForRegion(
        parse(ambiguousStandardRateString, RegionCode.CX), RegionCode.CX));
    assertEquals(ShortNumberInfo.ShortNumberCost.UNKNOWN_COST,
        shortInfo.getExpectedCost(ambiguousStandardRateNumber));

    assertTrue(shortInfo.isValidShortNumberForRegion(parse(ambiguousTollFreeString, RegionCode.AU),
        RegionCode.AU));
    assertEquals(ShortNumberInfo.ShortNumberCost.TOLL_FREE, shortInfo.getExpectedCostForRegion(
        parse(ambiguousTollFreeString, RegionCode.AU), RegionCode.AU));
    assertFalse(shortInfo.isValidShortNumberForRegion(parse(ambiguousTollFreeString, RegionCode.CX),
        RegionCode.CX));
    assertEquals(ShortNumberInfo.ShortNumberCost.UNKNOWN_COST, shortInfo.getExpectedCostForRegion(
        parse(ambiguousTollFreeString, RegionCode.CX), RegionCode.CX));
    assertEquals(ShortNumberInfo.ShortNumberCost.UNKNOWN_COST,
        shortInfo.getExpectedCost(ambiguousTollFreeNumber));
  }

  public void testGetExampleShortNumber() {
    assertEquals("8711", shortInfo.getExampleShortNumber(RegionCode.AM));
    assertEquals("1010", shortInfo.getExampleShortNumber(RegionCode.FR));
    // Unknown or null regions yield the empty string, not an exception.
    assertEquals("", shortInfo.getExampleShortNumber(RegionCode.UN001));
    assertEquals("", shortInfo.getExampleShortNumber(null));
  }

  public void testGetExampleShortNumberForCost() {
    assertEquals("3010", shortInfo.getExampleShortNumberForCost(RegionCode.FR,
        ShortNumberInfo.ShortNumberCost.TOLL_FREE));
    assertEquals("1023", shortInfo.getExampleShortNumberForCost(RegionCode.FR,
        ShortNumberInfo.ShortNumberCost.STANDARD_RATE));
    assertEquals("42000", shortInfo.getExampleShortNumberForCost(RegionCode.FR,
        ShortNumberInfo.ShortNumberCost.PREMIUM_RATE));
    // No example exists for the catch-all UNKNOWN_COST category.
    assertEquals("", shortInfo.getExampleShortNumberForCost(RegionCode.FR,
        ShortNumberInfo.ShortNumberCost.UNKNOWN_COST));
  }

  public void testConnectsToEmergencyNumber_US() {
    assertTrue(shortInfo.connectsToEmergencyNumber("911", RegionCode.US));
    assertTrue(shortInfo.connectsToEmergencyNumber("112", RegionCode.US));
    assertFalse(shortInfo.connectsToEmergencyNumber("999", RegionCode.US));
  }

  public void testConnectsToEmergencyNumberLongNumber_US() {
    // In the US, dialing extra digits after an emergency prefix still connects.
    assertTrue(shortInfo.connectsToEmergencyNumber("9116666666", RegionCode.US));
    assertTrue(shortInfo.connectsToEmergencyNumber("1126666666", RegionCode.US));
    assertFalse(shortInfo.connectsToEmergencyNumber("9996666666", RegionCode.US));
  }

  public void testConnectsToEmergencyNumberWithFormatting_US() {
    assertTrue(shortInfo.connectsToEmergencyNumber("9-1-1", RegionCode.US));
    assertTrue(shortInfo.connectsToEmergencyNumber("1-1-2", RegionCode.US));
    assertFalse(shortInfo.connectsToEmergencyNumber("9-9-9", RegionCode.US));
  }

  public void testConnectsToEmergencyNumberWithPlusSign_US() {
    // A leading plus sign (ASCII or fullwidth) means the string is not an emergency dial.
    assertFalse(shortInfo.connectsToEmergencyNumber("+911", RegionCode.US));
    assertFalse(shortInfo.connectsToEmergencyNumber("\uFF0B911", RegionCode.US));
    assertFalse(shortInfo.connectsToEmergencyNumber(" +911", RegionCode.US));
    assertFalse(shortInfo.connectsToEmergencyNumber("+112", RegionCode.US));
    assertFalse(shortInfo.connectsToEmergencyNumber("+999", RegionCode.US));
  }

  public void testConnectsToEmergencyNumber_BR() {
    assertTrue(shortInfo.connectsToEmergencyNumber("911", RegionCode.BR));
    assertTrue(shortInfo.connectsToEmergencyNumber("190", RegionCode.BR));
    assertFalse(shortInfo.connectsToEmergencyNumber("999", RegionCode.BR));
  }

  public void testConnectsToEmergencyNumberLongNumber_BR() {
    // Brazilian emergency numbers don't work when additional digits are appended.
    assertFalse(shortInfo.connectsToEmergencyNumber("9111", RegionCode.BR));
    assertFalse(shortInfo.connectsToEmergencyNumber("1900", RegionCode.BR));
    assertFalse(shortInfo.connectsToEmergencyNumber("9996", RegionCode.BR));
  }

  public void testConnectsToEmergencyNumber_CL() {
    assertTrue(shortInfo.connectsToEmergencyNumber("131", RegionCode.CL));
    assertTrue(shortInfo.connectsToEmergencyNumber("133", RegionCode.CL));
  }

  public void testConnectsToEmergencyNumberLongNumber_CL() {
    // Chilean emergency numbers don't work when additional digits are appended.
    assertFalse(shortInfo.connectsToEmergencyNumber("1313", RegionCode.CL));
    assertFalse(shortInfo.connectsToEmergencyNumber("1330", RegionCode.CL));
  }

  public void testConnectsToEmergencyNumber_AO() {
    // Angola doesn't have any metadata for emergency numbers in the test metadata.
    assertFalse(shortInfo.connectsToEmergencyNumber("911", RegionCode.AO));
    assertFalse(shortInfo.connectsToEmergencyNumber("222123456", RegionCode.AO));
    assertFalse(shortInfo.connectsToEmergencyNumber("923123456", RegionCode.AO));
  }

  public void testConnectsToEmergencyNumber_ZW() {
    // Zimbabwe doesn't have any metadata in the test metadata.
    assertFalse(shortInfo.connectsToEmergencyNumber("911", RegionCode.ZW));
    assertFalse(shortInfo.connectsToEmergencyNumber("01312345", RegionCode.ZW));
    assertFalse(shortInfo.connectsToEmergencyNumber("0711234567", RegionCode.ZW));
  }

  public void testIsEmergencyNumber_US() {
    assertTrue(shortInfo.isEmergencyNumber("911", RegionCode.US));
    assertTrue(shortInfo.isEmergencyNumber("112", RegionCode.US));
    assertFalse(shortInfo.isEmergencyNumber("999", RegionCode.US));
  }

  public void testIsEmergencyNumberLongNumber_US() {
    // Unlike connectsToEmergencyNumber, isEmergencyNumber requires an exact match.
    assertFalse(shortInfo.isEmergencyNumber("9116666666", RegionCode.US));
    assertFalse(shortInfo.isEmergencyNumber("1126666666", RegionCode.US));
    assertFalse(shortInfo.isEmergencyNumber("9996666666", RegionCode.US));
  }

  public void testIsEmergencyNumberWithFormatting_US() {
    assertTrue(shortInfo.isEmergencyNumber("9-1-1", RegionCode.US));
    assertTrue(shortInfo.isEmergencyNumber("*911", RegionCode.US));
    assertTrue(shortInfo.isEmergencyNumber("1-1-2", RegionCode.US));
    assertTrue(shortInfo.isEmergencyNumber("*112", RegionCode.US));
    assertFalse(shortInfo.isEmergencyNumber("9-9-9", RegionCode.US));
    assertFalse(shortInfo.isEmergencyNumber("*999", RegionCode.US));
  }

  public void testIsEmergencyNumberWithPlusSign_US() {
    assertFalse(shortInfo.isEmergencyNumber("+911", RegionCode.US));
    assertFalse(shortInfo.isEmergencyNumber("\uFF0B911", RegionCode.US));
    assertFalse(shortInfo.isEmergencyNumber(" +911", RegionCode.US));
    assertFalse(shortInfo.isEmergencyNumber("+112", RegionCode.US));
    assertFalse(shortInfo.isEmergencyNumber("+999", RegionCode.US));
  }

  public void testIsEmergencyNumber_BR() {
    assertTrue(shortInfo.isEmergencyNumber("911", RegionCode.BR));
    assertTrue(shortInfo.isEmergencyNumber("190", RegionCode.BR));
    assertFalse(shortInfo.isEmergencyNumber("999", RegionCode.BR));
  }

  public void testIsEmergencyNumberLongNumber_BR() {
    assertFalse(shortInfo.isEmergencyNumber("9111", RegionCode.BR));
    assertFalse(shortInfo.isEmergencyNumber("1900", RegionCode.BR));
    assertFalse(shortInfo.isEmergencyNumber("9996", RegionCode.BR));
  }

  public void testIsEmergencyNumber_AO() {
    // Angola doesn't have any metadata for emergency numbers in the test metadata.
    assertFalse(shortInfo.isEmergencyNumber("911", RegionCode.AO));
    assertFalse(shortInfo.isEmergencyNumber("222123456", RegionCode.AO));
    assertFalse(shortInfo.isEmergencyNumber("923123456", RegionCode.AO));
  }

  public void testIsEmergencyNumber_ZW() {
    // Zimbabwe doesn't have any metadata in the test metadata.
    assertFalse(shortInfo.isEmergencyNumber("911", RegionCode.ZW));
    assertFalse(shortInfo.isEmergencyNumber("01312345", RegionCode.ZW));
    assertFalse(shortInfo.isEmergencyNumber("0711234567", RegionCode.ZW));
  }

  public void testEmergencyNumberForSharedCountryCallingCode() {
    // Test the emergency number 112, which is valid in both Australia and the Christmas Islands.
    assertTrue(shortInfo.isEmergencyNumber("112", RegionCode.AU));
    assertTrue(shortInfo.isValidShortNumberForRegion(parse("112", RegionCode.AU), RegionCode.AU));
    assertEquals(ShortNumberInfo.ShortNumberCost.TOLL_FREE,
        shortInfo.getExpectedCostForRegion(parse("112", RegionCode.AU), RegionCode.AU));
    assertTrue(shortInfo.isEmergencyNumber("112", RegionCode.CX));
    assertTrue(shortInfo.isValidShortNumberForRegion(parse("112", RegionCode.CX), RegionCode.CX));
    assertEquals(ShortNumberInfo.ShortNumberCost.TOLL_FREE,
        shortInfo.getExpectedCostForRegion(parse("112", RegionCode.CX), RegionCode.CX));
    PhoneNumber sharedEmergencyNumber =
        new PhoneNumber().setCountryCode(61).setNationalNumber(112L);
    assertTrue(shortInfo.isValidShortNumber(sharedEmergencyNumber));
    assertEquals(ShortNumberInfo.ShortNumberCost.TOLL_FREE,
        shortInfo.getExpectedCost(sharedEmergencyNumber));
  }

  public void testOverlappingNANPANumber() {
    // 211 is an emergency number in Barbados, while it is a toll-free information line in Canada
    // and the USA.
    assertTrue(shortInfo.isEmergencyNumber("211", RegionCode.BB));
    assertEquals(ShortNumberInfo.ShortNumberCost.TOLL_FREE,
        shortInfo.getExpectedCostForRegion(parse("211", RegionCode.BB), RegionCode.BB));
    assertFalse(shortInfo.isEmergencyNumber("211", RegionCode.US));
    assertEquals(ShortNumberInfo.ShortNumberCost.UNKNOWN_COST,
        shortInfo.getExpectedCostForRegion(parse("211", RegionCode.US), RegionCode.US));
    assertFalse(shortInfo.isEmergencyNumber("211", RegionCode.CA));
    assertEquals(ShortNumberInfo.ShortNumberCost.TOLL_FREE,
        shortInfo.getExpectedCostForRegion(parse("211", RegionCode.CA), RegionCode.CA));
  }

  public void testCountryCallingCodeIsNotIgnored() {
    // +46 is the country calling code for Sweden (SE), and 40404 is a valid short number in the US.
    assertFalse(shortInfo.isPossibleShortNumberForRegion(
        parse("+4640404", RegionCode.SE), RegionCode.US));
    assertFalse(shortInfo.isValidShortNumberForRegion(
        parse("+4640404", RegionCode.SE), RegionCode.US));
    assertEquals(ShortNumberInfo.ShortNumberCost.UNKNOWN_COST,
        shortInfo.getExpectedCostForRegion(
            parse("+4640404", RegionCode.SE), RegionCode.US));
  }

  /**
   * Parses {@code number} for {@code regionCode}, converting a parse failure into a test
   * failure — test inputs are expected to always be parseable.
   */
  private PhoneNumber parse(String number, String regionCode) {
    try {
      return phoneUtil.parse(number, regionCode);
    } catch (NumberParseException e) {
      throw new AssertionError(
          "Test input data should always parse correctly: " + number + " (" + regionCode + ")", e);
    }
  }
}
/* * Copyright 2014 MovingBlocks * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.terasology.world.propagation; import com.google.common.collect.Maps; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import org.terasology.TerasologyTestingEnvironment; import org.terasology.assets.ResourceUrn; import org.terasology.assets.management.AssetManager; import org.terasology.entitySystem.entity.EntityRef; import org.terasology.math.Region3i; import org.terasology.math.Side; import org.terasology.math.geom.Vector3i; import org.terasology.registry.CoreRegistry; import org.terasology.world.biomes.BiomeManager; import org.terasology.world.block.Block; import org.terasology.world.block.BlockManager; import org.terasology.world.block.BlockUri; import org.terasology.world.block.family.SymmetricBlockFamilyFactory; import org.terasology.world.block.internal.BlockManagerImpl; import org.terasology.world.block.loader.BlockFamilyDefinition; import org.terasology.world.block.loader.BlockFamilyDefinitionData; import org.terasology.world.block.shapes.BlockShape; import org.terasology.world.block.tiles.NullWorldAtlas; import org.terasology.world.chunks.Chunk; import org.terasology.world.chunks.ChunkConstants; import org.terasology.world.chunks.ChunkProvider; import org.terasology.world.chunks.ChunkRegionListener; import org.terasology.world.chunks.internal.ChunkImpl; import org.terasology.world.internal.ChunkViewCore; import 
org.terasology.world.propagation.light.InternalLightProcessor;
import org.terasology.world.propagation.light.SunlightPropagationRules;
import org.terasology.world.propagation.light.SunlightRegenPropagationRules;
import org.terasology.world.propagation.light.SunlightRegenWorldView;
import org.terasology.world.propagation.light.SunlightWorldView;

import java.util.Collection;
import java.util.Map;

import static org.junit.Assert.assertEquals;

/**
 * Tests propagation of sunlight and sunlight-regeneration values across the boundary
 * between two vertically stacked chunks, driven through a
 * {@link SunlightRegenBatchPropagator} (regen) and a {@link StandardBatchPropagator}
 * (the sunlight values themselves).
 */
public class BetweenChunkPropagationTest extends TerasologyTestingEnvironment {
    private BlockManagerImpl blockManager;
    private BiomeManager biomeManager;
    // Opaque test block ("engine:stone") used to build the overhang shelf.
    private Block solid;
    private SunlightPropagationRules lightRules;
    private SunlightRegenPropagationRules regenRules;
    // Minimal chunk provider that only serves the chunks each test registers explicitly.
    private SelectChunkProvider provider = new SelectChunkProvider();
    private SunlightRegenWorldView regenWorldView;
    private SunlightWorldView lightWorldView;
    private BatchPropagator sunlightPropagator;
    private SunlightRegenBatchPropagator propagator;

    @Before
    @Override
    public void setup() throws Exception {
        super.setup();
        AssetManager assetManager = CoreRegistry.get(AssetManager.class);
        regenRules = new SunlightRegenPropagationRules();
        blockManager = new BlockManagerImpl(new NullWorldAtlas(), assetManager, true);
        biomeManager = Mockito.mock(BiomeManager.class);
        CoreRegistry.put(BlockManager.class, blockManager);
        // Register an opaque, cube-shaped "engine:stone" block family for the tests to place.
        BlockFamilyDefinitionData solidData = new BlockFamilyDefinitionData();
        solidData.getBaseSection().setDisplayName("Stone");
        solidData.getBaseSection().setShape(assetManager.getAsset("engine:cube", BlockShape.class).get());
        solidData.getBaseSection().setTranslucent(false);
        solidData.setFamilyFactory(new SymmetricBlockFamilyFactory());
        assetManager.loadAsset(new ResourceUrn("engine:stone"), solidData, BlockFamilyDefinition.class);
        solid = blockManager.getBlock(new BlockUri(new ResourceUrn("engine:stone")));
        // Build the propagators on top of world views that read through the select provider.
        regenWorldView = new SunlightRegenWorldView(provider);
        lightWorldView = new SunlightWorldView(provider);
        lightRules = new SunlightPropagationRules(regenWorldView);
        sunlightPropagator = new StandardBatchPropagator(lightRules, lightWorldView);
        propagator = new SunlightRegenBatchPropagator(regenRules, regenWorldView, sunlightPropagator, lightWorldView);
    }

    @Test
    public void testBetweenChunksSimple() {
        Chunk topChunk = new ChunkImpl(new Vector3i(0, 1, 0), blockManager, biomeManager);
        Chunk bottomChunk = new ChunkImpl(new Vector3i(0, 0, 0), blockManager, biomeManager);
        provider.addChunk(topChunk);
        provider.addChunk(bottomChunk);
        // Saturate the lowest (y = 0) slice of the top chunk with full sunlight and regen.
        for (Vector3i pos : Region3i.createFromMinAndSize(new Vector3i(0, 0, 0),
                new Vector3i(ChunkConstants.SIZE_X, 1, ChunkConstants.SIZE_Z))) {
            topChunk.setSunlight(pos, ChunkConstants.MAX_SUNLIGHT);
            topChunk.setSunlightRegen(pos, ChunkConstants.MAX_SUNLIGHT_REGEN);
        }
        InternalLightProcessor.generateInternalLighting(bottomChunk);
        propagator.propagateBetween(topChunk, bottomChunk, Side.BOTTOM, true);
        propagator.process();
        sunlightPropagator.process();
        // The empty bottom chunk should end up fully lit (and fully regenerated) everywhere.
        for (Vector3i pos : ChunkConstants.CHUNK_REGION) {
            assertEquals("Incorrect at position " + pos, ChunkConstants.MAX_SUNLIGHT, bottomChunk.getSunlight(pos));
            assertEquals("Incorrect at position " + pos, ChunkConstants.MAX_SUNLIGHT_REGEN, bottomChunk.getSunlightRegen(pos));
        }
    }

    @Test
    public void testBetweenChunksSimpleSunlightRegenOnly() {
        Chunk topChunk = new ChunkImpl(new Vector3i(0, 1, 0), blockManager, biomeManager);
        Chunk bottomChunk = new ChunkImpl(new Vector3i(0, 0, 0), blockManager, biomeManager);
        provider.addChunk(topChunk);
        provider.addChunk(bottomChunk);
        for (Vector3i pos : Region3i.createFromMinAndSize(new Vector3i(0, 0, 0),
                new Vector3i(ChunkConstants.SIZE_X, 1, ChunkConstants.SIZE_Z))) {
            topChunk.setSunlight(pos, ChunkConstants.MAX_SUNLIGHT);
            topChunk.setSunlightRegen(pos, ChunkConstants.MAX_SUNLIGHT_REGEN);
        }
        InternalLightProcessor.generateInternalLighting(bottomChunk);
        propagator.propagateBetween(topChunk, bottomChunk, Side.BOTTOM, true);
        // Deliberately only the regen propagator is processed here — sunlight propagation
        // is not run, so only the regen values are asserted below.
        propagator.process();
        for (Vector3i pos : ChunkConstants.CHUNK_REGION) {
            assertEquals("Incorrect at position " + pos, ChunkConstants.MAX_SUNLIGHT_REGEN, bottomChunk.getSunlightRegen(pos));
        }
    }

    @Test
    public void testBetweenChunksWithOverhang() {
        Chunk topChunk = new ChunkImpl(new Vector3i(0, 1, 0), blockManager, biomeManager);
        Chunk bottomChunk = new ChunkImpl(new Vector3i(0, 0, 0), blockManager, biomeManager);
        provider.addChunk(topChunk);
        provider.addChunk(bottomChunk);
        for (Vector3i pos : Region3i.createFromMinAndSize(new Vector3i(0, 0, 0),
                new Vector3i(ChunkConstants.SIZE_X, 1, ChunkConstants.SIZE_Z))) {
            topChunk.setSunlight(pos, ChunkConstants.MAX_SUNLIGHT);
            topChunk.setSunlightRegen(pos, ChunkConstants.MAX_SUNLIGHT_REGEN);
        }
        // Place a solid shelf over half the chunk footprint at y = 48, shading everything below.
        for (Vector3i pos : Region3i.createFromMinMax(new Vector3i(16, 48, 0), new Vector3i(31, 48, 31))) {
            bottomChunk.setBlock(pos, solid);
        }
        InternalLightProcessor.generateInternalLighting(bottomChunk);
        propagator.propagateBetween(topChunk, bottomChunk, Side.BOTTOM, false);
        propagator.process();
        sunlightPropagator.process();
        // Directly under the shelf edge the expected light is 14, dropping to 13 one block
        // further in — i.e. one level of attenuation per lateral step under the overhang.
        for (int z = 0; z < ChunkConstants.SIZE_Z; ++z) {
            assertEquals(14, bottomChunk.getSunlight(16, 47, z));
        }
        for (int z = 0; z < ChunkConstants.SIZE_Z; ++z) {
            assertEquals(13, bottomChunk.getSunlight(17, 47, z));
        }
    }

    @Test
    public void testPropagateSunlightAppearingMidChunk() {
        Chunk topChunk = new ChunkImpl(new Vector3i(0, 1, 0), blockManager, biomeManager);
        Chunk bottomChunk = new ChunkImpl(new Vector3i(0, 0, 0), blockManager, biomeManager);
        provider.addChunk(topChunk);
        provider.addChunk(bottomChunk);
        // Start with a completely dark bottom slice of the top chunk ...
        for (Vector3i pos : Region3i.createFromMinAndSize(new Vector3i(0, 0, 0),
                new Vector3i(ChunkConstants.SIZE_X, 1, ChunkConstants.SIZE_Z))) {
            topChunk.setSunlight(pos, (byte) 0);
            topChunk.setSunlightRegen(pos, (byte) 0);
        }
        // ... then give an inset inner window partial regen (32) but still no sunlight,
        // so sunlight only "appears" part-way down as regen matures.
        for (Vector3i pos : Region3i.createFromMinAndSize(new Vector3i(8, 0, 8),
                new Vector3i(ChunkConstants.SIZE_X - 16, 1, ChunkConstants.SIZE_Z - 16))) {
            topChunk.setSunlight(pos, (byte) 0);
            topChunk.setSunlightRegen(pos, (byte) 32);
        }
        InternalLightProcessor.generateInternalLighting(bottomChunk);
        propagator.propagateBetween(topChunk, bottomChunk, Side.BOTTOM, false);
        propagator.process();
        sunlightPropagator.process();
        // Just outside the window (x = 7) the sideways-spilled light decays one level per
        // step upward from y = 33 ...
        for (int i = 0; i < 15; ++i) {
            assertEquals("Incorrect value at " + (33 + i), 14 - i, bottomChunk.getSunlight(7, 33 + i, 16));
        }
        // ... and holds steady at 14 below that band.
        for (int i = 2; i < 33; ++i) {
            assertEquals("Incorrect value at " + i, 14, bottomChunk.getSunlight(7, i, 16));
        }
    }

    /**
     * Stub {@link ChunkProvider} that serves only the chunks explicitly registered with it;
     * every other operation is a no-op or returns a default value.
     */
    private static class SelectChunkProvider implements ChunkProvider {
        private Map<Vector3i, Chunk> chunks = Maps.newHashMap();

        SelectChunkProvider(Chunk... chunks) {
            for (Chunk chunk : chunks) {
                this.chunks.put(chunk.getPosition(), chunk);
            }
        }

        public void addChunk(Chunk chunk) {
            chunks.put(chunk.getPosition(), chunk);
        }

        @Override
        public ChunkViewCore getLocalView(Vector3i centerChunkPos) {
            return null;
        }

        @Override
        public ChunkViewCore getSubviewAroundBlock(Vector3i blockPos, int extent) {
            return null;
        }

        @Override
        public ChunkViewCore getSubviewAroundChunk(Vector3i chunkPos) {
            return null;
        }

        @Override
        public boolean reloadChunk(Vector3i pos) {
            return false;
        }

        @Override
        public void setWorldEntity(EntityRef entity) {
            // do nothing
        }

        @Override
        public Collection<Chunk> getAllChunks() {
            return this.chunks.values();
        }

        @Override
        public void addRelevanceEntity(EntityRef entity, Vector3i distance) {
            // do nothing
        }

        @Override
        public void addRelevanceEntity(EntityRef entity, Vector3i distance, ChunkRegionListener listener) {
            // do nothing
        }

        @Override
        public void updateRelevanceEntity(EntityRef entity, Vector3i distance) {
            // do nothing
        }

        @Override
        public void removeRelevanceEntity(EntityRef entity) {
            // do nothing
        }

        @Override
        public void completeUpdate() {
            // do nothing
        }

        @Override
        public void beginUpdate() {
            // do nothing
        }

        @Override
        public boolean isChunkReady(Vector3i pos) {
            return false;
        }

        @Override
        public Chunk getChunk(int x, int y, int z) {
            return getChunk(new Vector3i(x, y, z));
        }

        @Override
        public Chunk getChunk(Vector3i chunkPos) {
            return chunks.get(chunkPos);
        }

        @Override
        public void dispose() {
            // do nothing
        }

        @Override
        public void restart() {
            // do nothing
        }

        @Override
        public void shutdown() {
            // do nothing
        }

        @Override
        public void purgeWorld() {
            // do nothing
        }
    }
}
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/analytics/admin/v1alpha/analytics_admin.proto package com.google.analytics.admin.v1alpha; /** * * * <pre> * Request message for CreateConversionEvent RPC * </pre> * * Protobuf type {@code google.analytics.admin.v1alpha.CreateConversionEventRequest} */ public final class CreateConversionEventRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.analytics.admin.v1alpha.CreateConversionEventRequest) CreateConversionEventRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateConversionEventRequest.newBuilder() to construct. 
private CreateConversionEventRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateConversionEventRequest() { parent_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateConversionEventRequest(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private CreateConversionEventRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.analytics.admin.v1alpha.ConversionEvent.Builder subBuilder = null; if (conversionEvent_ != null) { subBuilder = conversionEvent_.toBuilder(); } conversionEvent_ = input.readMessage( com.google.analytics.admin.v1alpha.ConversionEvent.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(conversionEvent_); conversionEvent_ = subBuilder.buildPartial(); } break; } case 18: { java.lang.String s = input.readStringRequireUtf8(); parent_ = s; break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
com.google.analytics.admin.v1alpha.AnalyticsAdminProto .internal_static_google_analytics_admin_v1alpha_CreateConversionEventRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.analytics.admin.v1alpha.AnalyticsAdminProto .internal_static_google_analytics_admin_v1alpha_CreateConversionEventRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.analytics.admin.v1alpha.CreateConversionEventRequest.class, com.google.analytics.admin.v1alpha.CreateConversionEventRequest.Builder.class); } public static final int CONVERSION_EVENT_FIELD_NUMBER = 1; private com.google.analytics.admin.v1alpha.ConversionEvent conversionEvent_; /** * * * <pre> * Required. The conversion event to create. * </pre> * * <code> * .google.analytics.admin.v1alpha.ConversionEvent conversion_event = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the conversionEvent field is set. */ @java.lang.Override public boolean hasConversionEvent() { return conversionEvent_ != null; } /** * * * <pre> * Required. The conversion event to create. * </pre> * * <code> * .google.analytics.admin.v1alpha.ConversionEvent conversion_event = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The conversionEvent. */ @java.lang.Override public com.google.analytics.admin.v1alpha.ConversionEvent getConversionEvent() { return conversionEvent_ == null ? com.google.analytics.admin.v1alpha.ConversionEvent.getDefaultInstance() : conversionEvent_; } /** * * * <pre> * Required. The conversion event to create. 
* </pre> * * <code> * .google.analytics.admin.v1alpha.ConversionEvent conversion_event = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.analytics.admin.v1alpha.ConversionEventOrBuilder getConversionEventOrBuilder() { return getConversionEvent(); } public static final int PARENT_FIELD_NUMBER = 2; private volatile java.lang.Object parent_; /** * * * <pre> * Required. The resource name of the parent property where this conversion event will * be created. Format: properties/123 * </pre> * * <code> * string parent = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The resource name of the parent property where this conversion event will * be created. Format: properties/123 * </pre> * * <code> * string parent = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (conversionEvent_ != null) { output.writeMessage(1, getConversionEvent()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, parent_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (conversionEvent_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getConversionEvent()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, parent_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.analytics.admin.v1alpha.CreateConversionEventRequest)) { return super.equals(obj); } com.google.analytics.admin.v1alpha.CreateConversionEventRequest other = (com.google.analytics.admin.v1alpha.CreateConversionEventRequest) obj; if (hasConversionEvent() != other.hasConversionEvent()) return false; if (hasConversionEvent()) { if (!getConversionEvent().equals(other.getConversionEvent())) return false; } if 
(!getParent().equals(other.getParent())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasConversionEvent()) { hash = (37 * hash) + CONVERSION_EVENT_FIELD_NUMBER; hash = (53 * hash) + getConversionEvent().hashCode(); } hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.analytics.admin.v1alpha.CreateConversionEventRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1alpha.CreateConversionEventRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.admin.v1alpha.CreateConversionEventRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1alpha.CreateConversionEventRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.admin.v1alpha.CreateConversionEventRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1alpha.CreateConversionEventRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.admin.v1alpha.CreateConversionEventRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.analytics.admin.v1alpha.CreateConversionEventRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.analytics.admin.v1alpha.CreateConversionEventRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.analytics.admin.v1alpha.CreateConversionEventRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.analytics.admin.v1alpha.CreateConversionEventRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.analytics.admin.v1alpha.CreateConversionEventRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( 
com.google.analytics.admin.v1alpha.CreateConversionEventRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for CreateConversionEvent RPC * </pre> * * Protobuf type {@code google.analytics.admin.v1alpha.CreateConversionEventRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.analytics.admin.v1alpha.CreateConversionEventRequest) com.google.analytics.admin.v1alpha.CreateConversionEventRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.analytics.admin.v1alpha.AnalyticsAdminProto .internal_static_google_analytics_admin_v1alpha_CreateConversionEventRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.analytics.admin.v1alpha.AnalyticsAdminProto .internal_static_google_analytics_admin_v1alpha_CreateConversionEventRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.analytics.admin.v1alpha.CreateConversionEventRequest.class, com.google.analytics.admin.v1alpha.CreateConversionEventRequest.Builder.class); } // Construct using com.google.analytics.admin.v1alpha.CreateConversionEventRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) 
{} } @java.lang.Override public Builder clear() { super.clear(); if (conversionEventBuilder_ == null) { conversionEvent_ = null; } else { conversionEvent_ = null; conversionEventBuilder_ = null; } parent_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.analytics.admin.v1alpha.AnalyticsAdminProto .internal_static_google_analytics_admin_v1alpha_CreateConversionEventRequest_descriptor; } @java.lang.Override public com.google.analytics.admin.v1alpha.CreateConversionEventRequest getDefaultInstanceForType() { return com.google.analytics.admin.v1alpha.CreateConversionEventRequest.getDefaultInstance(); } @java.lang.Override public com.google.analytics.admin.v1alpha.CreateConversionEventRequest build() { com.google.analytics.admin.v1alpha.CreateConversionEventRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.analytics.admin.v1alpha.CreateConversionEventRequest buildPartial() { com.google.analytics.admin.v1alpha.CreateConversionEventRequest result = new com.google.analytics.admin.v1alpha.CreateConversionEventRequest(this); if (conversionEventBuilder_ == null) { result.conversionEvent_ = conversionEvent_; } else { result.conversionEvent_ = conversionEventBuilder_.build(); } result.parent_ = parent_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder 
setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.analytics.admin.v1alpha.CreateConversionEventRequest) { return mergeFrom((com.google.analytics.admin.v1alpha.CreateConversionEventRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.analytics.admin.v1alpha.CreateConversionEventRequest other) { if (other == com.google.analytics.admin.v1alpha.CreateConversionEventRequest.getDefaultInstance()) return this; if (other.hasConversionEvent()) { mergeConversionEvent(other.getConversionEvent()); } if (!other.getParent().isEmpty()) { parent_ = other.parent_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.analytics.admin.v1alpha.CreateConversionEventRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.analytics.admin.v1alpha.CreateConversionEventRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private com.google.analytics.admin.v1alpha.ConversionEvent conversionEvent_; private com.google.protobuf.SingleFieldBuilderV3< com.google.analytics.admin.v1alpha.ConversionEvent, 
com.google.analytics.admin.v1alpha.ConversionEvent.Builder, com.google.analytics.admin.v1alpha.ConversionEventOrBuilder> conversionEventBuilder_; /** * * * <pre> * Required. The conversion event to create. * </pre> * * <code> * .google.analytics.admin.v1alpha.ConversionEvent conversion_event = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the conversionEvent field is set. */ public boolean hasConversionEvent() { return conversionEventBuilder_ != null || conversionEvent_ != null; } /** * * * <pre> * Required. The conversion event to create. * </pre> * * <code> * .google.analytics.admin.v1alpha.ConversionEvent conversion_event = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The conversionEvent. */ public com.google.analytics.admin.v1alpha.ConversionEvent getConversionEvent() { if (conversionEventBuilder_ == null) { return conversionEvent_ == null ? com.google.analytics.admin.v1alpha.ConversionEvent.getDefaultInstance() : conversionEvent_; } else { return conversionEventBuilder_.getMessage(); } } /** * * * <pre> * Required. The conversion event to create. * </pre> * * <code> * .google.analytics.admin.v1alpha.ConversionEvent conversion_event = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setConversionEvent(com.google.analytics.admin.v1alpha.ConversionEvent value) { if (conversionEventBuilder_ == null) { if (value == null) { throw new NullPointerException(); } conversionEvent_ = value; onChanged(); } else { conversionEventBuilder_.setMessage(value); } return this; } /** * * * <pre> * Required. The conversion event to create. 
* </pre> * * <code> * .google.analytics.admin.v1alpha.ConversionEvent conversion_event = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setConversionEvent( com.google.analytics.admin.v1alpha.ConversionEvent.Builder builderForValue) { if (conversionEventBuilder_ == null) { conversionEvent_ = builderForValue.build(); onChanged(); } else { conversionEventBuilder_.setMessage(builderForValue.build()); } return this; } /** * * * <pre> * Required. The conversion event to create. * </pre> * * <code> * .google.analytics.admin.v1alpha.ConversionEvent conversion_event = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeConversionEvent(com.google.analytics.admin.v1alpha.ConversionEvent value) { if (conversionEventBuilder_ == null) { if (conversionEvent_ != null) { conversionEvent_ = com.google.analytics.admin.v1alpha.ConversionEvent.newBuilder(conversionEvent_) .mergeFrom(value) .buildPartial(); } else { conversionEvent_ = value; } onChanged(); } else { conversionEventBuilder_.mergeFrom(value); } return this; } /** * * * <pre> * Required. The conversion event to create. * </pre> * * <code> * .google.analytics.admin.v1alpha.ConversionEvent conversion_event = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearConversionEvent() { if (conversionEventBuilder_ == null) { conversionEvent_ = null; onChanged(); } else { conversionEvent_ = null; conversionEventBuilder_ = null; } return this; } /** * * * <pre> * Required. The conversion event to create. * </pre> * * <code> * .google.analytics.admin.v1alpha.ConversionEvent conversion_event = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.analytics.admin.v1alpha.ConversionEvent.Builder getConversionEventBuilder() { onChanged(); return getConversionEventFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The conversion event to create. 
* </pre> * * <code> * .google.analytics.admin.v1alpha.ConversionEvent conversion_event = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.analytics.admin.v1alpha.ConversionEventOrBuilder getConversionEventOrBuilder() { if (conversionEventBuilder_ != null) { return conversionEventBuilder_.getMessageOrBuilder(); } else { return conversionEvent_ == null ? com.google.analytics.admin.v1alpha.ConversionEvent.getDefaultInstance() : conversionEvent_; } } /** * * * <pre> * Required. The conversion event to create. * </pre> * * <code> * .google.analytics.admin.v1alpha.ConversionEvent conversion_event = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.analytics.admin.v1alpha.ConversionEvent, com.google.analytics.admin.v1alpha.ConversionEvent.Builder, com.google.analytics.admin.v1alpha.ConversionEventOrBuilder> getConversionEventFieldBuilder() { if (conversionEventBuilder_ == null) { conversionEventBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.analytics.admin.v1alpha.ConversionEvent, com.google.analytics.admin.v1alpha.ConversionEvent.Builder, com.google.analytics.admin.v1alpha.ConversionEventOrBuilder>( getConversionEvent(), getParentForChildren(), isClean()); conversionEvent_ = null; } return conversionEventBuilder_; } private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The resource name of the parent property where this conversion event will * be created. Format: properties/123 * </pre> * * <code> * string parent = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. 
*/ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The resource name of the parent property where this conversion event will * be created. Format: properties/123 * </pre> * * <code> * string parent = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The resource name of the parent property where this conversion event will * be created. Format: properties/123 * </pre> * * <code> * string parent = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; onChanged(); return this; } /** * * * <pre> * Required. The resource name of the parent property where this conversion event will * be created. Format: properties/123 * </pre> * * <code> * string parent = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); onChanged(); return this; } /** * * * <pre> * Required. The resource name of the parent property where this conversion event will * be created. 
Format: properties/123 * </pre> * * <code> * string parent = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.analytics.admin.v1alpha.CreateConversionEventRequest) } // @@protoc_insertion_point(class_scope:google.analytics.admin.v1alpha.CreateConversionEventRequest) private static final com.google.analytics.admin.v1alpha.CreateConversionEventRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.analytics.admin.v1alpha.CreateConversionEventRequest(); } public static com.google.analytics.admin.v1alpha.CreateConversionEventRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateConversionEventRequest> PARSER = new com.google.protobuf.AbstractParser<CreateConversionEventRequest>() { @java.lang.Override public CreateConversionEventRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new CreateConversionEventRequest(input, extensionRegistry); } }; public static com.google.protobuf.Parser<CreateConversionEventRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateConversionEventRequest> getParserForType() { 
return PARSER; } @java.lang.Override public com.google.analytics.admin.v1alpha.CreateConversionEventRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.chrome.browser.download; import android.Manifest.permission; import android.content.Context; import android.content.Intent; import android.content.pm.PackageManager; import android.net.Uri; import android.os.AsyncTask; import android.os.Environment; import android.text.TextUtils; import android.util.Log; import android.webkit.MimeTypeMap; import android.webkit.URLUtil; import org.chromium.base.VisibleForTesting; import org.chromium.base.annotations.CalledByNative; import org.chromium.chrome.R; import org.chromium.chrome.browser.infobar.ConfirmInfoBar; import org.chromium.chrome.browser.infobar.InfoBar; import org.chromium.chrome.browser.infobar.InfoBarListeners; import org.chromium.chrome.browser.tab.Tab; import org.chromium.chrome.browser.tabmodel.TabModelSelector; import org.chromium.content.browser.ContentViewDownloadDelegate; import org.chromium.content.browser.DownloadInfo; import org.chromium.content_public.browser.WebContents; import org.chromium.ui.base.WindowAndroid; import org.chromium.ui.base.WindowAndroid.PermissionCallback; import org.chromium.ui.widget.Toast; import java.io.File; /** * Chrome implementation of the ContentViewDownloadDelegate interface. * * Listens to POST and GET download events. GET download requests are passed along to the * Android Download Manager. POST downloads are expected to be handled natively and listener * is responsible for adding the completed download to the download manager. * * Prompts the user when a dangerous file is downloaded. Auto-opens PDFs after downloading. */ public class ChromeDownloadDelegate implements ContentViewDownloadDelegate, InfoBarListeners.Confirm { // The application context. 
    private final Context mContext;
    // Tab this delegate is attached to; used for infobars and native validation callbacks.
    private final Tab mTab;
    // Selector used to close mTab when it was opened only to host a download.
    private final TabModelSelector mTabModelSelector;
    private static final String LOGTAG = "ChromeDownloadDelegate";

    // Pending download request for a dangerous file.
    private DownloadInfo mPendingRequest;

    /**
     * Handles the user's choice on the dangerous-download confirmation infobar. A confirmed GET
     * request is re-enqueued with the Android DownloadManager; a confirmed POST download is
     * reported to DownloadManagerService as already completed. A rejected POST download has its
     * partial file discarded.
     *
     * NOTE(review): unlike onInfoBarDismissed() below, this dereferences mPendingRequest without
     * a null check -- it assumes the infobar button can only fire while a request is pending;
     * confirm that assumption holds.
     */
    @Override
    public void onConfirmInfoBarButtonClicked(ConfirmInfoBar infoBar, boolean confirm) {
        if (mPendingRequest.hasDownloadId()) {
            nativeDangerousDownloadValidated(mTab, mPendingRequest.getDownloadId(), confirm);
            if (confirm) {
                showDownloadStartNotification();
            }
            closeBlankTab();
        } else if (confirm) {
            // User confirmed the download.
            if (mPendingRequest.isGETRequest()) {
                enqueueDownloadManagerRequest(mPendingRequest);
            } else {
                DownloadInfo newDownloadInfo = DownloadInfo.Builder.fromDownloadInfo(
                        mPendingRequest).setIsSuccessful(true).build();
                DownloadManagerService.getDownloadManagerService(mContext).onDownloadCompleted(
                        newDownloadInfo);
            }
        } else {
            // User did not accept the download, discard the file if it is a POST download.
            if (!mPendingRequest.isGETRequest()) {
                discardFile(mPendingRequest.getFilePath());
            }
        }
        mPendingRequest = null;
        infoBar.dismissJavaOnlyInfoBar();
    }

    /** Cleans up the pending dangerous-download request when its infobar goes away. */
    @Override
    public void onInfoBarDismissed(InfoBar infoBar) {
        if (mPendingRequest != null) {
            if (mPendingRequest.hasDownloadId()) {
                nativeDangerousDownloadValidated(mTab, mPendingRequest.getDownloadId(), false);
            } else if (!mPendingRequest.isGETRequest()) {
                // Infobar was dismissed, discard the file if a POST download is pending.
                discardFile(mPendingRequest.getFilePath());
            }
        }
        // Forget the pending request.
        mPendingRequest = null;
    }

    /**
     * Creates ChromeDownloadDelegate.
     * @param context The application context.
     * @param tabModelSelector The TabModelSelector responsible for {@code mTab}.
     * @param tab The corresponding tab instance.
     */
    public ChromeDownloadDelegate(
            Context context, TabModelSelector tabModelSelector, Tab tab) {
        mContext = context;
        mTab = tab;
        mTabModelSelector = tabModelSelector;
        mPendingRequest = null;
    }

    /**
     * Return the download path of a file.
     * @param fileName Name of the file.
     * @return path of the saved file.
     */
    protected String downloadPath(String fileName) {
        // Note: getExternalFilesDir() may return null; the resulting path then starts with
        // "null", which checkExternalStorageAndNotify() detects.
        return mContext.getExternalFilesDir(Environment.DIRECTORY_DOWNLOADS) + "/" + fileName;
    }

    /**
     * Request a download from the given url, or if a streaming viewer is available stream the
     * content into the viewer.
     * @param downloadInfo Information about the download.
     */
    @Override
    public void requestHttpGetDownload(DownloadInfo downloadInfo) {
        // If we're dealing with A/V content that's not explicitly marked for download, check if it
        // is streamable.
        if (!DownloadManagerService.isAttachment(downloadInfo.getContentDisposition())) {
            // Query the package manager to see if there's a registered handler that matches.
            Intent intent = new Intent(Intent.ACTION_VIEW);
            intent.setDataAndType(Uri.parse(downloadInfo.getUrl()), downloadInfo.getMimeType());
            // If the intent is resolved to ourselves, we don't want to attempt to load the url
            // only to try and download it again.
            if (DownloadManagerService.openIntent(mContext, intent, false)) {
                return;
            }
        }
        onDownloadStartNoStream(downloadInfo);
    }

    /**
     * Decide the file name of the final download. The file extension is derived
     * from the MIME type.
     * @param url The full URL to the content that should be downloaded.
     * @param mimeType The MIME type of the content reported by the server.
     * @param contentDisposition Content-Disposition HTTP header, if present.
     * @return The best guess of the file name for the downloaded object.
     */
    @VisibleForTesting
    public static String fileName(String url, String mimeType, String contentDisposition) {
        // URLUtil#guessFileName will prefer the MIME type extension over
        // the file extension only if the latter is of a known MIME type.
// Therefore for things like "file.php" with Content-Type PDF, it will // still generate file names like "file.php" instead of "file.pdf". // If that's the case, rebuild the file extension from the MIME type. String fileName = URLUtil.guessFileName(url, contentDisposition, mimeType); int dotIndex = fileName.lastIndexOf('.'); if (mimeType != null && !mimeType.isEmpty() && dotIndex > 1 // at least one char before the '.' && dotIndex < fileName.length()) { // '.' should not be the last char MimeTypeMap mimeTypeMap = MimeTypeMap.getSingleton(); String fileRoot = fileName.substring(0, dotIndex); String fileExtension = fileName.substring(dotIndex + 1); String fileExtensionMimeType = mimeTypeMap.getMimeTypeFromExtension(fileExtension); // If the file extension's official MIME type and {@code mimeType} // are the same, simply use the file extension. // If not, extension derived from {@code mimeType} is preferred. if (mimeType.equals(fileExtensionMimeType)) { fileName = fileRoot + "." + fileExtension; } else { String mimeExtension = mimeTypeMap.getExtensionFromMimeType(mimeType); if (mimeExtension != null && !mimeExtension.equals(fileExtension)) { fileName = fileRoot + "." + mimeExtension; } } } return fileName; } /** * Notify the host application a download should be done, even if there is a * streaming viewer available for this type. * * @param downloadInfo Information about the download. */ protected void onDownloadStartNoStream(DownloadInfo downloadInfo) { final String newMimeType = remapGenericMimeType( downloadInfo.getMimeType(), downloadInfo.getUrl(), downloadInfo.getFileName()); final String path = TextUtils.isEmpty(downloadInfo.getFileName()) ? 
                fileName(downloadInfo.getUrl(), newMimeType, downloadInfo.getContentDisposition())
                : downloadInfo.getFileName();
        final File file = new File(path);
        final String fileName = file.getName();
        // Bail out (with a user-visible alert) if external storage is unavailable.
        if (!checkExternalStorageAndNotify(downloadPath(fileName))) {
            return;
        }
        String url = sanitizeDownloadUrl(downloadInfo);
        if (url == null) return;
        DownloadInfo newInfo = DownloadInfo.Builder.fromDownloadInfo(downloadInfo)
                .setUrl(url)
                .setMimeType(newMimeType).setDescription(url)
                .setFileName(fileName).setIsGETRequest(true).build();

        // TODO(acleung): This is a temp fix to disable auto downloading if flash files.
        // We want to avoid downloading flash files when it is linked as an iframe.
        // The proper fix would be to let chrome knows which frame originated the request.
        if ("application/x-shockwave-flash".equals(newInfo.getMimeType())) return;

        if (isDangerousFile(fileName, newMimeType)) {
            confirmDangerousDownload(newInfo);
        } else {
            // Not a dangerous file, proceed.
            enqueueDownloadManagerRequest(newInfo);
        }
    }

    /**
     * Sanitize the URL for the download item.
     *
     * @param downloadInfo Information about the download.
     * @return URL to be downloaded, or null if the url cannot be sanitized.
     */
    protected String sanitizeDownloadUrl(DownloadInfo downloadInfo) {
        // Default implementation performs no sanitization; subclasses may override.
        return downloadInfo.getUrl();
    }

    /**
     * Request user confirmation on a dangerous download.
     *
     * @param downloadInfo Information about the download.
     */
    private void confirmDangerousDownload(DownloadInfo downloadInfo) {
        // A Dangerous file is already pending user confirmation, ignore the new download.
        if (mPendingRequest != null) return;
        mPendingRequest = downloadInfo;

        // TODO(dfalcantara): Ask ainslie@ for an icon to use for this InfoBar.
        int drawableId = 0;

        final String titleText = nativeGetDownloadWarningText(mPendingRequest.getFileName());
        final String okButtonText = mContext.getResources().getString(R.string.ok);
        final String cancelButtonText = mContext.getResources().getString(R.string.cancel);
        mTab.getInfoBarContainer().addInfoBar(new ConfirmInfoBar(
                this, drawableId, null, titleText, null, okButtonText, cancelButtonText));
    }

    /**
     * Called when a dangerous download is about to start.
     *
     * @param filename File name of the download item.
     * @param downloadId ID of the download.
     */
    @Override
    public void onDangerousDownload(String filename, int downloadId) {
        DownloadInfo downloadInfo = new DownloadInfo.Builder()
                .setFileName(filename)
                .setDescription(filename)
                .setHasDownloadId(true)
                .setDownloadId(downloadId).build();
        confirmDangerousDownload(downloadInfo);
    }

    /**
     * Launch an info bar if the file name already exists for the download.
     * @param info The information of the file we are about to download.
     * @return Whether an info bar has been launched or not.
     */
    private boolean launchInfoBarIfFileExists(final DownloadInfo info) {
        // Checks if file exists.
        final String fileName = info.getFileName();
        File dir = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS);
        // NOTE(review): mkdir() (not mkdirs()) fails when the parent directory is missing; looks
        // intentional for the public Downloads dir, whose parent should always exist -- confirm.
        if (!dir.mkdir() && !dir.isDirectory()) return false;
        String dirName = dir.getName();
        final File file = new File(dir, info.getFileName());
        String fullDirPath = file.getParent();
        if (!file.exists()) return false;
        if (TextUtils.isEmpty(fileName) || TextUtils.isEmpty(dirName)
                || TextUtils.isEmpty(fullDirPath)) {
            return false;
        }

        nativeLaunchDownloadOverwriteInfoBar(
                this, mTab, info, info.getFileName(), dirName, fullDirPath);
        return true;
    }

    /**
     * Sends the download request to Android download manager.
     *
     * @param info Download information about the download.
     */
    protected void enqueueDownloadManagerRequest(final DownloadInfo info) {
        if (!launchInfoBarIfFileExists(info)) {
            enqueueDownloadManagerRequestInternal(info);
        }
    }

    /**
     * Enqueue download manager request, only from native side.
     *
     * @param overwrite Whether or not we will overwrite the file.
     * @param downloadInfo The download info.
     * @return true iff this request resulted in the tab creating the download to close.
     */
    @CalledByNative
    private boolean enqueueDownloadManagerRequestFromNative(
            boolean overwrite, DownloadInfo downloadInfo) {
        // Android DownloadManager does not have an overwriting option.
        // We remove the file here instead.
        if (overwrite) deleteFileForOverwrite(downloadInfo);
        return enqueueDownloadManagerRequestInternal(downloadInfo);
    }

    /**
     * Deletes an existing file with the download's name from the public Downloads directory so
     * the new download can take its place.
     * @param info Download information whose file name is used for the lookup.
     */
    private void deleteFileForOverwrite(DownloadInfo info) {
        File dir = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS);
        if (!dir.isDirectory()) return;
        final File file = new File(dir, info.getFileName());
        if (!file.delete()) {
            Log.e(LOGTAG, "Failed to delete a file." + info.getFileName());
        }
    }

    /**
     * Hands the request to DownloadManagerService and closes the tab if it exists only to host
     * this download.
     * @param info Download information about the download.
     * @return true iff the blank download tab was closed.
     */
    private boolean enqueueDownloadManagerRequestInternal(final DownloadInfo info) {
        DownloadManagerService.getDownloadManagerService(
                mContext.getApplicationContext()).enqueueDownloadManagerRequest(info, true);
        return closeBlankTab();
    }

    /**
     * Check the external storage and notify user on error.
     *
     * @param fileName Name of the download file.
     * @return true if the storage state permits the download to proceed; false if the user was
     *         alerted of a failure.
     */
    protected boolean checkExternalStorageAndNotify(String fileName) {
        // A path starting with "null" means downloadPath() concatenated a null
        // getExternalFilesDir() result -- external storage is not available.
        if (fileName != null && fileName.startsWith("null")) {
            alertDownloadFailure(R.string.download_no_sdcard_dlg_title);
            return false;
        }

        // Check to see if we have an SDCard
        String status = Environment.getExternalStorageState();
        if (!status.equals(Environment.MEDIA_MOUNTED)) {
            int title;
            // Check to see if the SDCard is busy, same as the music app
            if (status.equals(Environment.MEDIA_SHARED)) {
                title = R.string.download_sdcard_busy_dlg_title;
            } else {
                title = R.string.download_no_sdcard_dlg_title;
            }
            alertDownloadFailure(title);
            return false;
        }
        return true;
    }

    /**
     * Alerts user of download failure.
     *
     * @param resId Error resource ID.
     */
    private void alertDownloadFailure(int resId) {
        Toast.makeText(mContext, resId, Toast.LENGTH_SHORT).show();
    }

    /**
     * Called when download starts.
     *
     * @param filename Name of the file.
     * @param mimeType MIME type of the content.
     */
    @Override
    public void onDownloadStarted(String filename, String mimeType) {
        // Dangerous files instead go through the confirmation flow via onDangerousDownload().
        if (!isDangerousFile(filename, mimeType)) {
            showDownloadStartNotification();
            closeBlankTab();
        }
    }

    /**
     * Shows the download started notification.
     */
    private void showDownloadStartNotification() {
        Toast.makeText(mContext, R.string.download_pending, Toast.LENGTH_SHORT).show();
    }

    /**
     * If the given MIME type is null, or one of the "generic" types (text/plain
     * or application/octet-stream) map it to a type that Android can deal with.
     * If the given type is not generic, return it unchanged.
     *
     * We have to implement this ourselves as
     * MimeTypeMap.remapGenericMimeType() is not public.
     * See http://crbug.com/407829.
     *
     * @param mimeType MIME type provided by the server.
     * @param url URL of the data being loaded.
     * @param filename file name obtained from content disposition header
     * @return The MIME type that should be used for this data.
     */
    private static String remapGenericMimeType(String mimeType, String url, String filename) {
        // If we have one of "generic" MIME types, try to deduce
        // the right MIME type from the file extension (if any):
        if (mimeType == null || mimeType.isEmpty() || "text/plain".equals(mimeType)
                || "application/octet-stream".equals(mimeType)
                || "octet/stream".equals(mimeType)
                || "application/force-download".equals(mimeType)) {

            // Prefer the Content-Disposition file name over the URL when one was supplied.
            if (!TextUtils.isEmpty(filename)) {
                url = filename;
            }
            String extension = MimeTypeMap.getFileExtensionFromUrl(url);
            String newMimeType = MimeTypeMap.getSingleton().getMimeTypeFromExtension(extension);
            if (newMimeType != null) {
                mimeType = newMimeType;
            } else if (extension.equals("dm")) {
                mimeType = OMADownloadHandler.OMA_DRM_MESSAGE_MIME;
            } else if (extension.equals("dd")) {
                mimeType = OMADownloadHandler.OMA_DOWNLOAD_DESCRIPTOR_MIME;
            }
        }
        return mimeType;
    }

    /**
     * Check whether a file is dangerous.
     *
     * @param filename Name of the file.
     * @param mimeType MIME type of the content.
     * @return true if the file is dangerous, or false otherwise.
     */
    protected boolean isDangerousFile(String filename, String mimeType) {
        return nativeIsDownloadDangerous(filename) || isDangerousExtension(
                MimeTypeMap.getSingleton().getExtensionFromMimeType(mimeType));
    }

    /**
     * Check whether a file extension is dangerous.
     *
     * @param ext Extension of the file.
     * @return true if the file is dangerous, or false otherwise.
     */
    private static boolean isDangerousExtension(String ext) {
        return "apk".equals(ext);
    }

    /**
     * Discards a downloaded file.
     *
     * @param filepath File to be discarded.
     */
    private static void discardFile(final String filepath) {
        // File deletion touches disk, so run it off the calling thread.
        new AsyncTask<Void, Void, Void>() {
            @Override
            public Void doInBackground(Void...
                    params) {
                Log.d(LOGTAG, "Discarding download:" + filepath);
                File file = new File(filepath);
                if (file.exists() && !file.delete()) {
                    Log.e(LOGTAG, "Error discarding file: " + filepath);
                }
                return null;
            }
        }.execute();
    }

    /**
     * Close a blank tab just opened for the download purpose.
     * @return true iff the tab was closed.
     */
    private boolean closeBlankTab() {
        WebContents contents = mTab.getWebContents();
        boolean isInitialNavigation = contents == null
                || contents.getNavigationController().isInitialNavigation();
        if (isInitialNavigation) {
            // Tab is created just for download, close it.
            return mTabModelSelector.closeTab(mTab);
        }
        return false;
    }

    /**
     * For certain download types(OMA for example), android DownloadManager should
     * handle them. Call this function to intercept those downloads.
     *
     * @param url URL to be downloaded.
     * @return whether the DownloadManager should intercept the download.
     */
    public boolean shouldInterceptContextMenuDownload(String url) {
        Uri uri = Uri.parse(url);
        String scheme = uri.normalizeScheme().getScheme();
        // Only http(s) URLs are considered for interception.
        if (!"http".equals(scheme) && !"https".equals(scheme)) return false;
        String path = uri.getPath();
        // OMA downloads have extension "dm" or "dd". For the latter, it
        // can be handled when native download completes.
        if (path != null && (path.endsWith(".dm"))) {
            final DownloadInfo downloadInfo = new DownloadInfo.Builder().setUrl(url).build();
            if (mTab == null) return true;
            WindowAndroid window = mTab.getWindowAndroid();
            if (window.hasPermission(permission.WRITE_EXTERNAL_STORAGE)) {
                onDownloadStartNoStream(downloadInfo);
            } else if (window.canRequestPermission(permission.WRITE_EXTERNAL_STORAGE)) {
                PermissionCallback permissionCallback = new PermissionCallback() {
                    @Override
                    public void onRequestPermissionsResult(
                            String[] permissions, int[] grantResults) {
                        // NOTE(review): assumes grantResults is non-empty; Android can deliver an
                        // empty array when the request is interrupted -- confirm handling.
                        if (grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                            onDownloadStartNoStream(downloadInfo);
                        }
                    }
                };
                window.requestPermissions(
                        new String[] {permission.WRITE_EXTERNAL_STORAGE}, permissionCallback);
            }
            return true;
        }
        return false;
    }

    /** @return The application context this delegate was created with. */
    protected Context getContext() {
        return mContext;
    }

    // JNI bindings (implemented natively).
    private static native String nativeGetDownloadWarningText(String filename);
    private static native boolean nativeIsDownloadDangerous(String filename);
    private static native void nativeDangerousDownloadValidated(
            Object tab, int downloadId, boolean accept);
    private static native void nativeLaunchDownloadOverwriteInfoBar(ChromeDownloadDelegate delegate,
            Tab tab, DownloadInfo downloadInfo, String fileName, String dirName,
            String dirFullPath);
}
/* * Copyright 2014 Red Hat Inc. and/or its affiliates and other contributors. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.switchyard.component.common.knowledge.config.builder; import java.util.ArrayList; import java.util.List; import java.util.Properties; import javax.persistence.EntityManagerFactory; import javax.transaction.TransactionManager; import javax.transaction.UserTransaction; import org.drools.core.command.CommandService; import org.drools.core.impl.EnvironmentFactory; import org.jbpm.runtime.manager.impl.KModuleRegisterableItemsFactory; import org.jbpm.runtime.manager.impl.SimpleRuntimeEnvironment; import org.jbpm.runtime.manager.impl.deploy.DeploymentDescriptorImpl; import org.jbpm.runtime.manager.impl.deploy.DeploymentDescriptorManager; import org.jbpm.runtime.manager.impl.mapper.InMemoryMapper; import org.jbpm.runtime.manager.impl.mapper.JPAMapper; import org.jbpm.services.task.commands.TaskCommandExecutorImpl; import org.jbpm.services.task.events.TaskEventSupport; import org.jbpm.services.task.impl.command.CommandBasedTaskService; import org.kie.api.KieServices; import org.kie.api.builder.KieScanner; import org.kie.api.builder.ReleaseId; import org.kie.api.io.Resource; import org.kie.api.marshalling.ObjectMarshallingStrategy; import org.kie.api.runtime.Environment; import org.kie.api.runtime.EnvironmentName; import org.kie.api.runtime.KieContainer; import org.kie.api.runtime.manager.RegisterableItemsFactory; import 
org.kie.api.runtime.manager.RuntimeEnvironment; import org.kie.api.runtime.manager.RuntimeEnvironmentBuilderFactory; import org.kie.api.task.TaskService; import org.kie.api.task.UserGroupCallback; import org.kie.internal.runtime.conf.AuditMode; import org.kie.internal.runtime.conf.DeploymentDescriptor; import org.kie.internal.runtime.manager.InternalRegisterableItemsFactory; import org.kie.internal.runtime.manager.Mapper; import org.kie.internal.runtime.manager.TaskServiceFactory; import org.switchyard.ServiceDomain; import org.switchyard.common.type.reflect.Access; import org.switchyard.common.type.reflect.FieldAccess; import org.switchyard.component.common.knowledge.config.builder.patch.PatchedLocalTaskServiceFactory; import org.switchyard.component.common.knowledge.config.builder.patch.PatchedRuntimeEnvironmentBuilder; import org.switchyard.component.common.knowledge.config.items.CompoundRegisterableItemsFactory; import org.switchyard.component.common.knowledge.config.items.ExtendedRegisterableItemsFactory; import org.switchyard.component.common.knowledge.config.manifest.ContainerManifest; import org.switchyard.component.common.knowledge.config.manifest.Manifest; import org.switchyard.component.common.knowledge.config.manifest.RemoteManifest; import org.switchyard.component.common.knowledge.config.manifest.ResourcesManifest; import org.switchyard.component.common.knowledge.config.model.KnowledgeComponentImplementationModel; import org.switchyard.component.common.knowledge.serial.SerializerObjectMarshallingStrategy; import org.switchyard.component.common.knowledge.transaction.TransactionManagerLocator; import org.switchyard.serial.FormatType; import org.switchyard.serial.SerializerFactory; /** RuntimeEnvironmentBuilder. * * @author David Ward &lt;<a href="mailto:dward@jboss.org">dward@jboss.org</a>&gt; &copy; 2014 Red Hat Inc. 
 */
public class RuntimeEnvironmentBuilder extends KnowledgeBuilder {

    private final KieServices _kieServices;
    private final RuntimeEnvironmentBuilderFactory _runtimeEnvironmentBuilderFactory;
    // Whether the implementation model requested persistent (JPA-backed) runtime state.
    private final boolean _persistent;
    private final EntityManagerFactoryBuilder _entityManagerFactoryBuilder;
    private final ManifestBuilder _manifestBuilder;
    private final PropertiesBuilder _propertiesBuilder;
    private final UserGroupCallbackBuilder _userGroupCallbackBuilder;
    private final RegisterableItemsFactoryBuilder _registerableItemsFactoryBuilder;

    /** Creates a new RuntimeEnvironmentBuilder.
     * @param classLoader classLoader
     * @param serviceDomain serviceDomain
     * @param implementationModel implementationModel
     */
    public RuntimeEnvironmentBuilder(ClassLoader classLoader, ServiceDomain serviceDomain, KnowledgeComponentImplementationModel implementationModel) {
        super(classLoader, serviceDomain);
        _kieServices = KieServices.Factory.get();
        // _runtimeEnvironmentBuilderFactory = org.kie.api.runtime.manager.RuntimeEnvironmentBuilder.Factory.get();
        // The patched factory (rather than the stock one above) is used so build() can reach the
        // underlying SimpleRuntimeEnvironment via getRuntimeEnvironment().
        _runtimeEnvironmentBuilderFactory = new PatchedRuntimeEnvironmentBuilder();
        _persistent = implementationModel != null ? implementationModel.isPersistent() : false;
        _entityManagerFactoryBuilder = new EntityManagerFactoryBuilder(serviceDomain, _persistent);
        _manifestBuilder = ManifestBuilder.builder(getClassLoader(), implementationModel);
        _propertiesBuilder = PropertiesBuilder.builder(implementationModel);
        _userGroupCallbackBuilder = UserGroupCallbackBuilder.builder(getClassLoader(), implementationModel);
        _registerableItemsFactoryBuilder = new RegisterableItemsFactoryBuilder(getClassLoader(), serviceDomain, implementationModel);
    }

    /** Builds a RuntimeEnvironment.
     *
     * The kind of jBPM builder used depends on the manifest: remote manifests get a bare
     * in-memory builder, container manifests a (classpath) kmodule builder, and resource
     * manifests an (in-memory or persistent) default builder with assets added.
     *
     * @return a RuntimeEnvironment
     */
    public RuntimeEnvironment build() {
        final org.kie.api.runtime.manager.RuntimeEnvironmentBuilder jbpmRuntimeEnvironmentBuilder;
        Manifest manifest = _manifestBuilder.build();
        if (manifest instanceof RemoteManifest) {
            RemoteManifest remoteManifest = (RemoteManifest)manifest;
            jbpmRuntimeEnvironmentBuilder = _runtimeEnvironmentBuilderFactory.newDefaultInMemoryBuilder();
            remoteManifest.addToEnvironment(jbpmRuntimeEnvironmentBuilder);
            // we don't do any other building for remote usage
            return jbpmRuntimeEnvironmentBuilder.get();
        } else if (manifest instanceof ContainerManifest) {
            ContainerManifest containerManifest = (ContainerManifest)manifest;
            String baseName = containerManifest.getBaseName();
            ReleaseId releaseId = containerManifest.getReleaseId();
            String sessionName = containerManifest.getSessionName();
            if (releaseId != null) {
                if (baseName != null || sessionName != null) {
                    jbpmRuntimeEnvironmentBuilder = _runtimeEnvironmentBuilderFactory.newDefaultBuilder(releaseId, baseName, sessionName);
                } else {
                    jbpmRuntimeEnvironmentBuilder = _runtimeEnvironmentBuilderFactory.newDefaultBuilder(releaseId);
                }
                // we can't update classpath containers, so no point adding it to environment below here
                containerManifest.addToEnvironment(jbpmRuntimeEnvironmentBuilder);
            } else if (baseName != null || sessionName != null) {
                jbpmRuntimeEnvironmentBuilder = _runtimeEnvironmentBuilderFactory.newClasspathKmoduleDefaultBuilder(baseName, sessionName);
            } else {
                jbpmRuntimeEnvironmentBuilder = _runtimeEnvironmentBuilderFactory.newClasspathKmoduleDefaultBuilder();
            }
        } else {
            if (_persistent) {
                jbpmRuntimeEnvironmentBuilder = _runtimeEnvironmentBuilderFactory.newDefaultBuilder();
            } else {
                jbpmRuntimeEnvironmentBuilder = _runtimeEnvironmentBuilderFactory.newDefaultInMemoryBuilder();
            }
            if (manifest instanceof ResourcesManifest) {
                ResourcesManifest resourcesManifest = (ResourcesManifest)manifest;
                for (Resource resource : resourcesManifest.buildResources()) {
                    jbpmRuntimeEnvironmentBuilder.addAsset(resource, resource.getResourceType());
                }
            }
        }
        jbpmRuntimeEnvironmentBuilder.classLoader(getClassLoader());
        jbpmRuntimeEnvironmentBuilder.persistence(_persistent);
        // provides a noop EntityManagerFactory if no persistence
        EntityManagerFactory entityManagerFactory = _entityManagerFactoryBuilder.build();
        jbpmRuntimeEnvironmentBuilder.entityManagerFactory(entityManagerFactory);
        jbpmRuntimeEnvironmentBuilder.addEnvironmentEntry(EnvironmentName.ENTITY_MANAGER_FACTORY, entityManagerFactory);
        // provides an ootb UserGroupCallback if fallen-back to
        UserGroupCallback userGroupCallback = _userGroupCallbackBuilder.build();
        jbpmRuntimeEnvironmentBuilder.userGroupCallback(userGroupCallback);
        if (_persistent) {
            // Persistent runtimes rely on JTA; wire in the located transaction objects.
            UserTransaction ut = TransactionManagerLocator.INSTANCE.getUserTransaction();
            TransactionManager tm = TransactionManagerLocator.INSTANCE.getTransactionManager();
            jbpmRuntimeEnvironmentBuilder.addEnvironmentEntry(EnvironmentName.TRANSACTION, ut);
            jbpmRuntimeEnvironmentBuilder.addEnvironmentEntry(EnvironmentName.TRANSACTION_MANAGER, tm);
        } else {
            // TODO: why, when no persistence, do we have to do all this?
            jbpmRuntimeEnvironmentBuilder.addEnvironmentEntry("IS_JTA_TRANSACTION", Boolean.FALSE);
            TaskEventSupport taskEventSupport = new TaskEventSupport();
            CommandService executor = new TaskCommandExecutorImpl(EnvironmentFactory.newEnvironment(), taskEventSupport);
            jbpmRuntimeEnvironmentBuilder.addEnvironmentEntry(TaskService.class.getName(), new CommandBasedTaskService(executor, taskEventSupport));
        }
        Properties properties = _propertiesBuilder.build();
        for (Object key : properties.keySet()) {
            String name = (String)key;
            String value = properties.getProperty(name);
            jbpmRuntimeEnvironmentBuilder.addConfiguration(name, value); // add to KieSessionConfiguration
            jbpmRuntimeEnvironmentBuilder.addEnvironmentEntry(name, value); // add to Environment (SWITCHYARD-2393)
        }
        // things that need to be done to the original RuntimeEnvironment before the jBPM RuntimeEnvironmentBuilder is built (get->init)
        /*
         * Access<SimpleRuntimeEnvironment> simpleREAccess = new FieldAccess<SimpleRuntimeEnvironment>(
         * org.jbpm.runtime.manager.impl.RuntimeEnvironmentBuilder.class, "runtimeEnvironment");
         * if (simpleREAccess.isReadable()) {
         * SimpleRuntimeEnvironment originalRE = simpleREAccess.read(jbpmRuntimeEnvironmentBuilder);
         */
        KieScanner scanner = null;
        SimpleRuntimeEnvironment originalRE = ((PatchedRuntimeEnvironmentBuilder)jbpmRuntimeEnvironmentBuilder).getRuntimeEnvironment();
        if (originalRE != null) {
            RegisterableItemsFactory originalRIF = originalRE.getRegisterableItemsFactory();
            if (originalRIF instanceof InternalRegisterableItemsFactory) {
                // Wrap the original factory so SwitchYard-provided items are registered too.
                ExtendedRegisterableItemsFactory extendedRIF = _registerableItemsFactoryBuilder.build();
                CompoundRegisterableItemsFactory compoundRIF = new CompoundRegisterableItemsFactory((InternalRegisterableItemsFactory)originalRIF, extendedRIF);
                jbpmRuntimeEnvironmentBuilder.registerableItemsFactory(compoundRIF);
                ExtendedRegisterableItemsFactory.Env.addToEnvironment(jbpmRuntimeEnvironmentBuilder, compoundRIF);
                if (manifest instanceof ContainerManifest && originalRIF instanceof KModuleRegisterableItemsFactory) {
                    // Pull the private KieContainer out via reflection and optionally start a scanner.
                    Access<KieContainer> kieContainerAccess = new FieldAccess<KieContainer>(KModuleRegisterableItemsFactory.class, "kieContainer");
                    if (kieContainerAccess.isReadable()) {
                        KieContainer kieContainer = kieContainerAccess.read(originalRIF);
                        ((ContainerManifest)manifest).setKieContainer(kieContainer);
                        if (((ContainerManifest)manifest).isScan()) {
                            scanner = _kieServices.newKieScanner(kieContainer);
                            scanner.start(((ContainerManifest)manifest).getScanInterval().longValue());
                        }
                    }
                }
            }
            Mapper mapper;
            AuditMode auditMode;
            if (_persistent) {
                mapper = new JPAMapper(entityManagerFactory);
                auditMode = AuditMode.JPA;
            } else {
                mapper = new InMemoryMapper();
                auditMode = AuditMode.NONE;
            }
            originalRE.setMapper(mapper);
            Environment environmentTemplate = originalRE.getEnvironmentTemplate();
            // set the patched LocalTaskServiceFactory
            originalRE.addToEnvironment(TaskServiceFactory.class.getName(), new PatchedLocalTaskServiceFactory(originalRE));
            // TODO: why, when no persistence, do we have to do all this?
            DeploymentDescriptor deploymentDescriptor = (DeploymentDescriptor)environmentTemplate.get("KieDeploymentDescriptor");
            if (deploymentDescriptor == null) {
                deploymentDescriptor = new DeploymentDescriptorManager().getDefaultDescriptor();
                originalRE.addToEnvironment("KieDeploymentDescriptor", deploymentDescriptor);
            }
            originalRE.addToEnvironment(manifest.getClass().getName(), manifest);
            ((DeploymentDescriptorImpl)deploymentDescriptor).setAuditMode(auditMode);
            if (scanner != null) {
                originalRE.addToEnvironment("KieScanner", scanner);
            }
        }
        /*
         * }
         */
        RuntimeEnvironment runtimeEnvironment = jbpmRuntimeEnvironmentBuilder.get();
        // NOTE(review): originalRE was null-guarded above but is dereferenced unconditionally
        // here and below -- this NPEs if the patched builder ever returns null; confirm.
        Environment environment = originalRE.getEnvironmentTemplate();
        // our ObjectMarshallingStrategy can be added to the Environment after the jBPM RuntimeEnvironmentBuilder is built (get->init)
        List<ObjectMarshallingStrategy> new_oms = new ArrayList<ObjectMarshallingStrategy>();
        new_oms.add(new SerializerObjectMarshallingStrategy(SerializerFactory.create(FormatType.JSON, null, true)));
        ObjectMarshallingStrategy[] old_oms = (ObjectMarshallingStrategy[])environment.get(EnvironmentName.OBJECT_MARSHALLING_STRATEGIES);
        if (old_oms != null) {
            // Keep any pre-existing strategies, after ours (ours takes precedence).
            for (int i = 0; i < old_oms.length; i++) {
                if (old_oms[i] != null) {
                    new_oms.add(old_oms[i]);
                }
            }
        }
        originalRE.addToEnvironment(EnvironmentName.OBJECT_MARSHALLING_STRATEGIES, new_oms.toArray(new ObjectMarshallingStrategy[new_oms.size()]));
        return runtimeEnvironment;
    }

}
package shedar.mods.ic2.nuclearcontrol.crossmod.appeng;

import appeng.api.AEApi;
import appeng.api.implementations.tiles.IChestOrDrive;
import appeng.api.networking.GridFlags;
import appeng.api.networking.IGrid;
import appeng.api.networking.IGridHost;
import appeng.api.networking.IGridNode;
import appeng.api.networking.events.MENetworkCellArrayUpdate;
import appeng.api.networking.events.MENetworkEventSubscribe;
import appeng.api.networking.events.MENetworkStorageEvent;
import appeng.api.storage.ICellInventory;
import appeng.api.storage.ICellInventoryHandler;
import appeng.api.storage.IMEInventoryHandler;
import appeng.api.storage.StorageChannel;
import appeng.api.util.AECableType;
import appeng.api.util.DimensionalCoord;
import appeng.tile.grid.AENetworkTile;
import appeng.tile.storage.TileChest;
import appeng.tile.storage.TileDrive;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.network.NetworkManager;
import net.minecraft.network.Packet;
import net.minecraft.network.play.server.S35PacketUpdateTileEntity;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.world.World;
import net.minecraftforge.common.util.ForgeDirection;
import shedar.mods.ic2.nuclearcontrol.utils.NCLog;

import java.util.ArrayList;
import java.util.List;

/**
 * Tile entity linking IC2 Nuclear Control to an Applied Energistics network. It caches
 * aggregate storage statistics (total/used bytes and item types) gathered from the drives
 * and chests on the attached grid.
 */
public class TileEntityNetworkLink extends AENetworkTile {

    // NOTE(review): these counters are static, so every TileEntityNetworkLink instance shares
    // one set of totals -- links on two different grids would overwrite each other; confirm
    // that only one link per world is expected.
    private static int TOTALBYTES = 0;
    private static int USEDBYTES = 0;
    private static int ITEMTYPETOTAL = 0;
    private static int USEDITEMTYPE = 0;

    public TileEntityNetworkLink(){
        // Require an AE channel for this tile to participate in the network.
        this.getProxy().setFlags( GridFlags.REQUIRE_CHANNEL );
    }

    @Override
    public DimensionalCoord getLocation() {
        return new DimensionalCoord( this );
    }

    @Override
    public AECableType getCableConnectionType(ForgeDirection forgeDirection) {
        return AECableType.SMART;
    }

    //@TileEvent(TileEventType.TICK)
    /** Recomputes the cached storage totals by walking every storage tile on the grid. */
    public void updateNetworkCache(){
        int CacheByteT = 0;
        int CacheByte = 0;
        int CacheItemT = 0;
        int CacheItem = 0;
        List<TileEntity> tileEntity = getTiles();
//NCLog.fatal("SIZE: " + tileEntity.size()); for(int i = 0; i < tileEntity.size(); i++){ TileEntity tile = tileEntity.get(i); //NCLog.error(tile); //NCLog.fatal("x: " + tile.xCoord + " y: " + tile.yCoord + " z: " + tile.zCoord); //NCLog.fatal(tileEntity.get(1).xCoord +"."+tileEntity.get(1).yCoord +"."+tileEntity.get(1).zCoord); if(tile instanceof TileDrive){ TileDrive drive = (TileDrive) tile; //NCLog.fatal("DRIVENULL: " + drive == null); for( int x = 0; x < drive.getInternalInventory().getSizeInventory(); x++ ){ ItemStack is = drive.getInternalInventory().getStackInSlot(x); //NCLog.fatal("IS NULL: " + is == null); //NCLog.error(is.getItem()); //NCLog.error(is.getItem().getClass()); if(is != null){ IMEInventoryHandler inventory = AEApi.instance().registries().cell().getCellInventory( is, null, StorageChannel.ITEMS ); if(inventory instanceof ICellInventoryHandler){ ICellInventoryHandler handler = (ICellInventoryHandler) inventory; ICellInventory cellInventory = handler.getCellInv(); //ICellInventory inv = (ICellInventory) is.getItem(); if( cellInventory != null ) { CacheByteT += cellInventory.getTotalBytes(); CacheByte += cellInventory.getUsedBytes(); CacheItemT += cellInventory.getTotalItemTypes(); CacheItem += cellInventory.getStoredItemTypes(); } } } } } else if(tile instanceof TileChest){ TileChest chest = (TileChest) tile; ItemStack is = chest.getInternalInventory().getStackInSlot(0); if(is != null){ IMEInventoryHandler inventory = AEApi.instance().registries().cell().getCellInventory( is, null, StorageChannel.ITEMS ); if(inventory instanceof ICellInventoryHandler){ ICellInventoryHandler handler = (ICellInventoryHandler) inventory; ICellInventory cellInventory = handler.getCellInv(); if(cellInventory != null){ CacheByteT += cellInventory.getTotalBytes(); CacheByte += cellInventory.getUsedBytes(); CacheItemT += cellInventory.getTotalItemTypes(); CacheItem += cellInventory.getStoredItemTypes(); } } } } } if(CacheByteT != TOTALBYTES) TOTALBYTES = CacheByteT; 
if(CacheByte != USEDBYTES) USEDBYTES = CacheByte; if(CacheItemT != ITEMTYPETOTAL) ITEMTYPETOTAL = CacheItemT; if(CacheItem != USEDITEMTYPE) USEDITEMTYPE = CacheItem; //NCLog.fatal("Total: " + TOTALBYTES); } private List<TileEntity> getTiles(){ //List<ICellContainer> list = new ArrayList<ICellContainer>(); List<TileEntity> list = new ArrayList<TileEntity>(); //IGridNode gridNode = this.getGridNode(ForgeDirection.UNKNOWN); try { //IGrid grid = gridNode.getGrid(); IGrid grid = this.getProxy().getNode().getGrid(); for (Class<? extends IGridHost> clazz : grid.getMachinesClasses()) { for (Class clazz2 : clazz.getInterfaces()) { //NCLog.fatal("Passed Class 2"); //NCLog.fatal(clazz2); if (clazz2 == IChestOrDrive.class) { //NCLog.fatal("Passed If is IChestorDrive"); //NCLog.fatal(grid.getMachines(TileDrive.class)); for (IGridNode con : grid.getMachines(TileDrive.class)) { //list.add((ICellContainer) con.getMachine()); list.add(getBaseTileEntity(con.getGridBlock().getLocation()));//.getMachine().getGridNode(ForgeDirection.UNKNOWN) } //for (IGridNode con : grid.getMachines(TileChest.class)) { // list.add(getBaseTileEntity(con.getGridBlock().getLocation())); //} } } } }catch (Exception e){} return list; } private static TileEntity getBaseTileEntity(DimensionalCoord coord){ if(coord == null) { NCLog.fatal("Coord is null"); return null; } World world = coord.getWorld(); if(world == null) { NCLog.fatal("World is null?"); return null; } // NCLog.fatal("RETURNED Safely"); return world.getTileEntity(coord.x, coord.y, coord.z); } @MENetworkEventSubscribe public void updateviaCellEvent(MENetworkCellArrayUpdate e){ //NCLog.error("THE CALL!"); this.updateNetworkCache(); } @MENetworkEventSubscribe public void updateviaStorageEvent(MENetworkStorageEvent e){ //NCLog.error("THE CALL!"); this.updateNetworkCache(); } @Override public Packet getDescriptionPacket() { NBTTagCompound syncData = new NBTTagCompound(); syncData.setInteger("TotalBytes", TOTALBYTES); syncData.setInteger("UsedBytes", 
USEDBYTES); syncData.setInteger("TotalItems", ITEMTYPETOTAL); syncData.setInteger("UsedItems", USEDITEMTYPE); return new S35PacketUpdateTileEntity(this.xCoord, this.yCoord, this.zCoord, 1, syncData); } @Override public void onDataPacket(NetworkManager net, S35PacketUpdateTileEntity pkt) { TOTALBYTES = pkt.func_148857_g().getInteger("TotalBytes"); USEDBYTES = pkt.func_148857_g().getInteger("UsedBytes"); ITEMTYPETOTAL = pkt.func_148857_g().getInteger("TotalItems"); USEDITEMTYPE = pkt.func_148857_g().getInteger("UsedItems"); } public static int getTOTALBYTES() { return TOTALBYTES; } public static int getUSEDBYTES() { return USEDBYTES; } public static int getITEMTYPETOTAL() { return ITEMTYPETOTAL; } public static int getUSEDITEMTYPE() { return USEDITEMTYPE; } }
/* * Copyright 2016-2020 chronicle.software * * https://chronicle.software * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.openhft.performance.tests.vanilla.tcp; import net.openhft.affinity.Affinity; import net.openhft.chronicle.network.tcp.ChronicleSocketChannel; import net.openhft.chronicle.network.tcp.ChronicleSocketChannelFactory; import org.jetbrains.annotations.NotNull; import java.io.Closeable; import java.io.EOFException; import java.io.IOException; import java.net.InetSocketAddress; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.channels.ClosedChannelException; import java.util.Arrays; import java.util.Random; /** * @author peter.lawrey */ /* Both ends are run with -Xmx64m -verbose:gc ///////// EchoServerMain On a E5-2650 v2 over loopback with onload Throughput was 2880.4 MB/s Loop back echo latency was 5.8/6.2 9.6/19.4 23.2us for 50/90 99/99.9 99.99%tile On an i7-3970X over loopback Starting throughput test Throughput was 2333.0 MB/s Loop back echo latency was 7.5/16.7 28/41 55/64 69 us for 50/90 99/99.9 99.99/99.999 worst %tile Two connections Throughput was 2345.4 MB/s, clients=2 2 clients: Loop back echo latency was 7.8/17.4 29/42 54/63 69 us for 50/90 99/99.9 99.99/99.999 worst %tile Three connections Throughput was 2396.6 MB/s, clients=3 3 clients: Loop back echo latency was 8.3/18.0 30/43 56/67 84 us for 50/90 99/99.9 99.99/99.999 worst %tile Four connections Throughput was 2411.5 MB/s, clients=4 4 clients: Loop back echo 
latency was 8.2/17.7 30/43 56/70 90 us for 50/90 99/99.9 99.99/99.999 worst %tile Six connections Throughput was 2437.7 MB/s, clients=6 Starting latency test rate: 100000 6 clients: Loop back echo latency was 11.4/25.9 46/67 85/102 123 us for 50/90 99/99.9 99.99/99.999 worst %tile Starting latency test rate: 70000 6 clients: Loop back echo latency was 8.5/15.4 25/35 45/56 76 us for 50/90 99/99.9 99.99/99.999 worst %tile Eight connections Throughput was 2479.7 MB/s, clients=8 Starting latency test rate: 100000 8 clients: Loop back echo latency was 21.1/57.2 109/161 214/249 271 us for 50/90 99/99.9 99.99/99.999 worst %tile Starting latency test rate: 70000 8 clients: Loop back echo latency was 9.6/18.8 32/45 58/73 102 us for 50/90 99/99.9 99.99/99.999 worst %tile Ten connections Throughput was 2490.0 MB/s, clients=10 Starting latency test rate: 70000 Average time 13 10 clients: Loop back echo latency was 10.7/23.0 40/57 74/88 108 us for 50/90 99/99.9 99.99/99.999 worst %tile 14 connections Throughput was 2494.3 MB/s, clients=14 Starting latency test rate: 70000 Average time 24 14 clients: Loop back echo latency was 18.9/47.5 88/129 169/202 245 us for 50/90 99/99.9 99.99/99.999 worst %tile Starting latency test rate: 50000 Average time 14 14 clients: Loop back echo latency was 11.9/23.2 40/57 603/1717 2,018 us for 50/90 99/99.9 99.99/99.999 worst %tile 20 connections Throughput was 2513.6 MB/s, clients=20 Starting latency test rate: 50000 20 clients: Loop back echo latency was 17.5/43.9 80/118 161/1581 2,028 us for 50/90 99/99.9 99.99/99.999 worst %tile Starting latency test rate: 30000 20 clients: Loop back echo latency was 13.5/24.5 41/59 1,057/1693 1,967 us for 50/90 99/99.9 99.99/99.999 worst %tile Between two servers via Solarflare with onload on server & client (no minor GCs) Throughput was 1156.0 MB/s Loop back echo latency was 12.2/12.5 21/25 28/465 us for 50/90 99/99.9 99.99/worst %tile Between two servers via Solarflare with onload on client (no minor GCs) 
Throughput was 867.5 MB/s
Loop back echo latency was 15.0/15.7 21/27 30/398 us for 50/90 99/99.9 99.99/worst %tile

//////// EchoServerMain with lowlatency kernel
Throughput was 2450.3 MB/s
Starting latency test rate: 100000
Loop back echo latency was 9.1/20.5 35/50 65/76 81 us for 50/90 99/99.9 99.99/99.999 worst %tile

With 2 clients
Throughput was 2868.9 MB/s
Starting latency test rate: 100000
Loop back echo latency was 9.9/22.0 38/55 75/134 159 us for 50/90 99/99.9 99.99/99.999 worst %tile

//////// NettyEchoServer
Between two servers via Solarflare with onload on server & client (16 minor GCs)
Throughput was 968.7 MB/s
Loop back echo latency was 18.4/19.0 26/31 33/1236 us for 50/90 99/99.9 99.99/worst %tile

Between two servers via Solarflare with onload on client (16 minor GCs)
Throughput was 643.6 MB/s
Loop back echo latency was 20.8/21.8 29/34 38/2286 us for 50/90 99/99.9 99.99/worst %tile
 */
/**
 * Echo benchmark client: opens {@code CLIENTS} TCP connections to an echo
 * server, runs a bulk throughput test, then a series of round-trip latency
 * tests at decreasing target message rates. Configured via the system
 * properties {@code port}, {@code clients}, {@code throughput} and {@code cpu}.
 */
public class EchoClientMain {
    public static final int PORT = Integer.getInteger("port", 8007);
    public static final int CLIENTS = Integer.getInteger("clients", 1);
    public static final int TARGET_THROUGHPUT = Integer.getInteger("throughput", 20_000);
    public static final int CPU = Integer.getInteger("cpu", 0);

    public static void main(@NotNull String... args) throws IOException {
        // Pin the benchmark thread: a whole core when no CPU was requested,
        // otherwise the specific CPU given by -Dcpu=N.
        if (CPU == 0) {
            Affinity.acquireCore();
        } else {
            System.out.println("Binding to CPU " + CPU);
            Affinity.setAffinity(CPU);
        }
        @NotNull String[] hostnames = args.length > 0 ? args : "localhost".split(",");
        @NotNull ChronicleSocketChannel[] sockets = new ChronicleSocketChannel[CLIENTS];
        openConnections(hostnames, PORT, sockets);
        testThroughput(sockets);
        closeConnections(sockets);
        // Reconnect so the latency runs start from fresh, drained sockets.
        openConnections(hostnames, PORT, sockets);
        for (int i : new int[]{/*200_000, 150_000, 120_000,*/ 100_000, 80_000, 60_000, 50_000, 40_000, 30_000, 20_000})
            testByteLatency(i, sockets);
        closeConnections(sockets);
    }

    /**
     * Connects one non-blocking, TCP_NODELAY socket per slot, cycling through
     * the given host names round-robin.
     */
    private static void openConnections(@NotNull String[] hostname, int port, @NotNull ChronicleSocketChannel... sockets) throws IOException {
        for (int j = 0; j < sockets.length; j++) {
            sockets[j] = ChronicleSocketChannelFactory.wrap(new InetSocketAddress(hostname[j % hostname.length], port));
            sockets[j].socket().setTcpNoDelay(true);
            sockets[j].configureBlocking(false);
        }
    }

    /** Closes all benchmark sockets. */
    private static void closeConnections(@NotNull ChronicleSocketChannel... sockets) throws IOException {
        for (@NotNull Closeable socket : sockets)
            socket.close();
    }

    /**
     * Pumps 16 KiB length-prefixed buffers at the echo server for ~10 seconds,
     * reading replies with a lag of {@code window} writes to keep the pipe
     * full, and prints the achieved MB/s.
     */
    private static void testThroughput(@NotNull ChronicleSocketChannel... sockets) throws IOException {
        System.out.println("Starting throughput test, clients=" + CLIENTS);
        int bufferSize = 16 * 1024;
        ByteBuffer bb = ByteBuffer.allocateDirect(bufferSize).order(ByteOrder.LITTLE_ENDIAN);
        int count = 0, window = 8;
        long start = System.nanoTime();
        while (System.nanoTime() - start < 10e9) {
            for (@NotNull ChronicleSocketChannel socket : sockets) {
                bb.clear();
                // First int of the message carries its own length.
                bb.putInt(0, bb.limit());
                if (socket.write(bb) < 0)
                    throw new AssertionError("Socket " + socket + " unable to write in one go.");
            }
            // Only start reading echoes once `window` messages are in flight.
            if (count >= window)
                for (@NotNull ChronicleSocketChannel socket : sockets) {
                    bb.clear();
                    while (socket.read(bb) >= 0 && bb.remaining() > 0) ;
                }
            count++;
        }
        // Drain whatever echoes are still queued so the connection ends clean.
        for (@NotNull ChronicleSocketChannel socket : sockets) {
            try {
                do {
                    bb.clear();
                } while (socket.read(bb) > 0);
            } catch (ClosedChannelException expected) {
            }
        }
        long time = System.nanoTime() - start;
        System.out.printf("Throughput was %.1f MB/s, clients=%d%n", 1e3 * count * bufferSize * sockets.length / time, CLIENTS);
    }

    /**
     * Measures round-trip echo latency of a 40-byte message at roughly
     * {@code targetThroughput} messages/second (randomised inter-send gaps),
     * then prints 50/90/99/99.9/99.99/99.999/worst percentiles.
     */
    private static void testByteLatency(int targetThroughput, @NotNull ChronicleSocketChannel... sockets) throws IOException {
        System.out.println("Starting latency test rate: " + targetThroughput);
        // Sample count scales with the rate, clamped to [1000, 5M] per socket.
        int tests = Math.max(1000, Math.min(300 * targetThroughput, 5_000_000));
        @NotNull long[] times = new long[tests * sockets.length];
        int count = 0;
        long now = System.nanoTime();
        int interval = (int) (1e9 * sockets.length / targetThroughput);
        ByteBuffer bb = ByteBuffer.allocateDirect(40).order(ByteOrder.LITTLE_ENDIAN);
        bb.putInt(0, bb.limit());
        // Magic marker echoed back by the server; verified on every read.
        bb.putInt(4, 0x12345678);
        @NotNull Random rand = new Random();
        @NotNull long[] start = new long[sockets.length];
        // Negative i values are warm-up iterations; their timings are dropped.
        for (int i = Math.max(-20000, -targetThroughput); i < tests; i += sockets.length) {
            now += rand.nextInt(2 * interval);
            // Busy-wait until the randomised send time to avoid scheduler jitter.
            while (System.nanoTime() < now) ;
            long next = now;
            for (int j = 0; j < sockets.length; j++) {
                ChronicleSocketChannel socket = sockets[j];
                // NOTE(review): for j >= 1 this records the previous write's
                // elapsed time (next = nanoTime - start0) as the start stamp,
                // which looks inconsistent with j == 0 using `now` — verify
                // intent with the original benchmark author before changing.
                start[j] = next;
                long start0 = System.nanoTime();
                bb.position(0);
                while (bb.remaining() > 0)
                    if (socket.write(bb) < 0)
                        throw new EOFException();
                next = System.nanoTime() - start0;
            }
            for (int j = 0; j < sockets.length; j++) {
                ChronicleSocketChannel socket = sockets[j];
                bb.position(0);
                while (bb.remaining() > 0)
                    if (socket.read(bb) < 0)
                        throw new EOFException();
                if (bb.getInt(4) != 0x12345678)
                    throw new AssertionError("read error");
                if (i >= 0)
                    times[count++] = System.nanoTime() - start[j];
            }
        }
        System.out.println("Average time " + (Arrays.stream(times).sum() / times.length) / 1000);
        Arrays.sort(times);
        System.out.printf("%d clients: Loop back echo latency was %.1f/%.1f %,d/%,d %,d/%d %,d us for 50/90 99/99.9 99.99/99.999 worst %%tile%n",
                CLIENTS,
                times[times.length / 2] / 1e3,
                times[times.length * 9 / 10] / 1e3,
                times[times.length - times.length / 100] / 1000,
                times[times.length - times.length / 1000] / 1000,
                times[times.length - times.length / 10000 - 1] / 1000,
                times[times.length - times.length / 100000 - 1] / 1000,
                times[times.length - 1] / 1000
        );
    }
}
/**
 * $Revision: 1.0
 * $Date: 2013-5-21
 *
 * Copyright (C) 2013-2020 MNT. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.mnt.base.util;

import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * Providing all configuration items for server.
 *
 * @author Peng Peng
 * #date 2016-02-11
 * #updated 2016-02-11
 *
 */
public class ClientConfiguration {

	private static final Log log = LogFactory.getLog(ClientConfiguration.class);

	private static final ClientConfiguration instance = new ClientConfiguration();

	// Shared configuration store; populated once from the default conf file
	// and optionally augmented through the protected constructor.
	protected static Properties prop = new Properties();

	static {
		loadConfig(ItemKeyDef.V_CONFIG_FILE_PATH);
	}

	protected ClientConfiguration() {
		// empty
	}

	protected ClientConfiguration(String confPath) {
		loadConfig(confPath);
	}

	public static ClientConfiguration getInstance() {
		return instance;
	}

	/**
	 * get the string configuration value, if no corresponding config-item, return null.
	 *
	 * @param key
	 * @return
	 */
	public static String getProperty(String key) {
		return prop.getProperty(key);
	}

	/**
	 * get the string configuration value, if no corresponding config-item, return defaultValue.
	 *
	 * @param key
	 * @param defaultValue
	 * @return
	 */
	public static String getProperty(String key, String defaultValue) {
		String resultVal = prop.getProperty(key);
		return CommonUtil.isEmpty(resultVal) ? defaultValue : resultVal;
	}

	/**
	 * get the configuration value as int, if no corresponding config-item, return 0.
	 *
	 * @param key
	 * @return
	 */
	public static int getIntProperty(String key) {
		return getIntProperty(key, 0);
	}

	/**
	 * get the configuration value as int, if no corresponding config-item, return defaultValue.
	 *
	 * @param key
	 * @param defaultValue
	 * @return
	 */
	public static int getIntProperty(String key, int defaultValue) {
		return CommonUtil.parseAsInt(prop.getProperty(key), defaultValue);
	}

	/**
	 * get the configuration value as float, if no corresponding config-item, return 0.
	 *
	 * @param key
	 * @return
	 */
	public static float getFloatProperty(String key) {
		return getFloatProperty(key, 0);
	}

	/**
	 * get the configuration value as float, if no corresponding config-item, return defaultValue.
	 *
	 * @param key
	 * @param defaultValue
	 * @return
	 */
	public static float getFloatProperty(String key, float defaultValue) {
		return CommonUtil.parseAsFloat(prop.getProperty(key), defaultValue);
	}

	/**
	 * get the configuration value as long, if no corresponding config-item, return 0.
	 *
	 * @param key
	 * @return
	 */
	public static long getLongProperty(String key) {
		return getLongProperty(key, 0);
	}

	/**
	 * get the configuration value as long, if no corresponding config-item, return defaultValue.
	 *
	 * @param key
	 * @param defaultValue
	 * @return
	 */
	public static long getLongProperty(String key, long defaultValue) {
		return CommonUtil.parseAsLong(prop.getProperty(key), defaultValue);
	}

	/**
	 * get the configuration value as double, if no corresponding config-item, return 0.
	 *
	 * @param key
	 * @return
	 */
	public static double getDoubleProperty(String key) {
		return getDoubleProperty(key, 0);
	}

	/**
	 * get the configuration value as double, if no corresponding config-item, return defaultValue.
	 *
	 * @param key
	 * @param defaultValue
	 * @return
	 */
	public static double getDoubleProperty(String key, double defaultValue) {
		return CommonUtil.parseAsDouble(prop.getProperty(key), defaultValue);
	}

	/**
	 * get the configuration value as boolean, if no corresponding config-item, return false.
	 *
	 * @param key
	 * @return
	 */
	public static boolean getBoolProperty(String key) {
		return getBoolProperty(key, false);
	}

	/**
	 * parse the configuration value as boolean, if no corresponding config-item, return the default value.
	 *
	 * @param key
	 * @param defaultValue
	 * @return
	 */
	public static boolean getBoolProperty(String key, boolean defaultValue) {
		return CommonUtil.parseAsBoolean(prop.getProperty(key), defaultValue);
	}

	/**
	 * Load the properties key value pairs from the given stream into a map.
	 * The stream is always closed before returning.
	 *
	 * @param in the properties stream; may be null, in which case an empty map is returned
	 * @return map of all key/value pairs found in the stream
	 */
	public static Map<String, String> loadKeyValuePairs(InputStream in) {
		Map<String, String> keyPairs = new HashMap<String, String>();

		if (in != null) {
			Properties prop = new Properties();
			try {
				prop.load(in);
			} catch (IOException e) {
				log.error("error while load keyvalue pairs to properties.", e);
			} finally {
				try {
					in.close();
				} catch (IOException e) {
					log.error("error while close the properties file: ", e);
				}
			}

			for (String key : prop.stringPropertyNames()) {
				keyPairs.put(key, prop.getProperty(key));
			}
		}

		return keyPairs;
	}

	/**
	 * init the configuration properties base on specfied confPath, by default no need to explicitly invoke this method
	 *
	 * @param confPath
	 */
	protected static void loadConfig(String confPath) {
		InputStream in = getRelativeFileStream(confPath);

		if (in != null) {
			// Close the stream in all cases: the previous version leaked the
			// InputStream returned by getResourceAsStream.
			try {
				prop.load(in);
			} catch (IOException e) {
				// Keep the cause in the log so load failures are diagnosable.
				log.error("error while load prop data from conf: " + confPath, e);
			} finally {
				try {
					in.close();
				} catch (IOException e) {
					log.error("error while close the conf file: " + confPath, e);
				}
			}
		} else if (log.isDebugEnabled()) {
			log.debug("no corresponding conf file, skip to setup configuration for file path: " + confPath);
		}
	}

	/**
	 * Load the properties key value pairs to hash map.
	 * the properties configuration path is based on specified server home.
	 *
	 * @param confPath
	 * @return
	 */
	public static Map<String, String> loadKeyValuePairs(String confPath) {
		return loadKeyValuePairs(getRelativeFileStream(confPath));
	}

	/**
	 * return the conf related file input stream, resolved through the class
	 * path; returns null when the resource does not exist.
	 *
	 * @param confPath
	 * @return
	 */
	public static InputStream getRelativeFileStream(String confPath) {
		return ClientConfiguration.class.getClassLoader().getResourceAsStream(confPath);
	}

	// Default conf file name looked up on the class path at class load time.
	private interface ItemKeyDef {
		String V_CONFIG_FILE_PATH = "client.conf";
	}
}
/**
 * Copyright 2010-2015 Axel Fontaine
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.flywaydb.core.internal.command;

import org.flywaydb.core.api.callback.FlywayCallback;
import org.flywaydb.core.api.FlywayException;
import org.flywaydb.core.api.MigrationInfo;
import org.flywaydb.core.api.MigrationState;
import org.flywaydb.core.api.MigrationVersion;
import org.flywaydb.core.api.resolver.MigrationExecutor;
import org.flywaydb.core.api.resolver.MigrationResolver;
import org.flywaydb.core.internal.dbsupport.DbSupport;
import org.flywaydb.core.internal.dbsupport.Schema;
import org.flywaydb.core.internal.info.MigrationInfoImpl;
import org.flywaydb.core.internal.info.MigrationInfoServiceImpl;
import org.flywaydb.core.internal.metadatatable.AppliedMigration;
import org.flywaydb.core.internal.metadatatable.MetaDataTable;
import org.flywaydb.core.internal.util.StopWatch;
import org.flywaydb.core.internal.util.TimeFormat;
import org.flywaydb.core.internal.util.jdbc.TransactionCallback;
import org.flywaydb.core.internal.util.jdbc.TransactionTemplate;
import org.flywaydb.core.internal.util.logging.Log;
import org.flywaydb.core.internal.util.logging.LogFactory;

import java.sql.Connection;
import java.sql.SQLException;

/**
 * Main workflow for migrating the database.
 *
 * @author Axel Fontaine
 */
public class DbMigrate {
    private static final Log LOG = LogFactory.getLog(DbMigrate.class);

    /**
     * The target version of the migration.
     */
    private final MigrationVersion target;

    /**
     * Database-specific functionality.
     */
    private final DbSupport dbSupport;

    /**
     * The database metadata table.
     */
    private final MetaDataTable metaDataTable;

    /**
     * The schema containing the metadata table.
     */
    private final Schema schema;

    /**
     * The migration resolver.
     */
    private final MigrationResolver migrationResolver;

    /**
     * The connection to use.
     */
    private final Connection connectionMetaDataTable;

    /**
     * The connection to use to perform the actual database migrations.
     */
    private final Connection connectionUserObjects;

    /**
     * Flag whether to ignore failed future migrations or not.
     */
    private final boolean ignoreFailedFutureMigration;

    /**
     * Allows migrations to be run "out of order".
     * <p>If you already have versions 1 and 3 applied, and now a version 2 is found,
     * it will be applied too instead of being ignored.</p>
     * <p>(default: {@code false})</p>
     */
    private final boolean outOfOrder;

    /**
     * This is a list of callbacks that fire before or after the migrate task is executed.
     * You can add as many callbacks as you want. These should be set on the Flyway class
     * by the end user as Flyway will set them automatically for you here.
     */
    private final FlywayCallback[] callbacks;

    /**
     * Creates a new database migrator.
     *
     * @param connectionMetaDataTable     The connection to use.
     * @param connectionUserObjects       The connection to use to perform the actual database migrations.
     * @param dbSupport                   Database-specific functionality.
     * @param metaDataTable               The database metadata table.
     * @param schema                      The schema containing the metadata table.
     * @param migrationResolver           The migration resolver.
     * @param target                      The target version of the migration.
     * @param ignoreFailedFutureMigration Flag whether to ignore failed future migrations or not.
     * @param outOfOrder                  Allows migrations to be run "out of order".
     * @param callbacks                   The lifecycle callbacks to fire around the migrate task.
     */
    public DbMigrate(Connection connectionMetaDataTable, Connection connectionUserObjects, DbSupport dbSupport,
                     MetaDataTable metaDataTable, Schema schema, MigrationResolver migrationResolver,
                     MigrationVersion target, boolean ignoreFailedFutureMigration, boolean outOfOrder, FlywayCallback[] callbacks) {
        this.connectionMetaDataTable = connectionMetaDataTable;
        this.connectionUserObjects = connectionUserObjects;
        this.dbSupport = dbSupport;
        this.metaDataTable = metaDataTable;
        this.schema = schema;
        this.migrationResolver = migrationResolver;
        this.target = target;
        this.ignoreFailedFutureMigration = ignoreFailedFutureMigration;
        this.outOfOrder = outOfOrder;
        this.callbacks = callbacks;
    }

    /**
     * Starts the actual migration.
     *
     * @return The number of successfully applied migrations.
     * @throws FlywayException when migration failed.
     */
    public int migrate() throws FlywayException {
        // Fire the beforeMigrate callbacks, each in its own transaction on the
        // user-objects connection.
        for (final FlywayCallback callback : callbacks) {
            new TransactionTemplate(connectionUserObjects).execute(new TransactionCallback<Object>() {
                @Override
                public Object doInTransaction() throws SQLException {
                    callback.beforeMigrate(connectionUserObjects);
                    return null;
                }
            });
        }

        StopWatch stopWatch = new StopWatch();
        stopWatch.start();

        int migrationSuccessCount = 0;
        // Apply one pending migration per loop iteration, each inside its own
        // metadata-table transaction, until none remain.
        while (true) {
            final boolean firstRun = migrationSuccessCount == 0;
            MigrationVersion result = new TransactionTemplate(connectionMetaDataTable, false).execute(new TransactionCallback<MigrationVersion>() {
                public MigrationVersion doInTransaction() {
                    // Serialise concurrent migrators on the metadata table.
                    metaDataTable.lock();

                    MigrationInfoServiceImpl infoService =
                            new MigrationInfoServiceImpl(migrationResolver, metaDataTable, target, outOfOrder, true);
                    infoService.refresh();

                    MigrationVersion currentSchemaVersion = MigrationVersion.EMPTY;
                    if (infoService.current() != null) {
                        currentSchemaVersion = infoService.current().getVersion();
                    }
                    if (firstRun) {
                        LOG.info("Current version of schema " + schema + ": " + currentSchemaVersion);

                        if (outOfOrder) {
                            LOG.warn("outOfOrder mode is active. Migration of schema " + schema + " may not be reproducible.");
                        }
                    }

                    // Warn when the schema is already ahead of all resolvable migrations.
                    MigrationInfo[] future = infoService.future();
                    if (future.length > 0) {
                        MigrationInfo[] resolved = infoService.resolved();
                        if (resolved.length == 0) {
                            LOG.warn("Schema " + schema + " has version " + currentSchemaVersion
                                    + ", but no migration could be resolved in the configured locations !");
                        } else {
                            LOG.warn("Schema " + schema + " has a version (" + currentSchemaVersion
                                    + ") that is newer than the latest available migration ("
                                    + resolved[resolved.length - 1].getVersion() + ") !");
                        }
                    }

                    // A single failed FUTURE migration may be tolerated when
                    // ignoreFailedFutureMigration is set; anything else aborts.
                    MigrationInfo[] failed = infoService.failed();
                    if (failed.length > 0) {
                        if ((failed.length == 1)
                                && (failed[0].getState() == MigrationState.FUTURE_FAILED)
                                && ignoreFailedFutureMigration) {
                            LOG.warn("Schema " + schema + " contains a failed future migration to version " + failed[0].getVersion() + " !");
                        } else {
                            throw new FlywayException("Schema " + schema + " contains a failed migration to version " + failed[0].getVersion() + " !");
                        }
                    }

                    MigrationInfoImpl[] pendingMigrations = infoService.pending();

                    if (pendingMigrations.length == 0) {
                        return null;
                    }

                    // A pending version below the current one means it is applied out of order.
                    boolean isOutOfOrder = pendingMigrations[0].getVersion().compareTo(currentSchemaVersion) < 0;
                    return applyMigration(pendingMigrations[0], isOutOfOrder);
                }
            });
            if (result == null) {
                // No further migrations available
                break;
            }

            migrationSuccessCount++;
        }

        stopWatch.stop();

        logSummary(migrationSuccessCount, stopWatch.getTotalTimeMillis());

        // Fire the afterMigrate callbacks, mirroring the beforeMigrate loop above.
        for (final FlywayCallback callback : callbacks) {
            new TransactionTemplate(connectionUserObjects).execute(new TransactionCallback<Object>() {
                @Override
                public Object doInTransaction() throws SQLException {
                    callback.afterMigrate(connectionUserObjects);
                    return null;
                }
            });
        }

        return migrationSuccessCount;
    }

    /**
     * Logs the summary of this migration run.
     *
     * @param migrationSuccessCount The number of successfully applied migrations.
     * @param executionTime         The total time taken to perform this migration run (in ms).
     */
    private void logSummary(int migrationSuccessCount, long executionTime) {
        if (migrationSuccessCount == 0) {
            LOG.info("Schema " + schema + " is up to date. No migration necessary.");
            return;
        }

        if (migrationSuccessCount == 1) {
            LOG.info("Successfully applied 1 migration to schema " + schema + " (execution time " + TimeFormat.format(executionTime) + ").");
        } else {
            LOG.info("Successfully applied " + migrationSuccessCount + " migrations to schema " + schema
                    + " (execution time " + TimeFormat.format(executionTime) + ").");
        }
    }

    /**
     * Applies this migration to the database. The migration state and the execution time are updated accordingly.
     *
     * @param migration    The migration to apply.
     * @param isOutOfOrder If this migration is being applied out of order.
     * @return The result of the migration.
     */
    private MigrationVersion applyMigration(final MigrationInfoImpl migration, boolean isOutOfOrder) {
        MigrationVersion version = migration.getVersion();
        if (isOutOfOrder) {
            LOG.info("Migrating schema " + schema + " to version " + version + " (out of order)");
        } else {
            LOG.info("Migrating schema " + schema + " to version " + version);
        }

        StopWatch stopWatch = new StopWatch();
        stopWatch.start();

        try {
            for (final FlywayCallback callback : callbacks) {
                new TransactionTemplate(connectionUserObjects).execute(new TransactionCallback<Object>() {
                    @Override
                    public Object doInTransaction() throws SQLException {
                        callback.beforeEachMigrate(connectionUserObjects, migration);
                        return null;
                    }
                });
            }

            // Run the migration itself, wrapped in a transaction only when the
            // executor supports it (e.g. some DDL cannot run transactionally).
            final MigrationExecutor migrationExecutor = migration.getResolvedMigration().getExecutor();
            if (migrationExecutor.executeInTransaction()) {
                new TransactionTemplate(connectionUserObjects).execute(new TransactionCallback<Void>() {
                    public Void doInTransaction() throws SQLException {
                        migrationExecutor.execute(connectionUserObjects);
                        return null;
                    }
                });
            } else {
                try {
                    migrationExecutor.execute(connectionUserObjects);
                } catch (SQLException e) {
                    throw new FlywayException("Unable to apply migration", e);
                }
            }

            LOG.debug("Successfully completed and committed migration of schema " + schema + " to version " + version);

            for (final FlywayCallback callback : callbacks) {
                new TransactionTemplate(connectionUserObjects).execute(new TransactionCallback<Object>() {
                    @Override
                    public Object doInTransaction() throws SQLException {
                        callback.afterEachMigrate(connectionUserObjects, migration);
                        return null;
                    }
                });
            }
        } catch (FlywayException e) {
            String failedMsg = "Migration of schema " + schema + " to version " + version + " failed!";
            if (dbSupport.supportsDdlTransactions()) {
                LOG.error(failedMsg + " Changes successfully rolled back.");
            } else {
                // Without DDL transactions the partial changes cannot be rolled
                // back, so record the failed attempt in the metadata table.
                LOG.error(failedMsg + " Please restore backups and roll back database and code!");

                stopWatch.stop();
                int executionTime = (int) stopWatch.getTotalTimeMillis();
                AppliedMigration appliedMigration = new AppliedMigration(version, migration.getDescription(),
                        migration.getType(), migration.getScript(), migration.getChecksum(), executionTime, false);
                metaDataTable.addAppliedMigration(appliedMigration);
            }
            throw e;
        }

        stopWatch.stop();
        int executionTime = (int) stopWatch.getTotalTimeMillis();

        // Record the successful migration in the metadata table.
        AppliedMigration appliedMigration = new AppliedMigration(version, migration.getDescription(), migration.getType(),
                migration.getScript(), migration.getChecksum(), executionTime, true);
        metaDataTable.addAppliedMigration(appliedMigration);

        return version;
    }
}
package com.github.riccardove.easyjasub; import java.io.File; import java.io.IOException; import java.io.PrintWriter; import java.util.List; import org.xml.sax.SAXException; import com.github.riccardove.easyjasub.commons.CommonsLangStringUtils; import com.github.riccardove.easyjasub.dictionary.EasyJaSubDictionary; import com.github.riccardove.easyjasub.inputtextsub.InputTextSubException; class EasyJaSubFeedback implements EasyJaSubObserver { private final PrintWriter outputStream; private final PrintWriter errorStream; private final int verbose; public EasyJaSubFeedback(PrintWriter outputStream, PrintWriter errorStream, int verbose) { this.outputStream = outputStream; this.errorStream = errorStream; this.verbose = verbose; } private void println(String text) { outputStream.println(text); } private void lowVerboseMessage(String message) { verboseMessage(0, message); } private void mediumVerboseMessage(String message) { verboseMessage(1, message); } private void verboseMessage(int level, String message) { if (verbose >= level) { println(message); flushOutput(); } } private void flushOutput() { outputStream.flush(); } @Override public void onReadJapaneseSubtitlesStart(File file) { lowVerboseMessage("onReadJapaneseSubtitlesStart " + toString(file)); } @Override public void onReadJapaneseSubtitlesEnd(File file) { lowVerboseMessage("onReadJapaneseSubtitlesEnd " + toString(file)); } @Override public void onReadTranslatedSubtitlesStart(File file) { lowVerboseMessage("onReadTranslatedSubtitlesStart " + toString(file)); } @Override public void onReadTranslatedSubtitlesEnd(File file) { lowVerboseMessage("onReadTranslatedSubtitlesEnd " + toString(file)); } @Override public void onWriteHtmlStart(File directory, File cssFile) { lowVerboseMessage("onWriteHtmlStart " + toString(directory) + " " + toString(cssFile)); } @Override public void onWriteHtmlEnd(File directory) { lowVerboseMessage("onWriteHtmlEnd " + toString(directory)); } @Override public void onWriteImagesStart(String 
wkhtml, File htmlFolder, File bdnFolder, int width) { lowVerboseMessage("onWriteImagesStart " + toString(bdnFolder) + " " + wkhtml + " " + width); } @Override public void onWriteImagesEnd(String wkhtml, File htmlFolder, File bdnFolder) { lowVerboseMessage("onWriteImagesEnd " + toString(bdnFolder)); } @Override public void onWriteBdnXmlFileStart(File file) { lowVerboseMessage("onWriteBdnXmlFileStart " + toString(file)); } @Override public void onWriteBdnXmlFileEnd(File file) { lowVerboseMessage("onWriteBdnXmlFileEnd " + toString(file)); } @Override public void onWriteIdxFileStart(File file, File bdnFile) { lowVerboseMessage("Running BDSup2Sub with arguments -m 100 -x 10 -p keep -T 24p -v -o " + toString(file) + " " + toString(bdnFile) + " to convert generated subtitles"); } @Override public void onWriteIdxFileEnd(File file) { lowVerboseMessage("onWriteIdxFileEnd " + toString(file)); } @Override public void onWriteOutputJapaneseTextFileStart(File file) { lowVerboseMessage("onWriteOutputJapaneseTextFileStart " + toString(file)); } @Override public void onWriteOutputJapaneseTextFileEnd(File file) { lowVerboseMessage("onWriteOutputJapaneseTextFileEnd " + toString(file)); } @Override public void onWriteCssStart(File file) { lowVerboseMessage("onWriteCssStart " + toString(file)); } @Override public void onWriteCssEnd(File file) { lowVerboseMessage("onWriteCssEnd " + toString(file)); } @Override public void onWriteImage(File pngFile, File file) { mediumVerboseMessage("writing image " + toString(pngFile) + " " + toString(file)); } @Override public void onReadJapaneseSubtitlesIOError(File file, IOException ex) throws EasyJaSubException { throw new EasyJaSubException( "Error reading japanese subtitles file " + toString(file) + ": " + ex.getLocalizedMessage()); } @Override public void onReadJapaneseSubtitlesParseError(File file, InputTextSubException ex) throws EasyJaSubException { throw new EasyJaSubException( "Error parsing japanese subtitles file " + toString(file) + " 
content: " + ex.getLocalizedMessage()); } @Override public void onReadTranslatedSubtitlesIOError(File file, IOException ex) throws EasyJaSubException { throw new EasyJaSubException( "Error reading translated subtitles file " + toString(file) + ": " + ex.getLocalizedMessage()); } @Override public void onReadTranslatedSubtitlesParseError(File file, InputTextSubException ex) throws EasyJaSubException { throw new EasyJaSubException( "Error parsing translated subtitles file " + toString(file) + " content: " + ex.getLocalizedMessage()); } @Override public void onWriteCssIOError(File file, IOException ex) throws EasyJaSubException { throw new EasyJaSubException("Error writing css file " + toString(file) + " : " + ex.getLocalizedMessage()); } @Override public void onWriteHtmlError(File htmlFolder, IOException ex) throws EasyJaSubException { throw new EasyJaSubException("Error writing html file on folder " + toString(htmlFolder) + " : " + ex.getLocalizedMessage()); } @Override public void onWriteImagesWkhtmlError(File bdnFolder, Exception ex) throws EasyJaSubException { throw new EasyJaSubException( "Error invoking wkhtmltoimage to write files on folder " + toString(bdnFolder) + " : " + ex.getLocalizedMessage()); } @Override public void onWriteImagesIOError(File bdnFolder, IOException ex) throws EasyJaSubException { throw new EasyJaSubException("Error writing image files on folder " + toString(bdnFolder) + " : " + ex.getLocalizedMessage()); } @Override public void onWriteBdnXmlFileIOError(File file, IOException ex) throws EasyJaSubException { throw new EasyJaSubException("Error writing BDMXML file " + toString(file) + " : " + ex.getLocalizedMessage()); } @Override public void onWriteOutputJapaneseTextFileIOError(File file, IOException ex) throws EasyJaSubException { throw new EasyJaSubException("Error writing japanese text file " + toString(file) + " : " + ex.getLocalizedMessage()); } @Override public void onTranslatedSubDuplicated(String content, int mSeconds, int 
startTime) { lowVerboseMessage("Duplicated translation caption " + content + " starting at " + mSeconds + " at " + startTime); } @Override public void onWriteOutputJapaneseTextFileSkipped(File file) { lowVerboseMessage("onWriteOutputJapaneseTextFileSkipped " + toString(file)); } @Override public void onReadTranslatedSubtitlesSkipped(File file) { lowVerboseMessage("onReadTranslatedSubtitlesSkipped " + toString(file)); } @Override public void onWriteCssSkipped(File file) { lowVerboseMessage("onWriteCssSkipped " + toString(file)); } @Override public void onWriteHtmlFile(File file) { mediumVerboseMessage("onWriteHtmlFile " + toString(file)); } @Override public void onWriteHtmlFileSkipped(File file) { lowVerboseMessage("onWriteHtmlFileSkipped " + toString(file)); } @Override public void onWriteBdnXmlFileSkipped(File file) { lowVerboseMessage("onWriteBdnXmlFileSkipped " + toString(file)); } @Override public void onWriteImageSkipped(File pngFile, File file) { lowVerboseMessage("onWriteImageSkipped " + toString(pngFile) + " " + toString(file)); } @Override public void onWriteIdxFileSkipped(File file, File bdnFile) { lowVerboseMessage("onWriteIdxFileSkipped " + toString(file) + " " + toString(bdnFile)); } @Override public void onReadJapaneseSubtitlesSkipped(File file) { lowVerboseMessage("onReadJapaneseSubtitlesSkipped " + toString(file)); } private static String toString(File file) { return file != null ? file.getAbsolutePath() : "<null>"; } private static String toString(String text) { return text != null ? 
text : "<null>"; } @Override public void onEncodingWarning(String systemEncoding, String charsetstr) { lowVerboseMessage("onEncodingWarning " + systemEncoding); } @Override public void onWriteXmlFileStart(File file) { lowVerboseMessage("onWriteXmlFileStart " + toString(file)); } @Override public void onWriteXmlFileEnd(File file) { lowVerboseMessage("onWriteXmlFileEnd " + toString(file)); } @Override public void onWriteXmlFileIOError(File file, IOException ex) throws EasyJaSubException { throw new EasyJaSubException("Error writing XML file " + toString(file) + ": " + ex.getLocalizedMessage()); } @Override public void onWriteXmlFileSkipped(File file) { lowVerboseMessage("onWriteXmlFileSkipped " + toString(file)); } @Override public void onReadXmlFileStart(File file) { lowVerboseMessage("onReadXmlFileStart " + toString(file)); } @Override public void onReadXmlFileEnd(File file) { lowVerboseMessage("onReadXmlFileEnd " + toString(file)); } @Override public void onReadXmlFileIOError(File file, IOException ex) throws EasyJaSubException { throw new EasyJaSubException("Error reading input XML file " + toString(file) + ": " + ex.getLocalizedMessage()); } @Override public void onReadXmlFileError(File file, SAXException ex) throws EasyJaSubException { throw new EasyJaSubException("Error parsing input XML file " + toString(file) + ": " + ex.getLocalizedMessage()); } @Override public void onWriteJGlossFileStart(File file) { lowVerboseMessage("onWriteJGlossFileStart " + toString(file)); } @Override public void onWriteJGlossFileEnd(File file) { lowVerboseMessage("onWriteJGlossFileEnd " + toString(file)); } @Override public void onWriteJGlossFileIOError(File file, IOException ex) throws EasyJaSubException { throw new EasyJaSubException("Error writing JGloss file " + toString(file) + ": " + ex.getLocalizedMessage()); } @Override public void onWriteJGlossFileSkipped(File file) { lowVerboseMessage("onWriteJGlossFileSkipped " + toString(file)); } @Override public void 
onLuceneErrors(List<String> pronunciationErrors) { if (pronunciationErrors.size() > 0) { lowVerboseMessage("pronunciationErrors " + pronunciationErrors.size() + SystemProperty.getLineSeparator() + CommonsLangStringUtils.join(pronunciationErrors, SystemProperty.getLineSeparator())); } } @Override public void onLuceneParseStart() { lowVerboseMessage("onLuceneParseStart"); } @Override public void onLuceneParseEnd() { lowVerboseMessage("onLuceneParseEnd"); } @Override public void onConvertToHtmlSubtitleListStart(File htmlFolder) { lowVerboseMessage("onConvertToHtmlSubtitleListStart"); } @Override public void onConvertToHtmlSubtitleListEnd(File htmlFolder) { lowVerboseMessage("onConvertToHtmlSubtitleListEnd"); } @Override public void onConvertToHtmlSubtitleListError(File htmlFolder, IOException ex) throws EasyJaSubException { throw new EasyJaSubException("Error generating HTML files: " + ex.getLocalizedMessage()); } @Override public void onDictionaryDeserialize(File file) { lowVerboseMessage("onDictionaryDeserialize " + toString(file)); } @Override public void onDictionaryDeserialized(File file, EasyJaSubDictionary dictionary) { lowVerboseMessage("onDictionaryDeserialized " + toString(file)); } @Override public void onDictionaryDeserializeError(File file, Exception ex) { lowVerboseMessage("Error deserializing dictionary " + toString(file) + ": " + ex.getLocalizedMessage()); } @Override public void onDictionaryJMDictParse(File file) { lowVerboseMessage("onDictionaryJMDictParse " + toString(file)); } @Override public void onDictionaryJMDictParsed(File file) { lowVerboseMessage("onDictionaryJMDictParsed " + toString(file)); } @Override public void onDictionaryJMDictParseError(File file, Exception ex) { lowVerboseMessage("Error parsing JMDict file " + toString(file) + ": " + ex.getLocalizedMessage()); } @Override public void onDictionarySerialize(File file) { lowVerboseMessage("onDictionarySeserialize " + toString(file)); } @Override public void onDictionarySerialized(File 
file) { lowVerboseMessage("onDictionarySerialized " + toString(file)); } @Override public void onDictionarySerializeError(File file, Exception ex) { lowVerboseMessage("Error serializing dictionary " + toString(file) + ": " + ex.getLocalizedMessage()); } @Override public void onWriteImageError(File pngFile, File file) throws EasyJaSubException { throw new EasyJaSubException("Error writing picture file " + toString(pngFile) + " " + toString(file)); } @Override public void onSupConvertMessage(String message) { lowVerboseMessage("Subtitle conversion: " + message); } }
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v10/services/reach_plan_service.proto package com.google.ads.googleads.v10.services; /** * <pre> * The reach curve for the planned products. * </pre> * * Protobuf type {@code google.ads.googleads.v10.services.ReachCurve} */ public final class ReachCurve extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v10.services.ReachCurve) ReachCurveOrBuilder { private static final long serialVersionUID = 0L; // Use ReachCurve.newBuilder() to construct. private ReachCurve(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ReachCurve() { reachForecasts_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ReachCurve(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ReachCurve( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { reachForecasts_ = new java.util.ArrayList<com.google.ads.googleads.v10.services.ReachForecast>(); mutable_bitField0_ |= 0x00000001; } reachForecasts_.add( input.readMessage(com.google.ads.googleads.v10.services.ReachForecast.parser(), extensionRegistry)); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = 
true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000001) != 0)) { reachForecasts_ = java.util.Collections.unmodifiableList(reachForecasts_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v10.services.ReachPlanServiceProto.internal_static_google_ads_googleads_v10_services_ReachCurve_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v10.services.ReachPlanServiceProto.internal_static_google_ads_googleads_v10_services_ReachCurve_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v10.services.ReachCurve.class, com.google.ads.googleads.v10.services.ReachCurve.Builder.class); } public static final int REACH_FORECASTS_FIELD_NUMBER = 1; private java.util.List<com.google.ads.googleads.v10.services.ReachForecast> reachForecasts_; /** * <pre> * All points on the reach curve. * </pre> * * <code>repeated .google.ads.googleads.v10.services.ReachForecast reach_forecasts = 1;</code> */ @java.lang.Override public java.util.List<com.google.ads.googleads.v10.services.ReachForecast> getReachForecastsList() { return reachForecasts_; } /** * <pre> * All points on the reach curve. * </pre> * * <code>repeated .google.ads.googleads.v10.services.ReachForecast reach_forecasts = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.ads.googleads.v10.services.ReachForecastOrBuilder> getReachForecastsOrBuilderList() { return reachForecasts_; } /** * <pre> * All points on the reach curve. 
* </pre> * * <code>repeated .google.ads.googleads.v10.services.ReachForecast reach_forecasts = 1;</code> */ @java.lang.Override public int getReachForecastsCount() { return reachForecasts_.size(); } /** * <pre> * All points on the reach curve. * </pre> * * <code>repeated .google.ads.googleads.v10.services.ReachForecast reach_forecasts = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v10.services.ReachForecast getReachForecasts(int index) { return reachForecasts_.get(index); } /** * <pre> * All points on the reach curve. * </pre> * * <code>repeated .google.ads.googleads.v10.services.ReachForecast reach_forecasts = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v10.services.ReachForecastOrBuilder getReachForecastsOrBuilder( int index) { return reachForecasts_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < reachForecasts_.size(); i++) { output.writeMessage(1, reachForecasts_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < reachForecasts_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, reachForecasts_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v10.services.ReachCurve)) { return super.equals(obj); } com.google.ads.googleads.v10.services.ReachCurve other = (com.google.ads.googleads.v10.services.ReachCurve) obj; if 
(!getReachForecastsList() .equals(other.getReachForecastsList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getReachForecastsCount() > 0) { hash = (37 * hash) + REACH_FORECASTS_FIELD_NUMBER; hash = (53 * hash) + getReachForecastsList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v10.services.ReachCurve parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v10.services.ReachCurve parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v10.services.ReachCurve parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v10.services.ReachCurve parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v10.services.ReachCurve parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v10.services.ReachCurve parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.ads.googleads.v10.services.ReachCurve parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v10.services.ReachCurve parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v10.services.ReachCurve parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v10.services.ReachCurve parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v10.services.ReachCurve parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v10.services.ReachCurve parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v10.services.ReachCurve prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * The reach curve for the planned products. * </pre> * * Protobuf type {@code google.ads.googleads.v10.services.ReachCurve} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v10.services.ReachCurve) com.google.ads.googleads.v10.services.ReachCurveOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v10.services.ReachPlanServiceProto.internal_static_google_ads_googleads_v10_services_ReachCurve_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v10.services.ReachPlanServiceProto.internal_static_google_ads_googleads_v10_services_ReachCurve_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v10.services.ReachCurve.class, com.google.ads.googleads.v10.services.ReachCurve.Builder.class); } // Construct using com.google.ads.googleads.v10.services.ReachCurve.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getReachForecastsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (reachForecastsBuilder_ == null) { reachForecasts_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { reachForecastsBuilder_.clear(); } return this; } @java.lang.Override public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v10.services.ReachPlanServiceProto.internal_static_google_ads_googleads_v10_services_ReachCurve_descriptor; } @java.lang.Override public com.google.ads.googleads.v10.services.ReachCurve getDefaultInstanceForType() { return com.google.ads.googleads.v10.services.ReachCurve.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v10.services.ReachCurve build() { com.google.ads.googleads.v10.services.ReachCurve result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v10.services.ReachCurve buildPartial() { com.google.ads.googleads.v10.services.ReachCurve result = new com.google.ads.googleads.v10.services.ReachCurve(this); int from_bitField0_ = bitField0_; if (reachForecastsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { reachForecasts_ = java.util.Collections.unmodifiableList(reachForecasts_); bitField0_ = (bitField0_ & ~0x00000001); } result.reachForecasts_ = reachForecasts_; } else { result.reachForecasts_ = reachForecastsBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v10.services.ReachCurve) { return mergeFrom((com.google.ads.googleads.v10.services.ReachCurve)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v10.services.ReachCurve other) { if (other == com.google.ads.googleads.v10.services.ReachCurve.getDefaultInstance()) return this; if (reachForecastsBuilder_ == null) { if (!other.reachForecasts_.isEmpty()) { if (reachForecasts_.isEmpty()) { reachForecasts_ = other.reachForecasts_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureReachForecastsIsMutable(); reachForecasts_.addAll(other.reachForecasts_); } onChanged(); } } else { if (!other.reachForecasts_.isEmpty()) { if (reachForecastsBuilder_.isEmpty()) { reachForecastsBuilder_.dispose(); reachForecastsBuilder_ = null; reachForecasts_ = other.reachForecasts_; bitField0_ = (bitField0_ & ~0x00000001); reachForecastsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getReachForecastsFieldBuilder() : null; } else { reachForecastsBuilder_.addAllMessages(other.reachForecasts_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.ads.googleads.v10.services.ReachCurve parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.ads.googleads.v10.services.ReachCurve) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List<com.google.ads.googleads.v10.services.ReachForecast> reachForecasts_ = java.util.Collections.emptyList(); private void ensureReachForecastsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { reachForecasts_ = new java.util.ArrayList<com.google.ads.googleads.v10.services.ReachForecast>(reachForecasts_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v10.services.ReachForecast, com.google.ads.googleads.v10.services.ReachForecast.Builder, com.google.ads.googleads.v10.services.ReachForecastOrBuilder> reachForecastsBuilder_; /** * <pre> * All points on the reach curve. * </pre> * * <code>repeated .google.ads.googleads.v10.services.ReachForecast reach_forecasts = 1;</code> */ public java.util.List<com.google.ads.googleads.v10.services.ReachForecast> getReachForecastsList() { if (reachForecastsBuilder_ == null) { return java.util.Collections.unmodifiableList(reachForecasts_); } else { return reachForecastsBuilder_.getMessageList(); } } /** * <pre> * All points on the reach curve. 
* </pre> * * <code>repeated .google.ads.googleads.v10.services.ReachForecast reach_forecasts = 1;</code> */ public int getReachForecastsCount() { if (reachForecastsBuilder_ == null) { return reachForecasts_.size(); } else { return reachForecastsBuilder_.getCount(); } } /** * <pre> * All points on the reach curve. * </pre> * * <code>repeated .google.ads.googleads.v10.services.ReachForecast reach_forecasts = 1;</code> */ public com.google.ads.googleads.v10.services.ReachForecast getReachForecasts(int index) { if (reachForecastsBuilder_ == null) { return reachForecasts_.get(index); } else { return reachForecastsBuilder_.getMessage(index); } } /** * <pre> * All points on the reach curve. * </pre> * * <code>repeated .google.ads.googleads.v10.services.ReachForecast reach_forecasts = 1;</code> */ public Builder setReachForecasts( int index, com.google.ads.googleads.v10.services.ReachForecast value) { if (reachForecastsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureReachForecastsIsMutable(); reachForecasts_.set(index, value); onChanged(); } else { reachForecastsBuilder_.setMessage(index, value); } return this; } /** * <pre> * All points on the reach curve. * </pre> * * <code>repeated .google.ads.googleads.v10.services.ReachForecast reach_forecasts = 1;</code> */ public Builder setReachForecasts( int index, com.google.ads.googleads.v10.services.ReachForecast.Builder builderForValue) { if (reachForecastsBuilder_ == null) { ensureReachForecastsIsMutable(); reachForecasts_.set(index, builderForValue.build()); onChanged(); } else { reachForecastsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * All points on the reach curve. 
* </pre> * * <code>repeated .google.ads.googleads.v10.services.ReachForecast reach_forecasts = 1;</code> */ public Builder addReachForecasts(com.google.ads.googleads.v10.services.ReachForecast value) { if (reachForecastsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureReachForecastsIsMutable(); reachForecasts_.add(value); onChanged(); } else { reachForecastsBuilder_.addMessage(value); } return this; } /** * <pre> * All points on the reach curve. * </pre> * * <code>repeated .google.ads.googleads.v10.services.ReachForecast reach_forecasts = 1;</code> */ public Builder addReachForecasts( int index, com.google.ads.googleads.v10.services.ReachForecast value) { if (reachForecastsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureReachForecastsIsMutable(); reachForecasts_.add(index, value); onChanged(); } else { reachForecastsBuilder_.addMessage(index, value); } return this; } /** * <pre> * All points on the reach curve. * </pre> * * <code>repeated .google.ads.googleads.v10.services.ReachForecast reach_forecasts = 1;</code> */ public Builder addReachForecasts( com.google.ads.googleads.v10.services.ReachForecast.Builder builderForValue) { if (reachForecastsBuilder_ == null) { ensureReachForecastsIsMutable(); reachForecasts_.add(builderForValue.build()); onChanged(); } else { reachForecastsBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * All points on the reach curve. * </pre> * * <code>repeated .google.ads.googleads.v10.services.ReachForecast reach_forecasts = 1;</code> */ public Builder addReachForecasts( int index, com.google.ads.googleads.v10.services.ReachForecast.Builder builderForValue) { if (reachForecastsBuilder_ == null) { ensureReachForecastsIsMutable(); reachForecasts_.add(index, builderForValue.build()); onChanged(); } else { reachForecastsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * All points on the reach curve. 
* </pre> * * <code>repeated .google.ads.googleads.v10.services.ReachForecast reach_forecasts = 1;</code> */ public Builder addAllReachForecasts( java.lang.Iterable<? extends com.google.ads.googleads.v10.services.ReachForecast> values) { if (reachForecastsBuilder_ == null) { ensureReachForecastsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( values, reachForecasts_); onChanged(); } else { reachForecastsBuilder_.addAllMessages(values); } return this; } /** * <pre> * All points on the reach curve. * </pre> * * <code>repeated .google.ads.googleads.v10.services.ReachForecast reach_forecasts = 1;</code> */ public Builder clearReachForecasts() { if (reachForecastsBuilder_ == null) { reachForecasts_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { reachForecastsBuilder_.clear(); } return this; } /** * <pre> * All points on the reach curve. * </pre> * * <code>repeated .google.ads.googleads.v10.services.ReachForecast reach_forecasts = 1;</code> */ public Builder removeReachForecasts(int index) { if (reachForecastsBuilder_ == null) { ensureReachForecastsIsMutable(); reachForecasts_.remove(index); onChanged(); } else { reachForecastsBuilder_.remove(index); } return this; } /** * <pre> * All points on the reach curve. * </pre> * * <code>repeated .google.ads.googleads.v10.services.ReachForecast reach_forecasts = 1;</code> */ public com.google.ads.googleads.v10.services.ReachForecast.Builder getReachForecastsBuilder( int index) { return getReachForecastsFieldBuilder().getBuilder(index); } /** * <pre> * All points on the reach curve. 
* </pre> * * <code>repeated .google.ads.googleads.v10.services.ReachForecast reach_forecasts = 1;</code> */ public com.google.ads.googleads.v10.services.ReachForecastOrBuilder getReachForecastsOrBuilder( int index) { if (reachForecastsBuilder_ == null) { return reachForecasts_.get(index); } else { return reachForecastsBuilder_.getMessageOrBuilder(index); } } /** * <pre> * All points on the reach curve. * </pre> * * <code>repeated .google.ads.googleads.v10.services.ReachForecast reach_forecasts = 1;</code> */ public java.util.List<? extends com.google.ads.googleads.v10.services.ReachForecastOrBuilder> getReachForecastsOrBuilderList() { if (reachForecastsBuilder_ != null) { return reachForecastsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(reachForecasts_); } } /** * <pre> * All points on the reach curve. * </pre> * * <code>repeated .google.ads.googleads.v10.services.ReachForecast reach_forecasts = 1;</code> */ public com.google.ads.googleads.v10.services.ReachForecast.Builder addReachForecastsBuilder() { return getReachForecastsFieldBuilder().addBuilder( com.google.ads.googleads.v10.services.ReachForecast.getDefaultInstance()); } /** * <pre> * All points on the reach curve. * </pre> * * <code>repeated .google.ads.googleads.v10.services.ReachForecast reach_forecasts = 1;</code> */ public com.google.ads.googleads.v10.services.ReachForecast.Builder addReachForecastsBuilder( int index) { return getReachForecastsFieldBuilder().addBuilder( index, com.google.ads.googleads.v10.services.ReachForecast.getDefaultInstance()); } /** * <pre> * All points on the reach curve. 
* </pre> * * <code>repeated .google.ads.googleads.v10.services.ReachForecast reach_forecasts = 1;</code> */ public java.util.List<com.google.ads.googleads.v10.services.ReachForecast.Builder> getReachForecastsBuilderList() { return getReachForecastsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v10.services.ReachForecast, com.google.ads.googleads.v10.services.ReachForecast.Builder, com.google.ads.googleads.v10.services.ReachForecastOrBuilder> getReachForecastsFieldBuilder() { if (reachForecastsBuilder_ == null) { reachForecastsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v10.services.ReachForecast, com.google.ads.googleads.v10.services.ReachForecast.Builder, com.google.ads.googleads.v10.services.ReachForecastOrBuilder>( reachForecasts_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); reachForecasts_ = null; } return reachForecastsBuilder_; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v10.services.ReachCurve) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v10.services.ReachCurve) private static final com.google.ads.googleads.v10.services.ReachCurve DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v10.services.ReachCurve(); } public static com.google.ads.googleads.v10.services.ReachCurve getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ReachCurve> PARSER = new com.google.protobuf.AbstractParser<ReachCurve>() { @java.lang.Override public ReachCurve parsePartialFrom( com.google.protobuf.CodedInputStream 
input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ReachCurve(input, extensionRegistry); } }; public static com.google.protobuf.Parser<ReachCurve> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ReachCurve> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v10.services.ReachCurve getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.jackrabbit.oak.index.merge;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.commons.json.JsonObject;
import org.apache.jackrabbit.oak.commons.json.JsopBuilder;
import org.apache.jackrabbit.oak.plugins.index.search.spi.query.IndexName;

/**
 * The index diff tools allows to compare and merge indexes.
 */
public class IndexDiff {

    private static final String OAK_INDEX = "/oak:index/";

    /**
     * Extract a single index definition from an export file.
     *
     * @param extractFile the JSON file containing the index definitions
     * @param indexName the node name of the index to extract (e.g. "/oak:index/foo")
     * @return the simplified index definition
     * @throws IllegalArgumentException if the index is not present in the file
     */
    static JsonObject extract(String extractFile, String indexName) {
        JsonObject indexDefs = parseIndexDefinitions(extractFile);
        JsonObject index = indexDefs.getChildren().get(indexName);
        // fail with a clear message instead of an NPE in simplify() below
        if (index == null) {
            throw new IllegalArgumentException(
                    "Index " + indexName + " not found in file: " + extractFile);
        }
        // removing the properties on the parent also affects "index",
        // because the children are the same objects
        removeUninterestingIndexProperties(indexDefs);
        simplify(index);
        return index;
    }

    /**
     * Extract all index definitions of an export file into one JSON file per
     * index, written to the given target directory.
     *
     * @param extractFile the JSON file containing the index definitions
     * @param extractTargetDirectory the directory to write to (created if needed)
     */
    static void extractAll(String extractFile, String extractTargetDirectory) {
        new File(extractTargetDirectory).mkdirs();
        JsonObject indexDefs = parseIndexDefinitions(extractFile);
        removeUninterestingIndexProperties(indexDefs);
        sortPropertiesByName(indexDefs);
        for (String child : indexDefs.getChildren().keySet()) {
            JsonObject index = indexDefs.getChildren().get(child);
            simplify(index);
            // OAK_INDEX is a literal prefix, not a regular expression
            String fileName = child.replace(OAK_INDEX, "");
            // ':' is not a safe file name character on all platforms
            fileName = fileName.replace(':', '-');
            Path p = Paths.get(extractTargetDirectory, fileName + ".json");
            writeFile(p, index);
        }
    }

    /**
     * Write a JSON object to a file, UTF-8 encoded.
     */
    private static void writeFile(Path p, JsonObject json) {
        try {
            // explicit charset: the platform default may not be UTF-8
            Files.write(p, json.toString().getBytes(StandardCharsets.UTF_8));
        } catch (IOException e) {
            throw new IllegalStateException("Error writing file: " + p, e);
        }
    }

    /**
     * Collect the customizations of all indexes found in a directory.
     *
     * @param directory the directory to scan for index JSON files
     * @return a JSON object describing the customized indexes
     */
    static JsonObject collectCustomizations(String directory) {
        Path indexPath = Paths.get(directory);
        JsonObject target = new JsonObject(true);
        collectCustomizationsInDirectory(indexPath, target);
        return target;
    }

    /**
     * Merge the index definitions found in a directory with an (optional) set
     * of new index definitions.
     *
     * @param directory the directory containing the existing definitions
     * @param newIndexFile the file with new definitions, may be null or empty
     * @return the merge result
     */
    static JsonObject mergeIndexes(String directory, String newIndexFile) {
        JsonObject newIndex = null;
        if (newIndexFile != null && !newIndexFile.isEmpty()) {
            newIndex = parseIndexDefinitions(newIndexFile);
        }
        Path indexPath = Paths.get(directory);
        JsonObject target = new JsonObject(true);
        mergeIndexesInDirectory(indexPath, newIndex, target);
        for (String key : target.getChildren().keySet()) {
            JsonObject c = target.getChildren().get(key);
            removeUninterestingIndexProperties(c);
            sortPropertiesByName(c);
            simplify(c);
            // re-putting an existing key does not structurally modify the map,
            // so iterating over keySet() here is safe
            target.getChildren().put(key, c);
        }
        return target;
    }

    /**
     * Merge old (customized) and new (product) index definitions, writing the
     * merged definitions plus diagnostic diff files to the target directory.
     *
     * @param oldIndexFile the file with the old definitions
     * @param newIndexFile the file with the new definitions
     * @param targetDirectory where the result files are written
     * @throws UnsupportedOperationException if a merge conflict is detected
     */
    static void mergeIndex(String oldIndexFile, String newIndexFile, String targetDirectory) {
        JsonObject oldIndexes = parseIndexDefinitions(oldIndexFile);
        removeUninterestingIndexProperties(oldIndexes);
        sortPropertiesByName(oldIndexes);
        simplify(oldIndexes);
        JsonObject newIndexes = parseIndexDefinitions(newIndexFile);
        removeUninterestingIndexProperties(newIndexes);
        sortPropertiesByName(newIndexes);
        simplify(newIndexes);
        List<IndexName> newNames = newIndexes.getChildren().keySet().stream().map(s -> IndexName.parse(s))
                .collect(Collectors.toList());
        List<IndexName> allNames = oldIndexes.getChildren().keySet().stream().map(s -> IndexName.parse(s))
                .collect(Collectors.toList());
        for (IndexName n : newNames) {
            if (n.getCustomerVersion() == 0) {
                IndexName latest = n.getLatestCustomized(allNames);
                IndexName ancestor = n.getLatestProduct(allNames);
                if (latest != null && ancestor != null) {
                    if (n.compareTo(latest) <= 0 || n.compareTo(ancestor) <= 0) {
                        // ignore older versions of indexes
                        continue;
                    }
                    JsonObject latestCustomized = oldIndexes.getChildren().get(latest.getNodeName());
                    String fileName = PathUtils.getName(latest.getNodeName());
                    writeFile(Paths.get(targetDirectory, fileName + ".json"),
                            addParent(latest.getNodeName(), latestCustomized));
                    JsonObject latestAncestor = oldIndexes.getChildren().get(ancestor.getNodeName());
                    fileName = PathUtils.getName(ancestor.getNodeName());
                    writeFile(Paths.get(targetDirectory, fileName + ".json"),
                            addParent(ancestor.getNodeName(), latestAncestor));
                    JsonObject newProduct = newIndexes.getChildren().get(n.getNodeName());
                    fileName = PathUtils.getName(n.getNodeName());
                    writeFile(Paths.get(targetDirectory, fileName + ".json"),
                            addParent(n.getNodeName(), newProduct));
                    JsonObject oldCustomizations = new JsonObject(true);
                    compareIndexes("", latestAncestor, latestCustomized, oldCustomizations);
                    // the old product index might be disabled
                    oldCustomizations.getChildren().remove("type");
                    writeFile(Paths.get(targetDirectory, "oldCustomizations.json"), oldCustomizations);
                    JsonObject productChanges = new JsonObject(true);
                    compareIndexes("", latestAncestor, newProduct, productChanges);
                    writeFile(Paths.get(targetDirectory, "productChanges.json"), productChanges);
                    try {
                        JsonObject merged = IndexDefMergerUtils.merge(
                                "", latestAncestor,
                                latest.getNodeName(), latestCustomized,
                                newProduct, n.getNodeName());
                        fileName = PathUtils.getName(n.nextCustomizedName());
                        writeFile(Paths.get(targetDirectory, fileName + ".json"),
                                addParent(n.nextCustomizedName(), merged));
                        JsonObject newCustomizations = new JsonObject(true);
                        compareIndexes("", newProduct, merged, newCustomizations);
                        writeFile(Paths.get(targetDirectory, "newCustomizations.json"), newCustomizations);
                        JsonObject changes = new JsonObject(true);
                        compareIndexes("", oldCustomizations, newCustomizations, changes);
                        writeFile(Paths.get(targetDirectory, "changes.json"), changes);
                    } catch (UnsupportedOperationException e) {
                        // add the index name for context, keep the cause
                        throw new UnsupportedOperationException(
                                "Index: " + n.getNodeName() + ": " + e.getMessage(), e);
                    }
                }
            }
        }
    }

    /**
     * Wrap a JSON object in a new parent object under the given key.
     */
    private static JsonObject addParent(String key, JsonObject obj) {
        JsonObject result = new JsonObject(true);
        result.getChildren().put(key, obj);
        return result;
    }

    /**
     * Compare two named indexes over all definition files in a directory (or a
     * single definition file).
     *
     * @param directory a directory of JSON files, or a single JSON file
     * @param index1 the node name of the first index
     * @param index2 the node name of the second index
     * @return the differences
     */
    static JsonObject compareIndexes(String directory, String index1, String index2) {
        Path indexPath = Paths.get(directory);
        JsonObject target = new JsonObject(true);
        compareIndexesIndexesInDirectory(indexPath, index1, index2, target);
        return target;
    }

    /**
     * List the relevant index definition files below a directory.
     * The returned stream is backed by an in-memory list, so the underlying
     * Files.walk stream (which holds open directory handles) is closed here
     * and callers do not need to close the result.
     */
    private static Stream<Path> indexFiles(Path indexPath) {
        try (Stream<Path> walk = Files.walk(indexPath)) {
            List<Path> files = walk
                    .filter(path -> Files.isRegularFile(path))
                    .filter(path -> path.toString().endsWith(".json"))
                    .filter(path -> !path.toString().endsWith("allnamespaces.json"))
                    .filter(path -> !path.toString().endsWith("-info.json"))
                    .filter(path -> !path.toString().endsWith("-stats.json"))
                    .collect(Collectors.toList());
            return files.stream();
        } catch (IOException e) {
            throw new IllegalArgumentException("Error reading from " + indexPath, e);
        }
    }

    /**
     * Recursively sort the properties of a JSON object by name (children are
     * left in their original order).
     */
    private static void sortPropertiesByName(JsonObject obj) {
        ArrayList<String> props = new ArrayList<>(obj.getProperties().keySet());
        if (!props.isEmpty()) {
            props.sort(null);
            // re-insert in sorted order; assumes an insertion-ordered map
            for (String key : props) {
                String value = obj.getProperties().remove(key);
                obj.getProperties().put(key, value);
            }
        }
        for (String child : obj.getChildren().keySet()) {
            JsonObject c = obj.getChildren().get(child);
            sortPropertiesByName(c);
        }
    }

    private static void compareIndexesIndexesInDirectory(Path indexPath, String index1, String index2,
            JsonObject target) {
        // was Files.isExecutable(), which only approximates a directory check
        // (the executable bit on directories means "traversable")
        if (Files.isDirectory(indexPath)) {
            indexFiles(indexPath).forEach(path -> {
                JsonObject indexDefinitions = IndexDiff.parseIndexDefinitions(path.toString());
                compareIndexes(indexDefinitions, indexPath.toString(), path.toString(), index1, index2, target);
            });
        } else {
            JsonObject allIndexDefinitions = IndexDiff.parseIndexDefinitions(indexPath.toString());
            for (String key : allIndexDefinitions.getChildren().keySet()) {
                JsonObject indexDefinitions = allIndexDefinitions.getChildren().get(key);
                compareIndexes(indexDefinitions, "", key, index1, index2, target);
            }
        }
    }

    private static void collectCustomizationsInDirectory(Path indexPath, JsonObject target) {
        indexFiles(indexPath).forEach(path -> {
            JsonObject indexDefinitions = IndexDiff.parseIndexDefinitions(path.toString());
            showCustomIndexes(indexDefinitions, indexPath.toString(), path.toString(), target);
        });
    }

    private static void mergeIndexesInDirectory(Path indexPath, JsonObject newIndex, JsonObject target) {
        indexFiles(indexPath).forEach(path -> {
            JsonObject indexDefinitions = IndexDiff.parseIndexDefinitions(path.toString());
            simplify(indexDefinitions);
            mergeIndexes(indexDefinitions, indexPath.toString(), path.toString(), newIndex, target);
        });
    }

    /**
     * Merge the new indexes into one file's existing definitions; the newly
     * produced definitions are collected in "target" under the file name.
     */
    private static void mergeIndexes(JsonObject indexDefinitions, String basePath, String fileName,
            JsonObject newIndexes, JsonObject target) {
        JsonObject targetFile = new JsonObject(true);
        if (newIndexes != null) {
            for (String newIndexKey : newIndexes.getChildren().keySet()) {
                if (indexDefinitions.getChildren().containsKey(newIndexKey)) {
                    targetFile.getProperties().put(newIndexKey,
                            JsopBuilder.encode("WARNING: already exists"));
                }
            }
        } else {
            newIndexes = new JsonObject(true);
        }
        // the superseded indexes of the old repository
        List<String> supersededKeys = new ArrayList<>(IndexMerge.getSupersededIndexDefs(indexDefinitions));
        Collections.sort(supersededKeys);
        // keep only old indexes that are not superseded
        Map<String, JsonObject> indexMap = indexDefinitions.getChildren();
        for (String superseded : supersededKeys) {
            indexMap.remove(superseded);
        }
        Set<String> indexKeys = indexDefinitions.getChildren().keySet();
        try {
            IndexDefMergerUtils.merge(newIndexes, indexDefinitions);
            // keep only the keys that the merge added
            Set<String> newIndexKeys = new HashSet<>(newIndexes.getChildren().keySet());
            newIndexKeys.removeAll(indexKeys);
            for (String newIndexKey : newIndexKeys) {
                JsonObject merged = newIndexes.getChildren().get(newIndexKey);
                if (merged != null) {
                    targetFile.getChildren().put(newIndexKey, merged);
                }
            }
        } catch (UnsupportedOperationException e) {
            // best-effort: record the failure in the result instead of aborting;
            // no logger is available in this class, so keep the stack trace on stderr
            e.printStackTrace();
            targetFile.getProperties().put("failed", JsopBuilder.encode(e.toString()));
        }
        addIfNotEmpty(basePath, fileName, targetFile, target);
    }

    /**
     * Add "targetFile" to "target" under the file name (relative to basePath),
     * but only if it is not empty.
     */
    private static void addIfNotEmpty(String basePath, String fileName, JsonObject targetFile, JsonObject target) {
        if (!targetFile.getProperties().isEmpty() || !targetFile.getChildren().isEmpty()) {
            String f = fileName;
            if (f.startsWith(basePath)) {
                f = f.substring(basePath.length());
            }
            target.getChildren().put(f, targetFile);
        }
    }

    /**
     * Collect the new and customized indexes of one definition file into
     * "target".
     */
    private static void showCustomIndexes(JsonObject indexDefinitions, String basePath, String fileName,
            JsonObject target) {
        JsonObject targetFile = new JsonObject(true);
        processAndRemoveIllegalIndexNames(indexDefinitions, targetFile);
        removeUninterestingIndex(indexDefinitions);
        removeUninterestingIndexProperties(indexDefinitions);
        removeUnusedIndexes(indexDefinitions);
        for (String k : indexDefinitions.getChildren().keySet()) {
            if (!k.startsWith(OAK_INDEX)) {
                targetFile.getProperties().put(k,
                        JsopBuilder.encode("WARNING: Index not under " + OAK_INDEX));
                continue;
            }
            if (!k.contains("-custom-")) {
                continue;
            }
            listNewAndCustomizedIndexes(indexDefinitions, k, targetFile);
        }
        addIfNotEmpty(basePath, fileName, targetFile, target);
    }

    private static void compareIndexes(JsonObject indexDefinitions, String basePath, String fileName, String index1,
            String index2, JsonObject target) {
        JsonObject targetFile = new JsonObject(true);
        JsonObject i1 = indexDefinitions.getChildren().get(index1);
        JsonObject i2 = indexDefinitions.getChildren().get(index2);
        if (i1 != null && i2 != null) {
            compareIndexes("", i1, i2, targetFile);
        }
        addIfNotEmpty(basePath, fileName, targetFile, target);
    }

    /**
     * For a customized index, record what out-of-the-box index it customizes
     * and how it differs; indexes without an out-of-the-box base are "new".
     */
    private static void listNewAndCustomizedIndexes(JsonObject indexDefinitions, String indexNodeName,
            JsonObject target) {
        JsonObject index = indexDefinitions.getChildren().get(indexNodeName);
        String nodeName = indexNodeName.substring(OAK_INDEX.length());
        IndexName indexName = IndexName.parse(nodeName);
        String ootb = indexName.getBaseName();
        if (indexName.getProductVersion() > 1) {
            ootb += "-" + indexName.getProductVersion();
        }
        simplify(indexDefinitions);
        JsonObject ootbIndex = indexDefinitions.getChildren().get(OAK_INDEX + ootb);
        if (ootbIndex != null) {
            JsonObject targetCustom = new JsonObject(true);
            targetCustom.getProperties().put("customizes", JsopBuilder.encode(OAK_INDEX + ootb));
            target.getChildren().put(indexNodeName, targetCustom);
            compareIndexes("", ootbIndex, index, targetCustom);
        } else {
            target.getProperties().put(indexNodeName, JsopBuilder.encode("new"));
        }
    }

    /**
     * Remove indexes with illegal names, recording a warning for each in
     * "target".
     */
    private static void processAndRemoveIllegalIndexNames(JsonObject indexDefinitions, JsonObject target) {
        // copy the key set: we remove entries while iterating
        Set<String> indexes = new HashSet<>(indexDefinitions.getChildren().keySet());
        for (String k : indexes) {
            if (!k.startsWith(OAK_INDEX)) {
                continue;
            }
            String nodeName = k.substring(OAK_INDEX.length());
            IndexName indexName = IndexName.parse(nodeName);
            if (!indexName.isLegal()) {
                target.getProperties().put(k, JsopBuilder.encode("WARNING: Invalid name"));
                indexDefinitions.getChildren().remove(k);
            }
        }
    }

    // intentionally empty: extension point for filtering out indexes
    private static void removeUninterestingIndex(JsonObject indexDefinitions) {
    }

    /**
     * Compare two index definitions recursively; property changes and
     * added/removed children are recorded in "target", prefixed by "path".
     */
    private static void compareIndexes(String path, JsonObject ootb, JsonObject custom, JsonObject target) {
        // an insertion-ordered "set" of all property names of both sides
        LinkedHashMap<String, Boolean> properties = new LinkedHashMap<>();
        addAllProperties(ootb, properties);
        addAllProperties(custom, properties);
        for (String k : properties.keySet()) {
            String op = ootb.getProperties().get(k);
            String cp = custom.getProperties().get(k);
            if (!Objects.equals(op, cp)) {
                JsonObject change = new JsonObject(true);
                if (op != null) {
                    change.getProperties().put("old", op);
                }
                if (cp != null) {
                    change.getProperties().put("new", cp);
                }
                target.getChildren().put(path + k, change);
            }
        }
        LinkedHashMap<String, Boolean> children = new LinkedHashMap<>();
        addAllChildren(ootb, children);
        addAllChildren(custom, children);
        for (String k : children.keySet()) {
            JsonObject oc = ootb.getChildren().get(k);
            JsonObject cc = custom.getChildren().get(k);
            if (!isSameJson(oc, cc)) {
                if (oc == null) {
                    target.getProperties().put(path + k, JsopBuilder.encode("added"));
                } else if (cc == null) {
                    target.getProperties().put(path + k, JsopBuilder.encode("removed"));
                } else {
                    compareIndexes(path + k + "/", oc, cc, target);
                }
            }
        }
        compareOrder(path, ootb, custom, target);
    }

    private static void addAllChildren(JsonObject source, LinkedHashMap<String, Boolean> target) {
        for (String k : source.getChildren().keySet()) {
            target.put(k, true);
        }
    }

    /**
     * Detect whether the children common to both sides appear in a different
     * order, and record a warning in "target" if so.
     */
    private static void compareOrder(String path, JsonObject ootb, JsonObject custom, JsonObject target) {
        // list of entries, sorted by how they appear in the ootb case
        ArrayList<String> bothSortedByOotb = new ArrayList<>();
        for (String k : ootb.getChildren().keySet()) {
            if (custom.getChildren().containsKey(k)) {
                bothSortedByOotb.add(k);
            }
        }
        // list of entries, sorted by how they appear in the custom case
        ArrayList<String> bothSortedByCustom = new ArrayList<>();
        for (String k : custom.getChildren().keySet()) {
            if (ootb.getChildren().containsKey(k)) {
                bothSortedByCustom.add(k);
            }
        }
        if (!bothSortedByOotb.toString().equals(bothSortedByCustom.toString())) {
            JsonObject change = new JsonObject(true);
            change.getProperties().put("warning", JsopBuilder.encode("WARNING: Order is different"));
            change.getProperties().put("ootb", JsopBuilder.encode(bothSortedByOotb.toString()));
            change.getProperties().put("custom", JsopBuilder.encode(bothSortedByCustom.toString()));
            target.getChildren().put(path + "<order>", change);
        }
    }

    /**
     * Compare two JSON objects by their serialized form; two nulls are equal.
     */
    private static boolean isSameJson(JsonObject a, JsonObject b) {
        if (a == null || b == null) {
            return a == null && b == null;
        }
        return a.toString().equals(b.toString());
    }

    private static void addAllProperties(JsonObject source, LinkedHashMap<String, Boolean> target) {
        for (String k : source.getProperties().keySet()) {
            target.put(k, true);
        }
    }

    /**
     * Keep only the latest version of each index; older customized versions
     * are removed.
     */
    private static void removeUnusedIndexes(JsonObject indexDefinitions) {
        HashMap<String, IndexName> latest = new HashMap<>();
        // copy the key set: we remove entries while iterating
        Set<String> indexes = new HashSet<>(indexDefinitions.getChildren().keySet());
        for (String k : indexes) {
            if (!k.startsWith(OAK_INDEX)) {
                continue;
            }
            String nodeName = k.substring(OAK_INDEX.length());
            IndexName indexName = IndexName.parse(nodeName);
            String baseName = indexName.getBaseName();
            IndexName old = latest.get(baseName);
            if (old == null) {
                latest.put(baseName, indexName);
            } else {
                if (old.compareTo(indexName) < 0) {
                    if (old.getCustomerVersion() > 0) {
                        indexDefinitions.getChildren().remove(OAK_INDEX + old.getNodeName());
                    }
                    latest.put(baseName, indexName);
                } else {
                    indexDefinitions.getChildren().remove(OAK_INDEX + nodeName);
                }
            }
        }
    }

    /**
     * Remove properties that are not needed for comparing or merging.
     */
    private static void removeUninterestingIndexProperties(JsonObject indexDefinitions) {
        for (String k : indexDefinitions.getChildren().keySet()) {
            JsonObject indexDef = indexDefinitions.getChildren().get(k);
            indexDef.getProperties().remove("reindexCount");
            indexDef.getProperties().remove("reindex");
            indexDef.getProperties().remove("seed");
            indexDef.getProperties().remove(":version");
        }
    }

    /**
     * Recursively normalize property values for display: strip "str:"/"nam:"
     * type prefixes and remove node UUIDs.
     */
    private static void simplify(JsonObject json) {
        for (String k : json.getChildren().keySet()) {
            JsonObject child = json.getChildren().get(k);
            simplify(child);
            // the following properties are not strictly needed for display,
            // but we keep them to avoid issues with validation
            // child.getProperties().remove("jcr:created");
            // child.getProperties().remove("jcr:createdBy");
            // child.getProperties().remove("jcr:lastModified");
            // child.getProperties().remove("jcr:lastModifiedBy");
            // the UUID we remove, because duplicate UUIDs are not allowed
            child.getProperties().remove("jcr:uuid");
            for (String p : child.getProperties().keySet()) {
                String v = child.getProperties().get(p);
                if (v.startsWith("\"str:") || v.startsWith("\"nam:")) {
                    v = "\"" + v.substring(5);
                    child.getProperties().put(p, v);
                } else if (v.startsWith("[") && v.contains("\"nam:")) {
                    v = v.replaceAll("\\[\"nam:", "\\[\"");
                    v = v.replaceAll(", \"nam:", ", \"");
                    child.getProperties().put(p, v);
                } else if (v.startsWith("\":blobId:")) {
                    String base64 = v.substring(9, v.length() - 1);
                    String clear = new String(java.util.Base64.getDecoder().decode(base64), StandardCharsets.UTF_8);
                    v = JsopBuilder.encode(clear);
                    // we don't update the property, otherwise importing the index
                    // would change the type
                    // child.getProperties().put(p, v);
                }
            }
        }
    }

    /**
     * Read and parse a JSON file, UTF-8 encoded.
     */
    private static JsonObject parseIndexDefinitions(String jsonFileName) {
        try {
            // explicit charset: the platform default may not be UTF-8
            String json = new String(Files.readAllBytes(Paths.get(jsonFileName)), StandardCharsets.UTF_8);
            return JsonObject.fromJson(json, true);
        } catch (Exception e) {
            throw new IllegalStateException("Error parsing file: " + jsonFileName, e);
        }
    }
}
/**
 * Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.wso2.carbon.ntask.core.impl;

import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.ntask.common.TaskException;
import org.wso2.carbon.ntask.common.TaskException.Code;
import org.wso2.carbon.ntask.core.TaskInfo;
import org.wso2.carbon.ntask.core.TaskLocationResolver;
import org.wso2.carbon.ntask.core.TaskServiceContext;
import org.wso2.carbon.ntask.core.internal.TasksDSComponent;

import com.hazelcast.core.HazelcastInstance;

/**
 * This class represents a task location resolver, which assigns the locations
 * according to a filtering rules given as parameters.
 * If task-type-pattern matches and task-name-pattern matches, check existing addresses of
 * address-pattern, and if addresses exist, select address in round-robin
 * fashion, if not move onto next rule in sequence, if none matches, the task is
 * not scheduled.
 * <property name="rule-[order]">[task-type-pattern],[task-name-pattern],[address-pattern]</property>
 */
public class RuleBasedLocationResolver implements TaskLocationResolver {

    private static final String RULE_BASED_TASK_RESOLVER_ID = "__RULE_BASED_TASK_RESOLVER_ID__";

    private static final Log log = LogFactory.getLog(RuleBasedLocationResolver.class);

    /** Rules ordered by their sequence number (sorted in init). */
    private List<Rule> rules = new ArrayList<Rule>();

    /**
     * Parses all "rule-[sequence]" properties into {@link Rule} objects and
     * sorts them by sequence number.
     *
     * @param properties the resolver configuration properties
     * @throws TaskException if a rule property name or value is malformed
     */
    @Override
    public void init(Map<String, String> properties) throws TaskException {
        int seq;
        for (Map.Entry<String, String> entry : properties.entrySet()) {
            if (entry.getKey().startsWith("rule-")) {
                try {
                    seq = Integer.parseInt(entry.getKey().substring(5));
                } catch (NumberFormatException e) {
                    throw new TaskException("The RuleBasedLocationResolver must have the property name in the format of "
                            + "rule-[sequence_number]", Code.CONFIG_ERROR);
                }
                this.rules.add(new Rule(seq, entry.getValue()));
            }
        }
        Collections.sort(this.rules);
    }

    /**
     * Resolves the location of the given task by evaluating the rules in
     * sequence; the first rule that yields candidate nodes wins, and one of
     * its nodes is picked round-robin.
     *
     * @throws TaskException if no rule matches (prevents scheduling) or a rule
     *             evaluation fails
     */
    @Override
    public int getLocation(TaskServiceContext ctx, TaskInfo taskInfo) throws TaskException {
        List<Integer> locations;
        /* if matched by no one, then throws a TaskException to prevent scheduling */
        int result = -1;
        for (Rule rule : this.rules) {
            try {
                locations = rule.evaluate(ctx, taskInfo);
            } catch (Exception e) {
                throw new TaskException("Error in rule evaluation in RuleBasedLocationResolver: "
                        + e.getMessage(), Code.UNKNOWN);
            }
            if (!locations.isEmpty()) {
                if (log.isDebugEnabled()) {
                    log.debug("Task rule hit: " + rule + " for task: [" + ctx.getTaskType() + "]["
                            + taskInfo.getName() + "]");
                    log.debug("Task rule hit: " + rule + " Location count : " + locations.size());
                }
                result = this.getRoundRobinLocation(rule, locations);
                break;
            }
        }
        if (log.isDebugEnabled()) {
            log.debug("Task location resolved to: " + result + " for task: [" + ctx.getTaskType()
                    + "][" + taskInfo.getName() + "]");
        }
        if (result == -1) {
            throw new TaskException("Task location unavailable for RuleBasedLocationResolver: "
                    + ctx.getTaskType() + "#" + taskInfo.getName(), Code.TASK_NODE_NOT_AVAILABLE);
        }
        return result;
    }

    /**
     * Picks one of the candidate locations using a cluster-wide round-robin
     * counter (one counter per rule, keyed by the rule's hash code).
     */
    private int getRoundRobinLocation(Rule rule, List<Integer> locations) {
        if (log.isDebugEnabled()) {
            log.debug("Performing RoundRobin for " + rule);
        }
        HazelcastInstance hz = TasksDSComponent.getHazelcastInstance();
        if (hz == null) {
            // no cluster: fall back to the first candidate
            return 0;
        }
        long counter = hz.getAtomicLong(RULE_BASED_TASK_RESOLVER_ID + rule.hashCode()).incrementAndGet();
        // floorMod guarantees a non-negative index; the previous
        // "(int) Math.abs(counter)" could go negative once the counter
        // exceeded Integer.MAX_VALUE, causing an IndexOutOfBoundsException
        int result = locations.get((int) Math.floorMod(counter, (long) locations.size()));
        if (log.isDebugEnabled()) {
            log.debug("Selected Node for " + rule + " is " + result);
        }
        return result;
    }

    /**
     * A single "rule-[sequence]" entry: task-type, task-name and address
     * patterns (regular expressions), ordered by sequence number.
     * Static nested class: it does not use any enclosing-instance state.
     */
    private static class Rule implements Comparable<Rule> {

        private int sequence;

        private String taskTypePattern;

        private String taskNamePattern;

        private String addressPattern;

        public Rule(int sequence, String entry) throws TaskException {
            this.sequence = sequence;
            String[] tokens = entry.split(",");
            if (tokens.length != 3) {
                throw new TaskException("The RuleBasedLocationResolver must have the properties in the format of "
                        + "[task-type-pattern],[task-name-pattern],[address-pattern]", Code.CONFIG_ERROR);
            }
            this.taskTypePattern = tokens[0];
            this.taskNamePattern = tokens[1];
            this.addressPattern = tokens[2];
        }

        public int getSequence() {
            return sequence;
        }

        public String getTaskTypePattern() {
            return taskTypePattern;
        }

        public String getTaskNamePattern() {
            return taskNamePattern;
        }

        public String getAddressPattern() {
            return addressPattern;
        }

        @Override
        public int hashCode() {
            // NOTE(review): kept byte-identical on purpose (no ":" before the
            // address pattern) — this value keys the distributed round-robin
            // counter, so changing it would reset counters across upgrades
            return (this.getSequence() + ":" + this.getTaskTypePattern() + ":"
                    + this.getTaskNamePattern() + this.getAddressPattern()).hashCode();
        }

        @Override
        public int compareTo(Rule rhs) {
            // Integer.compare avoids the overflow risk of subtraction
            return Integer.compare(this.getSequence(), rhs.getSequence());
        }

        @Override
        public String toString() {
            return "Rule [" + this.getSequence() + "] - " + this.getTaskTypePattern() + ","
                    + this.getTaskNamePattern() + "," + this.getAddressPattern();
        }

        /**
         * Returns the indexes of the cluster nodes whose hostname, IP address
         * or member identifier matches this rule's address pattern, provided
         * the task type and name patterns match.
         */
        public List<Integer> evaluate(TaskServiceContext ctx, TaskInfo taskInfo) {
            List<Integer> result = new ArrayList<Integer>();
            if (!ctx.getTaskType().matches(this.getTaskTypePattern())) {
                return result;
            }
            if (!taskInfo.getName().matches(this.getTaskNamePattern())) {
                return result;
            }
            int count = ctx.getServerCount();
            if (log.isDebugEnabled()) {
                log.debug("Task server count : " + count);
                log.debug("Address pattern : " + this.addressPattern);
            }
            for (int i = 0; i < count; i++) {
                InetSocketAddress sockAddr = ctx.getServerAddress(i);
                String identifier = ctx.getServerIdentifier(i);
                if (sockAddr == null) {
                    log.warn("RuleBasedLocationResolver: cannot find the host address for node: " + i);
                    continue;
                }
                // ip/host2 are reset per node; previously they leaked from the
                // prior iteration when a node had no resolvable InetAddress
                String ip = null;
                String host2 = null;
                String host1 = sockAddr.getHostName();
                if (log.isDebugEnabled()) {
                    log.debug("Hostname 1 : " + host1);
                }
                InetAddress inetAddr = sockAddr.getAddress();
                if (inetAddr != null) {
                    ip = inetAddr.getHostAddress();
                    host2 = inetAddr.getCanonicalHostName();
                    if (log.isDebugEnabled()) {
                        log.debug("IP address : " + ip);
                        log.debug("Hostname 1 : " + host2);
                    }
                }
                if (host1.matches(this.getAddressPattern())) {
                    if (log.isDebugEnabled()) {
                        log.debug("Hostname 1 matched");
                    }
                    result.add(i);
                } else if (ip != null && ip.matches(this.getAddressPattern())) {
                    if (log.isDebugEnabled()) {
                        log.debug("IP address matched");
                    }
                    result.add(i);
                } else if (!host1.equals(host2) && host2 != null
                        && host2.matches(this.getAddressPattern())) {
                    if (log.isDebugEnabled()) {
                        log.debug("Hostname 2 matched");
                    }
                    result.add(i);
                } else if (identifier != null && identifier.matches(this.getAddressPattern())) {
                    if (log.isDebugEnabled()) {
                        log.debug("localMemberIdentifier : " + identifier);
                        log.debug("localMemberIdentifier matched");
                    }
                    result.add(i);
                }
            }
            return result;
        }
    }

}
/*******************************************************************************
 *
 * Pentaho Big Data
 *
 * Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.trans.steps.hbaseinput;

import java.net.MalformedURLException;
import java.net.URL;
import java.text.DecimalFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;

import org.pentaho.di.core.Const;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.hadoop.HadoopConfigurationBootstrap;
import org.pentaho.di.core.logging.LogChannelInterface;
import org.pentaho.di.core.row.RowDataUtil;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.step.BaseStepData;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.hadoop.shim.HadoopConfiguration;
import org.pentaho.hbase.HBaseRowToKettleTuple;
import org.pentaho.hbase.shim.api.ColumnFilter;
import org.pentaho.hbase.shim.api.HBaseValueMeta;
import org.pentaho.hbase.shim.api.Mapping;
import org.pentaho.hbase.shim.spi.HBaseBytesUtilShim;
import org.pentaho.hbase.shim.spi.HBaseConnection;
import org.pentaho.hbase.shim.spi.HBaseShim;

/**
 * Class providing an input step for reading data from an HBase table according to meta data mapping info stored in a
 * separate HBase table called "pentaho_mappings". See org.pentaho.hbase.mapping.Mapping for details on the meta data
 * format.
 *
 * @author Mark Hall (mhall{[at]}pentaho{[dot]}com)
 * @version $Revision$
 *
 */
public class HBaseInputData extends BaseStepData implements StepDataInterface {

  /** The output data format */
  protected RowMetaInterface m_outputRowMeta;

  /**
   * Get the output row format
   *
   * @return the output row format
   */
  public RowMetaInterface getOutputRowMeta() {
    return m_outputRowMeta;
  }

  /**
   * Set the output row format
   *
   * @param rmi
   *          the output row format
   */
  public void setOutputRowMeta( RowMetaInterface rmi ) {
    m_outputRowMeta = rmi;
  }

  /**
   * Get an administrative connection to HBase. Builds a property set from the non-empty arguments and asks the
   * active Hadoop configuration's HBase shim for a configured connection.
   *
   * @param zookeeperHosts
   *          the list of zookeeper host(s)
   * @param zookeeperPort
   *          the zookeeper port (null for default)
   * @param siteConfig
   *          optional path to site config
   * @param defaultConfig
   *          optional path to default config
   * @param logging
   *          a list for holding log messages generated when obtaining the connection
   * @return an administrative connection to HBase
   * @throws Exception
   *           if a problem occurs
   */
  public static HBaseConnection getHBaseConnection( String zookeeperHosts, String zookeeperPort, String siteConfig,
      String defaultConfig, List<String> logging ) throws Exception {
    Properties connProps = new Properties();
    if ( !Const.isEmpty( zookeeperHosts ) ) {
      connProps.setProperty( HBaseConnection.ZOOKEEPER_QUORUM_KEY, zookeeperHosts );
    }
    if ( !Const.isEmpty( zookeeperPort ) ) {
      connProps.setProperty( HBaseConnection.ZOOKEEPER_PORT_KEY, zookeeperPort );
    }
    if ( !Const.isEmpty( siteConfig ) ) {
      connProps.setProperty( HBaseConnection.SITE_KEY, siteConfig );
    }
    if ( !Const.isEmpty( defaultConfig ) ) {
      connProps.setProperty( HBaseConnection.DEFAULTS_KEY, defaultConfig );
    }

    HadoopConfiguration active = HadoopConfigurationBootstrap.getHadoopConfigurationProvider().getActiveConfiguration();
    HBaseShim hbaseShim = active.getHBaseShim();
    HBaseConnection conn = hbaseShim.getHBaseConnection();
    conn.configureConnection( connProps, logging );

    return conn;
  }

  /**
   * Utility method to covert a string to a URL object. A string that does not already start with "http://" or
   * "file://" is treated as a local path and prefixed with "file://".
   * <p>
   * NOTE(review): "https://" URLs are not recognized here and would be mangled into a "file://" URL -- confirm
   * whether callers can ever pass https before extending.
   *
   * @param pathOrURL
   *          file or http URL as a string
   * @return a URL, or null if the supplied string is empty
   * @throws MalformedURLException
   *           if there is a problem with the URL.
   */
  public static URL stringToURL( String pathOrURL ) throws MalformedURLException {
    URL result = null;

    if ( !Const.isEmpty( pathOrURL ) ) {
      if ( pathOrURL.toLowerCase().startsWith( "http://" ) || pathOrURL.toLowerCase().startsWith( "file://" ) ) {
        result = new URL( pathOrURL );
      } else {
        String c = "file://" + pathOrURL;
        result = new URL( c );
      }
    }

    return result;
  }

  /**
   * Encode one user-supplied key bound (start or stop) according to the mapping's key type.
   * <p>
   * BINARY keys are assumed to be hex-encoded strings and STRING keys are encoded directly. For date and numeric
   * keys a conversion mask embedded in the value itself ("value@mask") overrides the supplied mask; if no mask is
   * available the raw string is encoded as a last resort.
   *
   * @param keyS
   *          the (already variable-substituted) key bound value
   * @param conversionMask
   *          default date/number conversion mask (may be empty)
   * @param tableMapping
   *          mapping supplying the key type
   * @param bytesUtil
   *          byte conversion utilities
   * @param parseErrorMessageKey
   *          i18n key used when the value cannot be parsed with the mask
   * @return the encoded key bound
   * @throws KettleException
   *           if the value cannot be parsed as a date/number with the mask
   */
  private static byte[] encodeKeyBound( String keyS, String conversionMask, Mapping tableMapping,
      HBaseBytesUtilShim bytesUtil, String parseErrorMessageKey ) throws KettleException {

    if ( tableMapping.getKeyType() == Mapping.KeyType.BINARY ) {
      // assume we have a hex encoded string
      return HBaseValueMeta.encodeKeyValue( keyS, tableMapping.getKeyType(), bytesUtil );
    }
    if ( tableMapping.getKeyType() == Mapping.KeyType.STRING ) {
      return HBaseValueMeta.encodeKeyValue( keyS, tableMapping.getKeyType(), bytesUtil );
    }

    // allow a conversion mask in the key field ("value@mask") to override any
    // mask specified for the key in the user-specified fields
    String convM = conversionMask;
    String[] parts = keyS.split( "@" );
    if ( parts.length == 2 ) {
      keyS = parts[0];
      convM = parts[1];
    }

    if ( Const.isEmpty( convM ) ) {
      // no mask available - just try it as a string
      return HBaseValueMeta.encodeKeyValue( keyS, tableMapping.getKeyType(), bytesUtil );
    }

    if ( tableMapping.getKeyType() == Mapping.KeyType.DATE
        || tableMapping.getKeyType() == Mapping.KeyType.UNSIGNED_DATE ) {
      SimpleDateFormat sdf = new SimpleDateFormat();
      sdf.applyPattern( convM );
      try {
        Date d = sdf.parse( keyS );
        return HBaseValueMeta.encodeKeyValue( d, tableMapping.getKeyType(), bytesUtil );
      } catch ( ParseException e ) {
        throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG, parseErrorMessageKey, keyS ), e );
      }
    }

    // Number type - Double/Float or Long/Integer
    DecimalFormat df = new DecimalFormat();
    df.applyPattern( convM );
    try {
      Number num = df.parse( keyS );
      return HBaseValueMeta.encodeKeyValue( num, tableMapping.getKeyType(), bytesUtil );
    } catch ( ParseException e ) {
      throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG, parseErrorMessageKey, keyS ), e );
    }
  }

  /**
   * Initialize the table scan with start and stop key values (if supplied).
   * <p>
   * BUG FIX: the stop-key handling used to be nested inside the start-key branch, so a stop key supplied without a
   * start key was silently ignored. The two bounds are now computed independently.
   *
   * @param hbAdmin
   *          the connection to use
   * @param bytesUtil
   *          the byte conversion utils to use
   * @param tableMapping
   *          the table mapping info to use
   * @param dateOrNumberConversionMaskForKey
   *          conversion pattern for data/numbers
   * @param keyStartS
   *          the key start value
   * @param keyStopS
   *          the key stop value
   * @param scannerCacheSize
   *          the size of the scanner cache
   * @param log
   *          the log
   * @param vars
   *          variables
   * @throws KettleException
   *           if a problem occurs
   */
  public static void initializeScan( HBaseConnection hbAdmin, HBaseBytesUtilShim bytesUtil, Mapping tableMapping,
      String dateOrNumberConversionMaskForKey, String keyStartS, String keyStopS, String scannerCacheSize,
      LogChannelInterface log, VariableSpace vars ) throws KettleException {

    byte[] keyLowerBound = null;
    byte[] keyUpperBound = null;

    // Set up the scan
    if ( !Const.isEmpty( keyStartS ) ) {
      keyStartS = vars.environmentSubstitute( keyStartS );
      keyLowerBound = encodeKeyBound( keyStartS, dateOrNumberConversionMaskForKey, tableMapping, bytesUtil,
          "HBaseInput.Error.UnableToParseLowerBoundKeyValue" );
    }

    if ( !Const.isEmpty( keyStopS ) ) {
      keyStopS = vars.environmentSubstitute( keyStopS );
      keyUpperBound = encodeKeyBound( keyStopS, dateOrNumberConversionMaskForKey, tableMapping, bytesUtil,
          "HBaseInput.Error.UnableToParseUpperBoundKeyValue" );
    }

    int cacheSize = 0;

    // set any user-specified scanner caching
    if ( !Const.isEmpty( scannerCacheSize ) ) {
      String temp = vars.environmentSubstitute( scannerCacheSize );
      cacheSize = Integer.parseInt( temp );
      if ( log != null ) {
        log.logBasic( BaseMessages
            .getString( HBaseInputMeta.PKG, "HBaseInput.Message.SettingScannerCaching", cacheSize ) );
      }
    }

    try {
      hbAdmin.newSourceTableScan( keyLowerBound, keyUpperBound, cacheSize );
    } catch ( Exception ex ) {
      throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG,
          "HBaseInput.Error.UnableToConfigureSourceTableScan" ), ex );
    }
  }

  /**
   * Set the specific columns to be returned by the scan. Column names prefixed with "@@@binary@@@" are registered
   * as binary (hex-encoded) qualifiers.
   *
   * @param hbAdmin
   *          the connection to use
   * @param limitCols
   *          the columns to limit the scan to
   * @param tableMapping
   *          the mapping information
   * @throws KettleException
   *           if a problem occurs
   */
  public static void setScanColumns( HBaseConnection hbAdmin, List<HBaseValueMeta> limitCols, Mapping tableMapping )
    throws KettleException {

    for ( HBaseValueMeta currentCol : limitCols ) {
      // the key is always returned by the scan and need not be added explicitly
      if ( !currentCol.isKey() ) {
        String colFamilyName = currentCol.getColumnFamily();
        String qualifier = currentCol.getColumnName();

        boolean binaryColName = false;
        if ( qualifier.startsWith( "@@@binary@@@" ) ) {
          qualifier = qualifier.replace( "@@@binary@@@", "" );
          binaryColName = true;
        }

        try {
          hbAdmin.addColumnToScan( colFamilyName, qualifier, binaryColName );
        } catch ( Exception ex ) {
          throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG,
              "HBaseInput.Error.UnableToAddColumnToScan" ), ex );
        }
      }
    }
  }

  /**
   * Set column filters to apply server-side to the scan results. Each filter's field alias must resolve to a mapped
   * column, and any type declared on the filter must agree with the mapped column's HBase type.
   *
   * @param hbAdmin
   *          the connection to use
   * @param columnFilters
   *          the column filters to apply
   * @param matchAnyFilter
   *          if true then a row will be returned if any of the filters match (otherwise all have to match)
   * @param columnsMappedByAlias
   *          the columns defined in the mapping
   * @param vars
   *          variables to use
   * @throws KettleException
   *           if a problem occurs
   */
  public static void setScanFilters( HBaseConnection hbAdmin, Collection<ColumnFilter> columnFilters,
      boolean matchAnyFilter, Map<String, HBaseValueMeta> columnsMappedByAlias, VariableSpace vars )
    throws KettleException {

    for ( ColumnFilter cf : columnFilters ) {
      String fieldAliasS = vars.environmentSubstitute( cf.getFieldAlias() );
      HBaseValueMeta mappedCol = columnsMappedByAlias.get( fieldAliasS );
      if ( mappedCol == null ) {
        throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG,
            "HBaseInput.Error.ColumnFilterIsNotInTheMapping", fieldAliasS ) );
      }

      // check the type (if set in the ColumnFilter) against the type
      // of this field in the mapping
      String fieldTypeS = vars.environmentSubstitute( cf.getFieldType() );
      if ( !Const.isEmpty( fieldTypeS ) ) {
        if ( !mappedCol.getHBaseTypeDesc().equalsIgnoreCase( fieldTypeS ) ) {
          throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG, "HBaseInput.Error.FieldTypeMismatch",
              fieldTypeS, fieldAliasS, mappedCol.getHBaseTypeDesc() ) );
        }
      }

      try {
        hbAdmin.addColumnFilterToScan( cf, mappedCol, vars, matchAnyFilter );
      } catch ( Exception ex ) {
        throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG,
            "HBaseInput.Error.UnableToAddColumnFilterToScan" ), ex );
      }
    }
  }

  /**
   * Convert/decode the current hbase row into a list of "tuple" kettle rows. Delegates entirely to the supplied
   * tuple handler; the user-specified column subset is used when non-empty, otherwise all mapped columns.
   *
   * @param hbAdmin
   *          the connection to use
   * @param userOutputColumns
   *          user-specified subset of columns (if any) from the mapping
   * @param columnsMappedByAlias
   *          columns in the mapping keyed by alias
   * @param tableMapping
   *          the mapping to use
   * @param tupleHandler
   *          the HBaseRowToKettleTuple to delegate to
   * @param outputRowMeta
   *          the outgoing row meta
   * @param bytesUtil
   *          the byte conversion utils to use (unused here, kept for signature compatibility)
   * @return a list of kettle rows
   * @throws KettleException
   *           if a problem occurs
   */
  public static List<Object[]> getTupleOutputRows( HBaseConnection hbAdmin, List<HBaseValueMeta> userOutputColumns,
      Map<String, HBaseValueMeta> columnsMappedByAlias, Mapping tableMapping, HBaseRowToKettleTuple tupleHandler,
      RowMetaInterface outputRowMeta, HBaseBytesUtilShim bytesUtil ) throws KettleException {

    if ( userOutputColumns != null && userOutputColumns.size() > 0 ) {
      return tupleHandler.hbaseRowToKettleTupleMode( null, hbAdmin, tableMapping, userOutputColumns, outputRowMeta );
    } else {
      return tupleHandler
          .hbaseRowToKettleTupleMode( null, hbAdmin, tableMapping, columnsMappedByAlias, outputRowMeta );
    }
  }

  /**
   * Convert/decode the current hbase row into a kettle row. The row key is decoded first, then either the
   * user-selected columns or every mapped column is fetched and decoded into its slot in the output row.
   *
   * @param hbAdmin
   *          the connection to use
   * @param userOutputColumns
   *          user-specified subset of columns (if any) from the mapping
   * @param columnsMappedByAlias
   *          columns in the mapping keyed by alias
   * @param tableMapping
   *          the mapping to use
   * @param outputRowMeta
   *          the outgoing row meta
   * @param bytesUtil
   *          the byte conversion utils to use
   * @return a kettle row
   * @throws KettleException
   *           if a problem occurs
   */
  public static Object[] getOutputRow( HBaseConnection hbAdmin, List<HBaseValueMeta> userOutputColumns,
      Map<String, HBaseValueMeta> columnsMappedByAlias, Mapping tableMapping, RowMetaInterface outputRowMeta,
      HBaseBytesUtilShim bytesUtil ) throws KettleException {

    // one slot per selected column, or all mapped columns + 1 for the key
    int size = ( userOutputColumns != null && userOutputColumns.size() > 0 ) ? userOutputColumns.size() : tableMapping
        .getMappedColumns().keySet().size() + 1;
    Object[] outputRowData = RowDataUtil.allocateRowData( size );

    // User-selected output columns?
    if ( userOutputColumns != null && userOutputColumns.size() > 0 ) {
      for ( HBaseValueMeta currentCol : userOutputColumns ) {
        if ( currentCol.isKey() ) {
          byte[] rawKey = null;
          try {
            rawKey = hbAdmin.getResultSetCurrentRowKey();
          } catch ( Exception e ) {
            throw new KettleException( e );
          }
          Object decodedKey = HBaseValueMeta.decodeKeyValue( rawKey, tableMapping, bytesUtil );
          // NOTE(review): keyIndex is not checked for -1 here (outputIndex below is) -- a
          // mis-configured alias would surface as an ArrayIndexOutOfBoundsException.
          int keyIndex = outputRowMeta.indexOfValue( currentCol.getAlias() );
          outputRowData[keyIndex] = decodedKey;
        } else {
          String colFamilyName = currentCol.getColumnFamily();
          String qualifier = currentCol.getColumnName();

          boolean binaryColName = false;
          if ( qualifier.startsWith( "@@@binary@@@" ) ) {
            qualifier = qualifier.replace( "@@@binary@@@", "" );
            // assume hex encoded
            binaryColName = true;
          }

          byte[] kv = null;
          try {
            kv = hbAdmin.getResultSetCurrentRowColumnLatest( colFamilyName, qualifier, binaryColName );
          } catch ( Exception e ) {
            throw new KettleException( e );
          }
          int outputIndex = outputRowMeta.indexOfValue( currentCol.getAlias() );
          if ( outputIndex < 0 ) {
            throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG,
                "HBaseInput.Error.ColumnNotDefinedInOutput", currentCol.getAlias() ) );
          }

          Object decodedVal = HBaseValueMeta.decodeColumnValue( ( kv == null ) ? null : kv, currentCol, bytesUtil );
          outputRowData[outputIndex] = decodedVal;
        }
      }
    } else {
      // do the key first
      byte[] rawKey = null;
      try {
        rawKey = hbAdmin.getResultSetCurrentRowKey();
      } catch ( Exception e ) {
        throw new KettleException( e );
      }
      Object decodedKey = HBaseValueMeta.decodeKeyValue( rawKey, tableMapping, bytesUtil );
      int keyIndex = outputRowMeta.indexOfValue( tableMapping.getKeyName() );
      outputRowData[keyIndex] = decodedKey;

      Set<String> aliasSet = columnsMappedByAlias.keySet();
      for ( String name : aliasSet ) {
        HBaseValueMeta currentCol = columnsMappedByAlias.get( name );
        String colFamilyName = currentCol.getColumnFamily();
        String qualifier = currentCol.getColumnName();

        if ( currentCol.isKey() ) {
          // skip key as it has already been processed
          // and is not in the scan's columns
          continue;
        }

        boolean binaryColName = false;
        if ( qualifier.startsWith( "@@@binary@@@" ) ) {
          qualifier = qualifier.replace( "@@@binary@@@", "" );
          // assume hex encoded
          binaryColName = true;
        }

        byte[] kv = null;
        try {
          kv = hbAdmin.getResultSetCurrentRowColumnLatest( colFamilyName, qualifier, binaryColName );
        } catch ( Exception e ) {
          throw new KettleException( e );
        }
        int outputIndex = outputRowMeta.indexOfValue( name );
        if ( outputIndex < 0 ) {
          throw new KettleException( BaseMessages.getString( HBaseInputMeta.PKG,
              "HBaseInput.Error.ColumnNotDefinedInOutput", name ) );
        }

        Object decodedVal = HBaseValueMeta.decodeColumnValue( ( kv == null ) ? null : kv, currentCol, bytesUtil );
        outputRowData[outputIndex] = decodedVal;
      }
    }

    return outputRowData;
  }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * Borrowed from PostgreSQL JDBC driver */ package kr.co.bitnine.octopus; import org.postgresql.PGProperty; import org.postgresql.core.Logger; import org.postgresql.util.GT; import org.postgresql.util.HostSpec; import org.postgresql.util.PSQLException; import org.postgresql.util.PSQLState; import org.postgresql.util.SharedTimer; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.security.AccessControlException; import java.security.AccessController; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; import java.sql.Connection; import java.sql.DriverManager; import java.sql.DriverPropertyInfo; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.util.ArrayList; import java.util.Enumeration; import java.util.Properties; /** * The Java SQL framework allows for multiple database drivers. Each * driver should supply a class that implements the Driver interface * <p/> * <p>The DriverManager will try to load as many drivers as it can find and * then for any given connection request, it will ask each driver in turn * to try to connect to the target URL. * <p/> * <p>It is strongly recommended that each Driver class should be small and * standalone so that the Driver class can be loaded and queried without * bringing in vast quantities of supporting code. 
* <p/> * <p>When a Driver class is loaded, it should create an instance of itself * and register it with the DriverManager. This means that a user can load * and register a driver by doing Class.forName("foo.bah.Driver") * * @see org.postgresql.PGConnection * @see java.sql.Driver */ public final class Driver implements java.sql.Driver { // make these public so they can be used in setLogLevel below public static final int DEBUG = 2; public static final int INFO = 1; public static final int OFF = 0; private static Driver registeredDriver; private static final Logger LOGGER = new Logger(); private static boolean logLevelSet; private static SharedTimer sharedTimer = new SharedTimer(LOGGER); static { try { // moved the registerDriver from the constructor to here // because some clients call the driver themselves (I know, as // my early jdbc work did - and that was based on other examples). // Placing it here, means that the driver is registered once only. register(); } catch (SQLException e) { throw new ExceptionInInitializerError(e); } } // Helper to retrieve default properties from classloader resource // properties files. private Properties defaultProperties; private synchronized Properties getDefaultProperties() throws IOException { if (defaultProperties != null) return defaultProperties; // Make sure we load properties with the maximum possible // privileges. try { defaultProperties = (Properties) AccessController.doPrivileged( new PrivilegedExceptionAction() { public Object run() throws IOException { return loadDefaultProperties(); } }); } catch (PrivilegedActionException e) { throw (IOException) e.getException(); } // Use the loglevel from the default properties (if any) // as the driver-wide default unless someone explicitly called // setLogLevel() already. 
synchronized (Driver.class) { if (!logLevelSet) { String driverLogLevel = PGProperty.LOG_LEVEL.get(defaultProperties); if (driverLogLevel != null) { try { setLogLevel(Integer.parseInt(driverLogLevel)); } catch (Exception ignore) { // invalid value for loglevel; ignore it } } } } return defaultProperties; } private Properties loadDefaultProperties() throws IOException { Properties merged = new Properties(); try { PGProperty.USER.set(merged, System.getProperty("user.name")); } catch (java.lang.SecurityException se) { // We're just trying to set a default, so if we can't // it's not a big deal. } // If we are loaded by the bootstrap classloader, getClassLoader() // may return null. In that case, try to fall back to the system // classloader. // // We should not need to catch SecurityException here as we are // accessing either our own classloader, or the system classloader // when our classloader is null. The ClassLoader javadoc claims // neither case can throw SecurityException. ClassLoader cl = getClass().getClassLoader(); if (cl == null) cl = ClassLoader.getSystemClassLoader(); if (cl == null) { LOGGER.debug("Can't find a classloader for the Driver; not loading driver configuration"); return merged; // Give up on finding defaults. } LOGGER.debug("Loading driver configuration via classloader " + cl); // When loading the driver config files we don't want settings found // in later files in the classpath to override settings specified in // earlier files. To do this we've got to read the returned // Enumeration into temporary storage. 
ArrayList<URL> urls = new ArrayList(); Enumeration<URL> urlEnum = cl.getResources("org/postgresql/driverconfig.properties"); while (urlEnum.hasMoreElements()) urls.add(urlEnum.nextElement()); for (int i = urls.size() - 1; i >= 0; i--) { URL url = urls.get(i); LOGGER.debug("Loading driver configuration from: " + url); InputStream is = url.openStream(); merged.load(is); is.close(); } return merged; } /** * Try to make a database connection to the given URL. The driver * should return "null" if it realizes it is the wrong kind of * driver to connect to the given URL. This will be common, as * when the JDBC driverManager is asked to connect to a given URL, * it passes the URL to each loaded driver in turn. * <p/> * <p>The driver should raise an SQLException if it is the right driver * to connect to the given URL, but has trouble connecting to the * database. * <p/> * <p>The java.util.Properties argument can be used to pass arbitrary * string tag/value pairs as connection arguments. * <p/> * user - (required) The user to connect as * password - (optional) The password for the user * ssl - (optional) Use SSL when connecting to the server * readOnly - (optional) Set connection to read-only by default * charSet - (optional) The character set to be used for converting * to/from the database to unicode. If multibyte is enabled on the * server then the character set of the database is used as the default, * otherwise the jvm character encoding is used as the default. * This value is only used when connecting to a 7.2 or older server. * loglevel - (optional) Enable logging of messages from the driver. * The value is an integer from 0 to 2 where: * OFF = 0, INFO = 1, DEBUG = 2 * The output is sent to DriverManager.getPrintWriter() if set, * otherwise it is sent to System.out. * compatible - (optional) This is used to toggle * between different functionality as it changes across different releases * of the jdbc driver code. 
The values here are versions of the jdbc * client and not server versions. For example in 7.1 get/setBytes * worked on LargeObject values, in 7.2 these methods were changed * to work on bytea values. This change in functionality could * be disabled by setting the compatible level to be "7.1", in * which case the driver will revert to the 7.1 functionality. * <p/> * <p>Normally, at least * "user" and "password" properties should be included in the * properties. For a list of supported * character encoding , see * http://java.sun.com/products/jdk/1.2/docs/guide/internat/encoding.doc.html * Note that you will probably want to have set up the Postgres database * itself to use the same encoding, with the "-E <encoding>" argument * to createdb. * <p/> * Our protocol takes the forms: * <PRE> * jdbc:postgresql://host:port/database?param1=val1&... * </PRE> * * @param url the URL of the database to connect to * @param info a list of arbitrary tag/value pairs as connection * arguments * @return a connection to the URL or null if it isnt us * @throws SQLException if a database access error occurs * @see java.sql.Driver#connect */ public java.sql.Connection connect(String url, Properties info) throws SQLException { // get defaults Properties defaults; if (!url.startsWith(OCTOPUS_PROTOCOL)) return null; try { defaults = getDefaultProperties(); } catch (IOException ioe) { throw new PSQLException(GT.tr("Error loading default settings from driverconfig.properties"), PSQLState.UNEXPECTED_ERROR, ioe); } // override defaults with provided properties Properties props = new Properties(defaults); if (info != null) { Enumeration e = info.propertyNames(); while (e.hasMoreElements()) { String propName = (String) e.nextElement(); String propValue = info.getProperty(propName); if (propValue == null) { throw new PSQLException(GT.tr("Properties for the driver contains a non-string value for the key ") + propName, PSQLState.UNEXPECTED_ERROR); } props.setProperty(propName, propValue); } } // 
parse URL and add more properties props = parseURL(url, props); if (props == null) { LOGGER.debug("Error in url: " + url); return null; } try { LOGGER.debug("Connecting with URL: " + url); // Enforce login timeout, if specified, by running the connection // attempt in a separate thread. If we hit the timeout without the // connection completing, we abandon the connection attempt in // the calling thread, but the separate thread will keep trying. // Eventually, the separate thread will either fail or complete // the connection; at that point we clean up the connection if // we managed to establish one after all. See ConnectThread for // more details. long timeout = timeout(props); if (timeout <= 0) return makeConnection(url, props); ConnectThread ct = new ConnectThread(url, props); Thread thread = new Thread(ct, "Octopus JDBC driver connection thread"); thread.setDaemon(true); // Don't prevent the VM from shutting down thread.start(); return ct.getResult(timeout); } catch (PSQLException ex1) { LOGGER.debug("Connection error:", ex1); // re-throw the exception, otherwise it will be caught next, and a // org.postgresql.unusual error will be returned instead. // FIXME throw ex1; } catch (AccessControlException ace) { throw new PSQLException(GT.tr("Your security policy has prevented the connection from being attempted. You probably need to grant the connect java.net.SocketPermission to the database server host and port that you wish to connect to."), PSQLState.UNEXPECTED_ERROR, ace); } catch (Exception ex2) { LOGGER.debug("Unexpected connection error:", ex2); throw new PSQLException(GT.tr("Something unusual has occurred to cause the driver to fail. Please report this exception."), PSQLState.UNEXPECTED_ERROR, ex2); } } /** * Perform a connect in a separate thread; supports * getting the results from the original thread while enforcing * a login timeout. 
*/ private static class ConnectThread implements Runnable { ConnectThread(String url, Properties props) { this.url = url; this.props = props; } public void run() { Connection conn; Throwable error; try { conn = makeConnection(url, props); error = null; } catch (Throwable t) { conn = null; error = t; } synchronized (this) { if (abandoned) { if (conn != null) { try { conn.close(); } catch (SQLException ignored) { } } } else { result = conn; resultException = error; notify(); } } } /** * Get the connection result from this (assumed running) thread. * If the timeout is reached without a result being available, * a SQLException is thrown. * * @param timeout timeout in milliseconds * @return the new connection, if successful * @throws SQLException if a connection error occurs or the timeout is reached */ public Connection getResult(long timeout) throws SQLException { long expiry = System.currentTimeMillis() + timeout; synchronized (this) { while (true) { if (result != null) return result; if (resultException != null) { if (resultException instanceof SQLException) { resultException.fillInStackTrace(); throw (SQLException) resultException; } else { throw new PSQLException(GT.tr("Something unusual has occurred to cause the driver to fail. 
Please report this exception."), PSQLState.UNEXPECTED_ERROR, resultException); } } long delay = expiry - System.currentTimeMillis(); if (delay <= 0) { abandoned = true; throw new PSQLException(GT.tr("Connection attempt timed out."), PSQLState.CONNECTION_UNABLE_TO_CONNECT); } try { wait(delay); } catch (InterruptedException ie) { // reset the interrupt flag Thread.currentThread().interrupt(); abandoned = true; // throw an unchecked exception which will hopefully not be ignored by the calling code throw new RuntimeException(GT.tr("Interrupted while attempting to connect.")); } } } } private final String url; private final Properties props; private Connection result; private Throwable resultException; private boolean abandoned; } /** * Create a connection from URL and properties. Always * does the connection work in the current thread without * enforcing a timeout, regardless of any timeout specified * in the properties. * * @param url the original URL * @param props the parsed/defaulted connection properties * @return a new connection * @throws SQLException if the connection could not be made */ private static Connection makeConnection(String url, Properties props) throws SQLException { return new OctopusConnection(hostSpecs(props), user(props), database(props), props, url); } /** * Returns true if the driver thinks it can open a connection to the * given URL. Typically, drivers will return true if they understand * the subprotocol specified in the URL and false if they don't. Our * protocols start with jdbc:postgresql: * * @param url the URL of the driver * @return true if this driver accepts the given URL * @see java.sql.Driver#acceptsURL */ public boolean acceptsURL(String url) { return parseURL(url, null) != null; } /** * The getPropertyInfo method is intended to allow a generic GUI * tool to discover what properties it should prompt a human for * in order to get enough information to connect to a database. 
* <p/> * <p>Note that depending on the values the human has supplied so * far, additional values may become necessary, so it may be necessary * to iterate through several calls to getPropertyInfo * * @param url the Url of the database to connect to * @param info a proposed list of tag/value pairs that will be sent on * connect open. * @return An array of DriverPropertyInfo objects describing * possible properties. This array may be an empty array if * no properties are required * @see java.sql.Driver#getPropertyInfo */ public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) { Properties copy = new Properties(info); Properties parse = parseURL(url, copy); if (parse != null) copy = parse; PGProperty[] knownProperties = PGProperty.values(); DriverPropertyInfo[] props = new DriverPropertyInfo[knownProperties.length]; for (int i = 0; i < props.length; ++i) props[i] = knownProperties[i].toDriverPropertyInfo(copy); return props; } public static final int MAJORVERSION = 0; // must be same as Octopus major version /** * Gets the drivers major version number * * @return the drivers major version number */ public int getMajorVersion() { return MAJORVERSION; } public static final int MINORVERSION = 1; // must be same as Octopus minor version /** * Get the drivers minor version number * * @return the drivers minor version number */ public int getMinorVersion() { return MINORVERSION; } /** * Returns the server version series of this driver and the * specific build number. */ public static String getVersion() { return "Octopus " + MAJORVERSION + "." + MINORVERSION + "JDBC4.1"; } /** * Report whether the driver is a genuine JDBC compliant driver. A * driver may only report "true" here if it passes the JDBC compliance * tests, otherwise it is required to return false. JDBC compliance * requires full support for the JDBC API and full support for SQL 92 * Entry Level. * <p/> * <p>For PostgreSQL, this is not yet possible, as we are not SQL92 * compliant (yet). 
*/ public boolean jdbcCompliant() { return false; } private static final String[] PROTOCOLS = {"jdbc", "octopus"}; private static final String OCTOPUS_PROTOCOL = String.format("%s:%s:", (Object[]) PROTOCOLS); private static final String DEFAULT_OCTOPUS_PORT = "58000"; /** * Constructs a new DriverURL, splitting the specified URL into its * component parts * * @param url JDBC URL to parse * @param defaults Default properties * @return Properties with elements added from the url */ public static Properties parseURL(String url, Properties defaults) { Properties urlProps = new Properties(defaults); String urlServer = url; String urlArgs = ""; int qPos = url.indexOf('?'); if (qPos != -1) { urlServer = url.substring(0, qPos); urlArgs = url.substring(qPos + 1); } if (!urlServer.startsWith(OCTOPUS_PROTOCOL)) return null; urlServer = urlServer.substring(OCTOPUS_PROTOCOL.length()); if (urlServer.startsWith("//")) { // FIXME: Environment variables urlServer = urlServer.substring(2); int slashIdx = urlServer.indexOf('/'); if (slashIdx > -1) urlServer = urlServer.substring(0, slashIdx); StringBuilder hosts = new StringBuilder(); StringBuilder ports = new StringBuilder(); String[] addrs = urlServer.split(","); for (String addr : addrs) { addr = addr.trim(); String host; String port; int portIdx = addr.lastIndexOf(':'); if (portIdx > -1 && addr.lastIndexOf(']') < portIdx) { host = addr.substring(0, portIdx); port = addr.substring(portIdx + 1); if (port.isEmpty()) { port = DEFAULT_OCTOPUS_PORT; } else { try { Integer.parseInt(port); } catch (NumberFormatException e) { return null; } } } else { host = addr; port = DEFAULT_OCTOPUS_PORT; } if (host.isEmpty()) host = "localhost"; hosts.append(host).append(','); ports.append(port).append(','); } hosts.setLength(hosts.length() - 1); ports.setLength(hosts.length() - 1); urlProps.setProperty("PGHOST", hosts.toString()); urlProps.setProperty("PGPORT", ports.toString()); urlProps.setProperty("PGDBNAME", "<unknown>"); } else { 
urlProps.setProperty("PGHOST", "localhost"); urlProps.setProperty("PGPORT", DEFAULT_OCTOPUS_PORT); urlProps.setProperty("PGDBNAME", "<unknown>"); } // parse the args part of the url String[] args = urlArgs.split("&"); for (String token : args) { if (token.isEmpty()) continue; int pos = token.indexOf('='); if (pos > -1) urlProps.setProperty(token.substring(0, pos), token.substring(pos + 1)); else urlProps.setProperty(token, ""); } return urlProps; } /** * @return the address portion of the URL */ protected static HostSpec[] hostSpecs(Properties props) { String[] hosts = props.getProperty("PGHOST").split(","); String[] ports = props.getProperty("PGPORT").split(","); HostSpec[] hostSpecs = new HostSpec[hosts.length]; for (int i = 0; i < hostSpecs.length; ++i) hostSpecs[i] = new HostSpec(hosts[i], Integer.parseInt(ports[i])); return hostSpecs; } /** * @return the username of the URL */ protected static String user(Properties props) { return props.getProperty("user", ""); } /** * @return the database name of the URL */ protected static String database(Properties props) { return props.getProperty("PGDBNAME", ""); } /** * @return the timeout from the URL, in milliseconds */ protected static long timeout(Properties props) { final int msecPerSec = 1000; String timeout = PGProperty.LOGIN_TIMEOUT.get(props); if (timeout != null) { try { return (long) (Float.parseFloat(timeout) * msecPerSec); } catch (NumberFormatException e) { // Log level isn't set yet, so this doesn't actually // get printed. LOGGER.debug("Couldn't parse loginTimeout value: " + timeout); } } return (long) DriverManager.getLoginTimeout() * msecPerSec; } /* * This method was added in v6.5, and simply throws an SQLException * for an unimplemented method. I decided to do it this way while * implementing the JDBC2 extensions to JDBC, as it should help keep the * overall driver size down. 
 * It now requires the call Class and the function name to help when the
 * driver is used with closed software that don't report the stack trace
 * @param callClass the call Class
 * @param functionName the name of the unimplemented function with the type
 * of its arguments
 * @return PSQLException with a localized message giving the complete
 * description of the unimplemented function
 */
public static SQLFeatureNotSupportedException notImplemented(Class callClass, String functionName) {
    return new SQLFeatureNotSupportedException(
        GT.tr("Method {0} is not yet implemented.", callClass.getName() + "." + functionName),
        PSQLState.NOT_IMPLEMENTED.getState());
}

/**
 * used to turn logging on to a certain level, can be called
 * by specifying fully qualified class ie org.postgresql.Driver.setLogLevel()
 *
 * @param logLevel sets the level which logging will respond to
 *                 OFF turn off logging
 *                 INFO being almost no messages
 *                 DEBUG most verbose
 */
public static void setLogLevel(int logLevel) {
    // Class-level lock keeps the level and the logLevelSet flag consistent.
    synchronized (Driver.class) {
        LOGGER.setLogLevel(logLevel);
        logLevelSet = true;
    }
}

public static int getLogLevel() {
    synchronized (Driver.class) {
        return LOGGER.getLogLevel();
    }
}

// JDBC 4.1 API; this driver does not route its logging through java.util.logging.
public java.util.logging.Logger getParentLogger() throws SQLFeatureNotSupportedException {
    throw notImplemented(this.getClass(), "getParentLogger()");
}

public static SharedTimer getSharedTimer() {
    return sharedTimer;
}

/**
 * Register the driver against {@link DriverManager}. This is done automatically when the class is loaded.
 * Dropping the driver from DriverManager's list is possible using {@link #deregister()} method.
 *
 * @throws IllegalStateException if the driver is already registered
 * @throws SQLException if registering the driver fails
 */
public static void register() throws SQLException {
    if (isRegistered())
        throw new IllegalStateException("Driver is already registered. It can only be registered once.");
    Driver driver = new Driver();
    DriverManager.registerDriver(driver);
    // Remember the registered instance so deregister() can remove exactly it.
    Driver.registeredDriver = driver;
}

/**
 * According to JDBC specification, this driver is registered against {@link DriverManager}
 * when the class is loaded. To avoid leaks, this method allow unregistering the driver
 * so that the class can be gc'ed if necessary.
 *
 * @throws IllegalStateException if the driver is not registered
 * @throws SQLException if deregistering the driver fails
 */
public static void deregister() throws SQLException {
    if (!isRegistered())
        throw new IllegalStateException("Driver is not registered (or it has not been registered using Driver.register() method)");
    DriverManager.deregisterDriver(registeredDriver);
    registeredDriver = null;
}

/**
 * @return {@code true} if the driver is registered against {@link DriverManager}
 */
public static boolean isRegistered() {
    return registeredDriver != null;
}
}
/*
    Copyright 2009 Semantic Discovery, Inc. (www.semanticdiscovery.com)

    This file is part of the Semantic Discovery Toolkit.

    The Semantic Discovery Toolkit is free software: you can redistribute it and/or modify
    it under the terms of the GNU Lesser General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    The Semantic Discovery Toolkit is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU Lesser General Public License for more details.

    You should have received a copy of the GNU Lesser General Public License
    along with The Semantic Discovery Toolkit.  If not, see <http://www.gnu.org/licenses/>.
*/
package org.sd.atn;


import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.sd.util.tree.Tree;
import org.sd.util.tree.TreeBuilderFactory;
import org.sd.xml.DataProperties;
import org.sd.xml.DomContextIterator;
import org.sd.xml.DomContextIteratorFactory;
import org.sd.xml.DomDocument;
import org.sd.xml.DomElement;
import org.sd.xml.DomTextIterationStrategy;
import org.sd.xml.XmlFactory;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

/**
 * JUnit Tests for the ParseConfig class.
 * <p>
 * @author Spence Koehler
 */
public class TestParseConfig extends TestCase {

  public TestParseConfig(String name) {
    super(name);
  }

  // Tests for multipass CompoundParser functionality through ParseConfig.

  // When true, enables AtnState tracing and per-case progress output on stdout.
  private final boolean debug = false;

  public void test1() throws IOException {
    if (debug) AtnState.setTrace(true);

    // run compound parser "1" only, verifying unselected parses too
    if (debug) System.out.println("test1a");
    runTest("parseConfigTest.1.xml", "ParseConfigTest.1a", "test1", "input1", "expectedText-1a", "expectedTrees-1a", "1", null, false, null);

    // run compound parser "1" only, verifying selected parses
    if (debug) System.out.println("test1b");
    runTest("parseConfigTest.1.xml", "ParseConfigTest.1b", "test1", "input1", "expectedText-1b", "expectedTrees-1b", "1", null, true, null);

    if (debug) System.out.println("test1c");
    // run all compound parsers and their parsers in order with 'reset' input reconfiguration
    runTest("parseConfigTest.1.xml", "ParseConfigTest.1c", "test1", "input1", "expectedText-1c", "expectedTrees-1c", null, null, true, ParseSettingsFactory.ReconfigureStrategy.RESET);

    if (debug) System.out.println("test1d");
    // run all compound parsers and their parsers in order with a root input reconfiguration
    runTest("parseConfigTest.1.xml", "ParseConfigTest.1d", "test1", "input1", "expectedText-1d", "expectedTrees-1d", null, null, true, ParseSettingsFactory.ReconfigureStrategy.ROOT);
  }

  public void test2() throws IOException {
    if (debug) System.out.println("test2a");
    runTest("parseConfigTest.2.xml", "ParseConfigTest.2a", "test2", "input2", "expectedText-2a", "expectedTrees-2a", null, null, true, ParseSettingsFactory.ReconfigureStrategy.ROOT);
  }

  public void test3() throws IOException {
    if (debug) System.out.println("test3a");
    runTest("parseConfigTest.3.xml", "ParseConfigTest.3a", "test3", "input3", "expectedText-3a", "expectedTrees-3a", "parser1", null, true, null);
  }

  // Loads the named test resource file and delegates to the params-based overload.
  private final void runTest(String resourceName, String name, String testId, String inputId,
                             String expectedTextId, String expectedTreesId, String compoundParserId,
                             String flowString, boolean onlySelected,
                             ParseSettingsFactory.ReconfigureStrategy strategy) throws IOException {
    final InputStream inputStream = getInputStream(resourceName);
    final TestParamsContainer paramsContainer = new
TestParamsContainer(inputStream); inputStream.close(); runTest(name, paramsContainer, testId, inputId, expectedTextId, expectedTreesId, compoundParserId, flowString, onlySelected, strategy); } private final void runTest(String name, TestParamsContainer paramsContainer, String testId, String inputId, String expectedTextId, String expectedTreesId, String compoundParserId, String flowString, boolean onlySelected, ParseSettingsFactory.ReconfigureStrategy strategy) throws IOException { final TestParams testParams = paramsContainer.testId2Params.get(testId); final ParseConfig parseConfig = buildParseConfig(testParams.getInput("parseConfig")); if (debug) parseConfig.setVerbose(true); final DomElement testInput = testParams.getInput(inputId); assertNotNull("No input '" + inputId + "' specified for test!", testInput != null); // if (configMutator != null) { // configMutator.mutate(parseConfig); // } final List<List<String>> expectedText = testParams.loadNestedLists(expectedTextId); final List<List<Tree<String>>> expectedTrees = loadExpectedTrees(testParams, expectedTreesId); runTest(name, parseConfig, testInput, compoundParserId, flowString, expectedText, expectedTrees, onlySelected, strategy); } private final void runTest(String name, ParseConfig parseConfig, DomElement inputXml, String compoundParserId, String flowStrings, List<List<String>> expectedText, List<List<Tree<String>>> expectedTrees, boolean onlySelected, ParseSettingsFactory.ReconfigureStrategy strategy) throws IOException { // prune the input from the config tree if (inputXml != null) inputXml.prune(); // build input final DomContextIterator input = buildDomContextIterator(inputXml); // build flow final String[] flow = buildFlow(flowStrings); // build ParseConfigTest runTest(name, parseConfig, input, compoundParserId,flow, expectedText, expectedTrees, onlySelected, strategy); } private final void runTest(String name, ParseConfig parseConfig, DomContextIterator input, String compoundParserId, String[] flow, 
List<List<String>> expectedText, List<List<Tree<String>>> expectedTrees,
                             boolean onlySelected, ParseSettingsFactory.ReconfigureStrategy strategy) {
    // Settings come either from the strategy (run all compound parsers) or
    // from the single identified compound parser and its flow.
    MultiParseSettings settings = null;
    if (compoundParserId == null) {
      settings = parseConfig.buildSettings(strategy);
    }
    else {
      settings = parseConfig.buildSettings(compoundParserId, flow);
    }

    // build ParseConfigTest
    runTest(name, parseConfig, input, settings, expectedText, expectedTrees, onlySelected);
  }

  private final void runTest(String name, ParseConfig parseConfig, DomContextIterator input,
                             MultiParseSettings settings, List<List<String>> expectedText,
                             List<List<Tree<String>>> expectedTrees, boolean onlySelected) {
    // build ParseConfigTest
    final ParseConfigTest unitTest = new ParseConfigTest(name, parseConfig, input, settings, expectedText, expectedTrees, onlySelected);
    unitTest.runTest();
  }

  // Opens the named file from this class's "resources" directory on the classpath.
  private final InputStream getInputStream(String resourceName) {
    final String resource = "resources/" + resourceName;
    //System.out.println("loading config '" + this.getClass().getResource(resource) + "'...");
    return this.getClass().getResourceAsStream(resource);
  }

  // Loads the identified expectation as nested string lists and converts to trees.
  private final List<List<Tree<String>>> loadExpectedTrees(TestParams testParams, String id) {
    if (id == null) return null;
    final List<List<String>> treeStrings = testParams.loadNestedLists(id);
    return buildExpectedTrees(treeStrings);
  }

  private final ParseConfig buildParseConfig(String parseConfigXml) throws IOException {
    final DomDocument domDocument = XmlFactory.loadDocument(parseConfigXml, false);
    final DomElement parseConfigElement = domDocument.getDocumentDomElement();
    return buildParseConfig(parseConfigElement);
  }

  private final ParseConfig buildParseConfig(DomElement parseConfigElement) {
    // Point TEST_RESOURCES at the on-disk resources dir so configs can reference it.
    parseConfigElement.setDataProperties(new DataProperties());
    parseConfigElement.getDataProperties().set("TEST_RESOURCES", this.getClass().getResource("resources").getFile());
    final ParseConfig parseConfig = new ParseConfig(parseConfigElement);
    return parseConfig;
  }

  private final DomContextIterator buildDomContextIterator(String inputXml) throws IOException {
    final DomDocument domDocument = XmlFactory.loadDocument(inputXml, false);
    final DomElement inputXmlElement = domDocument.getDocumentDomElement();
    return buildDomContextIterator(inputXmlElement);
  }

  private final DomContextIterator buildDomContextIterator(DomElement inputXmlElement) throws IOException {
    if (inputXmlElement == null) return null;
    final DomContextIterator input = DomContextIteratorFactory.getDomContextIterator(inputXmlElement, null, DomTextIterationStrategy.INSTANCE);
    return input;
  }

  // Splits a comma-delimited flow string into its parser-id steps.
  private final String[] buildFlow(String flowStrings) {
    String[] flow = null;

    if (flowStrings != null) {
      flow = flowStrings.split("\\s*,\\s*");
    }

    return flow;
  }

  private final List<List<String>> buildExpectedText(String[][] expectedTextStrings) {
    final List<List<String>> expectedText = expectedTextStrings == null ? null : new ArrayList<List<String>>();

    if (expectedText != null) {
      for (String[] expectedTexts : expectedTextStrings) {
        final List<String> curTexts = new ArrayList<String>();
        expectedText.add(curTexts);
        for (String expectedTextString : expectedTexts) {
          curTexts.add(expectedTextString);
        }
      }
    }

    return expectedText;
  }

  private final List<List<Tree<String>>> buildExpectedTrees(String[][] expectedTreeStrings) {
    final List<List<Tree<String>>> expectedTrees = expectedTreeStrings == null ? null : new ArrayList<List<Tree<String>>>();

    if (expectedTrees != null) {
      for (String[] expectedTreess : expectedTreeStrings) {
        final List<Tree<String>> curTrees = new ArrayList<Tree<String>>();
        expectedTrees.add(curTrees);
        for (String expectedTreeString : expectedTreess) {
          final Tree<String> expectedTree = TreeBuilderFactory.getStringTreeBuilder().buildTree(expectedTreeString);
          curTrees.add(expectedTree);
        }
      }
    }

    return expectedTrees;
  }

  private final List<List<Tree<String>>> buildExpectedTrees(List<List<String>> expectedTreeStrings) {
    List<List<Tree<String>>> expectedTrees = expectedTreeStrings == null ?
      null : new ArrayList<List<Tree<String>>>();

    if (expectedTrees != null) {
      for (List<String> expectedTreess : expectedTreeStrings) {
        final List<Tree<String>> curTrees = new ArrayList<Tree<String>>();
        expectedTrees.add(curTrees);
        for (String expectedTreeString : expectedTreess) {
          final Tree<String> expectedTree = TreeBuilderFactory.getStringTreeBuilder().buildTree(expectedTreeString);
          curTrees.add(expectedTree);
        }
      }
    }

    return expectedTrees;
  }

  // Encapsulates one configured parse run plus verification of its output.
  private class ParseConfigTest {

    private String name;
    private ParseConfig parseConfig;
    private DomContextIterator input;
    private MultiParseSettings parseSettings;
    private List<List<String>> expectedText;
    private List<List<Tree<String>>> expectedTrees;
    private boolean onlySelected;

    // When there are no expectations, these control what gets dumped to stdout.
    boolean showText = true;
    boolean showParse = true;

    public ParseConfigTest(String name, ParseConfig parseConfig, DomContextIterator input,
                           MultiParseSettings parseSettings, List<List<String>> expectedText,
                           List<List<Tree<String>>> expectedTrees, boolean onlySelected) {
      this.name = name;
      this.parseSettings = parseSettings;
      this.parseConfig = parseConfig;
      this.input = input;
      this.expectedText = expectedText;
      this.expectedTrees = expectedTrees;
      this.onlySelected = onlySelected;
    }

    public void runTest() {
      //parseConfig.setVerbose("ParseConfigTest.3a".equals(name));
      final ParseOutputCollector output = parseConfig.parse(input, parseSettings, null, null);

      if (expectedText != null) {
        final int numParseResults = output == null ? 0 : output.getParseResults() == null ?
0 : output.getParseResults().size();
        int parseResultNum = 0;

        if (numParseResults > 0) {
          for (AtnParseResult parseResult : output.getParseResults()) {
            int expectedParseNum = 0;
            final int numParses = parseResult.getNumParses();
            for (int parseNum = 0; parseNum < numParses; ++parseNum) {
              final AtnParse parse = parseResult.getParse(parseNum);
              // Verify a parse only when it is selected (or we verify all parses).
              if (onlySelected && parse.getSelected() || !onlySelected) {
                assertTrue(name + ": more parseResults (" + (parseResultNum + 1) + ") than expected (" + expectedText.size() + ")!" +
                           " parse=" + parse.getParsedText(),
                           parseResultNum < expectedText.size());
                assertTrue(name + ": more parses(" + (expectedParseNum + 1) + ") than expected (" + expectedText.get(parseResultNum).size() + ") parseResultNum=" + parseResultNum +
                           " parse=" + parse.getParsedText(),
                           expectedParseNum < expectedText.get(parseResultNum).size());

                // ParsedText
                assertEquals(name + ": Bad parsed text (#" + parseResultNum + ", " + expectedParseNum + ").",
                             expectedText.get(parseResultNum).get(expectedParseNum),
                             parse.getParsedText());

                // ParseTree
                assertEquals(name + ": Bad parse (#" + parseResultNum + ", " + expectedParseNum + ").",
                             expectedTrees.get(parseResultNum).get(expectedParseNum),
                             parse.getParseTree());

                ++expectedParseNum;
              }
            }
            ++parseResultNum;
          }
        }

        // final int numParseResults = output == null ? 0 : output.getParseResults() == null ? 0 : output.getParseResults().size();
        assertEquals(name + ": Bad number of parses.", expectedText.size(), parseResultNum);
      }
      else {
        // No expectations: dump the results so they can be eyeballed and
        // pasted back into the test resource as expectations.
        if (showText) {
          if (output.getParseResults() != null) {
            System.out.println("\n<list>");
            for (AtnParseResult parseResult : output.getParseResults()) {
              System.out.println("\t<list>");
              final int numParses = parseResult.getNumParses();
              for (int parseNum = 0; parseNum < numParses; ++parseNum) {
                final AtnParse parse = parseResult.getParse(parseNum);
                if (onlySelected && parse.getSelected() || !onlySelected) {
                  System.out.println("\t\t<item>" + parse.getParsedText() + "</item>");
                }
              }
              System.out.println("\t</list>");
            }
            System.out.println("</list>");
          }

          if (showParse) {
            if (output.getParseResults() != null) {
              System.out.println("\n<list>");
              for (AtnParseResult parseResult : output.getParseResults()) {
                System.out.println("\t<list>");
                final int numParses = parseResult.getNumParses();
                for (int parseNum = 0; parseNum < numParses; ++parseNum) {
                  final AtnParse parse = parseResult.getParse(parseNum);
                  if (onlySelected && parse.getSelected() || !onlySelected) {
                    System.out.println("\t\t<item>" + parse.getParseTree().toString() + "</item>");
                  }
                }
                System.out.println("\t</list>");
              }
              System.out.println("</list>");
            }
            else {
              System.out.println("\n*No Parse Results*");
            }
          }
        }
      }
    }
  }

  // Wraps one <test> element, exposing its inputs and expectations by name.
  private final class TestParams {

    public final DomElement testElement;

    public TestParams(DomElement testElement) {
      this.testElement = testElement;
    }

    public DomElement getInput(String nodeName) {
      return (DomElement)testElement.selectSingleNode("inputs/" + nodeName);
    }

    public DomElement getExpectation(String nodeName) {
      return (DomElement)testElement.selectSingleNode("expectations/" + nodeName);
    }

    // Loads an expectation holding a <list> of <list>s of <item> text values.
    public List<List<String>> loadNestedLists(String expectationId) {
      if (expectationId == null) return null;
      final DomElement element = getExpectation(expectationId);
      final DomElement listElement = (DomElement)element.selectSingleNode("list");
      return loadNestedLists(listElement);
    }

    private final List<List<String>> loadNestedLists(DomElement domElement) {
      final List<List<String>> result = new ArrayList<List<String>>();

      final NodeList nodeList = domElement.selectNodes("list");
      for (int i = 0; i < nodeList.getLength(); ++i) {
        final DomElement listNode = (DomElement)nodeList.item(i);
        result.add(loadList(listNode));
      }

      return result;
    }

    private final List<String> loadList(DomElement listNode) {
      final List<String> result = new ArrayList<String>();

      final NodeList nodeList = listNode.selectNodes("item");
      for (int i = 0; i < nodeList.getLength(); ++i ) {
        final DomElement itemNode = (DomElement)nodeList.item(i);
        result.add(itemNode.getTextContent());
      }

      return result;
    }
  }

  // Indexes all <test> elements of the loaded test file by their <id> text.
  private final class TestParamsContainer {

    public final DomElement testRoot;
    public final Map<String, TestParams> testId2Params;

    public TestParamsContainer(InputStream testInputStream) throws IOException {
      final DomDocument domDocument = XmlFactory.loadDocument(testInputStream, false, null);
      this.testRoot = domDocument.getDocumentDomElement();
      this.testId2Params = new HashMap<String, TestParams>();

      final NodeList testNodes = testRoot.selectNodes("test");
      for (int i = 0; i < testNodes.getLength(); ++i) {
        final DomElement testElement = (DomElement)testNodes.item(i);
        final DomElement testIdElement = (DomElement)testElement.selectSingleNode("id");
        final String id = testIdElement.getTextContent();
        testId2Params.put(id, new TestParams(testElement));
      }
    }
  }

  public static Test suite() {
    TestSuite suite = new TestSuite(TestParseConfig.class);
    return suite;
  }

  public static void main(String[] args) {
    junit.textui.TestRunner.run(suite());
  }
}
/* * Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the License. You * may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. See accompanying * LICENSE file. */ /** * */ package com.pivotal.gemfirexd.internal.engine.distributed.metadata; import com.gemstone.gemfire.cache.Region; import com.pivotal.gemfirexd.internal.engine.Misc; import com.pivotal.gemfirexd.internal.engine.GemFireXDQueryObserver; import com.pivotal.gemfirexd.internal.engine.GemFireXDQueryObserverAdapter; import com.pivotal.gemfirexd.internal.engine.GemFireXDQueryObserverHolder; import com.pivotal.gemfirexd.internal.engine.distributed.metadata.DynamicKey; import com.pivotal.gemfirexd.internal.engine.distributed.metadata.JunctionQueryInfo; import com.pivotal.gemfirexd.internal.engine.distributed.metadata.QueryInfo; import com.pivotal.gemfirexd.internal.engine.distributed.metadata.SelectQueryInfo; import com.pivotal.gemfirexd.internal.engine.sql.execute.AbstractGemFireActivation; import com.pivotal.gemfirexd.internal.engine.store.CompactCompositeRegionKey; import com.pivotal.gemfirexd.internal.engine.store.GemFireContainer; import com.pivotal.gemfirexd.internal.engine.store.RegionKey; import com.pivotal.gemfirexd.internal.iapi.error.StandardException; import com.pivotal.gemfirexd.internal.iapi.sql.Activation; import com.pivotal.gemfirexd.internal.iapi.sql.conn.LanguageConnectionContext; import com.pivotal.gemfirexd.internal.iapi.types.*; import com.pivotal.gemfirexd.internal.impl.jdbc.EmbedStatement; import 
// NOTE(review): this chunk begins mid-import — the "import " keyword for the
// line below is in the preceding (unseen) part of the file.
com.pivotal.gemfirexd.internal.impl.sql.GenericPreparedStatement;
import com.pivotal.gemfirexd.jdbc.JdbcTestBase;

import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;

import junit.framework.TestSuite;
import junit.textui.TestRunner;

/**
 * Tests that {@code QueryInfo} correctly classifies SELECT statements as
 * primary-key-convertible (i.e. executable as a region GET) and that the
 * evaluated primary key matches the expected region key. Verification is done
 * via {@link GemFireXDQueryObserverAdapter} callbacks installed around query
 * execution.
 *
 * @author Asif
 */
@SuppressWarnings("serial")
public class QueryInfoTest extends JdbcTestBase {

  // Set by the observer callbacks; asserted at the end of each test to prove
  // the observer actually fired. Reset in tearDown().
  private boolean callbackInvoked = false;

  public QueryInfoTest(String name) {
    super(name);
  }

  public static void main(String[] args) {
    TestRunner.run(new TestSuite(QueryInfoTest.class));
  }

  /**
   * Statement-based queries against a single-column primary key table:
   * equality on the PK column should be GET-convertible, a range predicate
   * should not.
   */
  public void testConvertibleToGet_1() throws SQLException, StandardException {
    Connection conn = getConnection();
    createTableWithPrimaryKey(conn);
    String[] queries = new String[] {
        "Select * from orders where id =8",
        "Select id, cust_name, vol, security_id, num, addr from orders where id =8",
        "Select id, cust_name, vol, security_id, num, addr from orders where id > 8",
        "Select id as id, cust_name as cust_name, vol, security_id, num, addr from orders where id =8",
    /* "Select * from orders where id = 8 and cust_name = 'asif'" */};
    // Expected convertibility per query, index-aligned with `queries`.
    final boolean[] getConvertibles = new boolean[] { true, true, false,
        true /* false */};
    RegionKey gfk = getGemFireKey(8,
        Misc.getRegionForTable(getCurrentDefaultSchemaName() + ".ORDERS", true));
    // Expected evaluated key per query; null where not GET-convertible.
    final Object[] primaryKeys = new Object[] { gfk, gfk, null, gfk /* false */};
    GemFireXDQueryObserver old = null;
    try {
      old = GemFireXDQueryObserverHolder
          .setInstance(new GemFireXDQueryObserverAdapter() {
            // Shared mutable state across the two callbacks: `index` is
            // advanced only in beforeQueryExecution, so both callbacks for
            // the same query see the same index value.
            private int index = 0;

            private DynamicKey dk = null;

            private QueryInfo qi;

            @Override
            public void queryInfoObjectFromOptmizedParsedTree(QueryInfo qInfo,
                GenericPreparedStatement gps, LanguageConnectionContext lcc) {
              QueryInfoTest.this.callbackInvoked = true;
              try {
                assertTrue(qInfo instanceof SelectQueryInfo);
                assertTrue(qInfo.isPrimaryKeyBased() == getConvertibles[index]);
                try {
                  // GFE activation creation must agree with PK-convertibility.
                  assertTrue(qInfo.createGFEActivation()==
                      qInfo.isPrimaryKeyBased());
                }catch(Exception e) {
                  e.printStackTrace();
                  fail(e.toString());
                }
                // May be null for non-convertible queries; checked later
                // against primaryKeys[index].
                dk = (DynamicKey)qInfo.getPrimaryKey();
                assertNotNull(qInfo.getRegion());
                qi = qInfo;
              } catch (StandardException se) {
                fail("Test failed becaus eof exception=" + se);
              }
            }

            @Override
            public void beforeQueryExecution(EmbedStatement stmt,
                Activation activation) {
              try {
                if(primaryKeys[index] == null) {
                  assertTrue(dk == null);
                }
                else {
                  validatePrimaryKey(dk.getEvaluatedPrimaryKey(activation,
                      ((GemFireContainer)qi.getRegion().getUserAttribute()),
                      false), primaryKeys[index]);
                }
                ++index;
              } catch (StandardException e) {
                fail("Unexpected exception while validating key " + e);
              }
            }
          });
      // Creating a statement object that we can use for running various
      // SQL statements commands against the database.
      Statement s = conn.createStatement();
      for (int i = 0; i < queries.length; ++i) {
        try {
          s.executeQuery(queries[i]);
        } catch (SQLException e) {
          throw new SQLException(e.toString()
              + " Exception in executing query = " + queries[i], e);
        }
      }
      assertTrue(this.callbackInvoked);
    } finally {
      // Always restore the previous observer so later tests are unaffected.
      if (old != null) {
        GemFireXDQueryObserverHolder.setInstance(old);
      }
    }
  }

  /**
   * Same convertibility checks as {@link #testConvertibleToGet_1()} but via
   * PreparedStatement with a parameterized PK predicate; additionally verifies
   * that a GemFireXD activation is used exactly for the convertible queries.
   */
  public void testPreparedStatementConvertibleToGet_1() throws SQLException,
      StandardException {
    Connection conn = getConnection();
    createTableWithPrimaryKey(conn);
    // Flipped to true by beforeGemFireResultSetExecuteOnActivation for the
    // query at index-1; compared against getConvertibles afterwards.
    final boolean[] usedGemFireXDActivation = new boolean[] { false, false,
        false, false };
    String[] queries = new String[] {
        "Select * from orders where id =?",
        "Select id, cust_name, vol, security_id, num, addr from orders where id =?",
        "Select id, cust_name, vol, security_id, num, addr from orders where id > ?",
        "Select id as id, cust_name as cust_name, vol, security_id, num, addr from orders where id =?",
    /* "Select * from orders where id = 8 and cust_name = 'asif'" */};
    final boolean[] getConvertibles = new boolean[] { true, true, false,
        true /* false */};
    // Rahul: Changes after single dvd key was implemented for region key.
    //RegionKey gfk = new CompositeRegionKey( new DataValueDescriptor[]{new SQLInteger(8)});
    RegionKey gfk = getGemFireKey(8,
        Misc.getRegionForTable(getCurrentDefaultSchemaName() + ".ORDERS", true));
    final Object[] primaryKeys = new Object[] { gfk, gfk, null, gfk /* false */};
    GemFireXDQueryObserver old = null;
    try {
      old = GemFireXDQueryObserverHolder
          .setInstance(new GemFireXDQueryObserverAdapter() {
            // Here `index` is advanced in the parse callback, so the
            // activation callback reads state for query `index - 1`.
            private int index = 0;

            private DynamicKey dk = null;

            private QueryInfo qi;

            @Override
            public void queryInfoObjectFromOptmizedParsedTree(QueryInfo qInfo,
                GenericPreparedStatement gps, LanguageConnectionContext lcc) {
              QueryInfoTest.this.callbackInvoked = true;
              try {
                assertTrue(qInfo instanceof SelectQueryInfo);
                assertTrue(qInfo.isPrimaryKeyBased() == getConvertibles[index]);
                if (getConvertibles[index]) {
                  assertTrue(qInfo.getPrimaryKey() instanceof DynamicKey);
                  dk = (DynamicKey)qInfo.getPrimaryKey();
                }
                else {
                  dk = null;
                }
                assertNotNull(qInfo.getRegion());
                ++index;
                qi = qInfo;
              } catch (StandardException se) {
                fail("Test failed becaus eof exception=" + se);
              }
            }

            @Override
            public void beforeGemFireResultSetExecuteOnActivation(
                AbstractGemFireActivation activation) {
              usedGemFireXDActivation[index - 1] = true;
              try {
                assertEquals(primaryKeys[index - 1], dk.getEvaluatedPrimaryKey(
                    activation, ((GemFireContainer)qi.getRegion()
                        .getUserAttribute()), false));
              } catch (StandardException se) {
                fail("Test failed because of exception=" + se);
              }
            }
          });
      // Creating a statement object that we can use for running various
      // SQL statements commands against the database.
      for (int i = 0; i < queries.length; ++i) {
        PreparedStatement s = conn.prepareStatement(queries[i]);
        try {
          s.setInt(1, 8);
          s.executeQuery();
        } catch (SQLException e) {
          throw new SQLException(e.toString()
              + " Exception in executing query = " + queries[i], e
              .getSQLState());
        }
      }
      for (int i = 0; i < queries.length; ++i) {
        assertTrue(usedGemFireXDActivation[i] == getConvertibles[i]);
      }
      assertTrue(this.callbackInvoked);
    } finally {
      if (old != null) {
        GemFireXDQueryObserverHolder.setInstance(old);
      }
    }
  }

  /**
   * GET-convertibility with column aliases, duplicated select columns and
   * reordered projection lists — all of these should still be convertible.
   */
  public void testConvertibleToGet_2() throws SQLException, StandardException {
    Connection conn = getConnection();
    createTableWithPrimaryKey(conn);
    String[] queries = new String[] {
        "Select id as id1, cust_name , vol, security_id, num, addr from orders where id =8",
        "Select id as id, id ,id, id, id, id from orders where id =8",
        "Select id , vol, cust_name , security_id, num, addr from orders where id =8"
    /* "Select * from orders where id = 8 and cust_name = 'asif'" */};
    final boolean[] getConvertibles = new boolean[] { true, true, true };
    RegionKey gfk = getGemFireKey(8,
        Misc.getRegionForTable(getCurrentDefaultSchemaName() + ".ORDERS", true));
    final Object[] primaryKeys = new Object[] { gfk, gfk, gfk /* false */};
    GemFireXDQueryObserver old = null;
    try {
      old = GemFireXDQueryObserverHolder
          .setInstance(new GemFireXDQueryObserverAdapter() {
            private int index = 0;

            private DynamicKey dk = null;

            private QueryInfo qi;

            @Override
            public void queryInfoObjectFromOptmizedParsedTree(QueryInfo qInfo,
                GenericPreparedStatement gps, LanguageConnectionContext lcc) {
              QueryInfoTest.this.callbackInvoked = true;
              try {
                assertTrue(qInfo instanceof SelectQueryInfo);
                assertTrue(qInfo.isPrimaryKeyBased() == getConvertibles[index]);
                try {
                  assertTrue(qInfo.createGFEActivation()==
                      qInfo.isPrimaryKeyBased());
                }catch(Exception e) {
                  e.printStackTrace();
                  fail(e.toString());
                }
                dk = (DynamicKey)qInfo.getPrimaryKey();
                assertNotNull(qInfo.getRegion());
                qi = qInfo;
              } catch (StandardException se) {
                fail("Test failed becaus eof exception=" + se);
              }
            }

            @Override
            public void beforeQueryExecution(EmbedStatement stmt,
                Activation activation) {
              try {
                if(primaryKeys[index] == null) {
                  assertTrue(dk == null);
                }
                else {
                  validatePrimaryKey(dk.getEvaluatedPrimaryKey(activation,
                      (GemFireContainer)qi.getRegion().getUserAttribute(),
                      false), primaryKeys[index]);
                }
                ++index;
              } catch (StandardException e) {
                fail("Unexpected exception while validating key " + e);
              }
            }
          });
      // Creating a statement object that we can use for running various
      // SQL statements commands against the database.
      Statement s = conn.createStatement();
      for (int i = 0; i < queries.length; ++i) {
        try {
          s.executeQuery(queries[i]);
        } catch (SQLException e) {
          throw new SQLException(e.toString()
              + " Exception in executing query = " + queries[i], e
              .getSQLState());
        }
      }
      assertTrue(this.callbackInvoked);
    } finally {
      if (old != null) {
        GemFireXDQueryObserverHolder.setInstance(old);
      }
    }
  }

  /**
   * PreparedStatement against a composite (id, cust_name) primary key:
   * a predicate on only one PK column is not convertible; a predicate on
   * both columns is.
   */
  public void testPreparedStatementConvertibleToGetForCompositeKeys()
      throws SQLException, IOException, StandardException {
    Connection conn = getConnection();
    createTableWithCompositeKey(conn);
    // NOTE(review): second slot is pre-initialized to true, so the final
    // usedGemFireXDActivation[1] == getConvertibles[1] assertion would pass
    // even if the activation callback never fired for query 1 — verify intent.
    final boolean[] usedGemFireXDActivation = new boolean[] { false, true };
    String[] queries = new String[] {
        "Select * from orders where id =?",
        /*
         * "Select id, cust_name, vol, security_id, num, addr from orders where id
         * =8", "Select id, cust_name, vol, security_id, num, addr from orders where
         * id > 8", "Select id as id, cust_name as cust_name, vol, security_id, num,
         * addr from orders where id =8",
         */
        "Select * from orders where id =? and cust_name= ?"
    /* "Select * from orders where id = 8 and cust_name = 'asif'" */};
    Region<?, ?> tableRegion = Misc.getRegionForTable(
        getCurrentDefaultSchemaName() + ".orders".toUpperCase(), true);
    GemFireContainer container = (GemFireContainer)tableRegion
        .getUserAttribute();
    RegionKey gfk = new CompactCompositeRegionKey(new DataValueDescriptor[] {
        new SQLInteger(8), new SQLVarchar("asif") },
        container.getExtraTableInfo());
    final boolean[] getConvertibles = new boolean[] { false, true };
    final Object[] primaryKeys = new Object[] { null, gfk };
    GemFireXDQueryObserver old = null;
    try {
      old = GemFireXDQueryObserverHolder
          .setInstance(new GemFireXDQueryObserverAdapter() {
            private int index = 0;

            private DynamicKey dk = null;

            private QueryInfo qi;

            @Override
            public void queryInfoObjectFromOptmizedParsedTree(QueryInfo qInfo,
                GenericPreparedStatement gps, LanguageConnectionContext lcc) {
              QueryInfoTest.this.callbackInvoked = true;
              try {
                assertTrue(qInfo instanceof SelectQueryInfo);
                assertTrue(qInfo.isPrimaryKeyBased() == getConvertibles[index]);
                if (getConvertibles[index]) {
                  assertTrue(qInfo.getPrimaryKey() instanceof DynamicKey);
                  dk = (DynamicKey)qInfo.getPrimaryKey();
                }
                else {
                  dk = null;
                }
                assertNotNull(qInfo.getRegion());
                ++index;
                qi = qInfo;
              } catch (StandardException se) {
                fail("Test failed becaus eof exception=" + se);
              }
            }

            @Override
            public void beforeGemFireResultSetExecuteOnActivation(
                AbstractGemFireActivation activation) {
              usedGemFireXDActivation[index - 1] = true;
              try {
                validatePrimaryKey(primaryKeys[index - 1], dk
                    .getEvaluatedPrimaryKey(activation, (GemFireContainer)qi
                        .getRegion().getUserAttribute(), false));
              } catch (StandardException se) {
                fail("Test failed because of exception=" + se);
              }
            }
          });
      // Creating a statement object that we can use for running various
      // SQL statements commands against the database.
      for (int i = 0; i < queries.length; ++i) {
        PreparedStatement s = conn.prepareStatement(queries[i]);
        s.setInt(1, 8);
        // Only the second query has the cust_name parameter.
        if (i == 1) {
          s.setString(2, "asif");
        }
        s.executeQuery();
      }
      for (int i = 0; i < queries.length; ++i) {
        assertTrue(usedGemFireXDActivation[i] == getConvertibles[i]);
      }
      assertTrue(this.callbackInvoked);
    } finally {
      if (old != null) {
        GemFireXDQueryObserverHolder.setInstance(old);
      }
    }
  }

  /**
   * Statement-based version of the composite-key convertibility checks.
   */
  public void testConvertibleToGetForCompositeKeys() throws SQLException,
      IOException, StandardException {
    Connection conn = getConnection();
    createTableWithCompositeKey(conn);
    String[] queries = new String[] {
        "Select * from orders where id =8",
        /*
         * "Select id, cust_name, vol, security_id, num, addr from orders where id
         * =8", "Select id, cust_name, vol, security_id, num, addr from orders where
         * id > 8", "Select id as id, cust_name as cust_name, vol, security_id, num,
         * addr from orders where id =8",
         */
        "Select * from orders where id =8 and cust_name='asif'"
    /* "Select * from orders where id = 8 and cust_name = 'asif'" */};
    final boolean[] getConvertibles = new boolean[] { false, true };
    // Remember to capitalize the name.
    final Region<?, ?> tableRegion = Misc.getRegionForTable(
        getCurrentDefaultSchemaName() + ".orders".toUpperCase(), true);
    RegionKey gfk = new CompactCompositeRegionKey(new DataValueDescriptor[] {
        new SQLInteger(8), new SQLVarchar("asif") },
        ((GemFireContainer)tableRegion.getUserAttribute()).getExtraTableInfo());
    final Object[] primaryKeys = new Object[] { null, gfk};
    GemFireXDQueryObserver old = null;
    try {
      old = GemFireXDQueryObserverHolder
          .setInstance(new GemFireXDQueryObserverAdapter() {
            private int index = 0;

            private DynamicKey dk = null;

            private QueryInfo qi;

            @Override
            public void queryInfoObjectFromOptmizedParsedTree(QueryInfo qInfo,
                GenericPreparedStatement gps, LanguageConnectionContext lcc) {
              QueryInfoTest.this.callbackInvoked = true;
              try {
                assertTrue(qInfo instanceof SelectQueryInfo);
                assertTrue(qInfo.isPrimaryKeyBased() == getConvertibles[index]);
                try {
                  assertTrue(qInfo.createGFEActivation()==
                      qInfo.isPrimaryKeyBased());
                }catch(Exception e) {
                  e.printStackTrace();
                  fail(e.toString());
                }
                dk = (DynamicKey)qInfo.getPrimaryKey();
                assertNotNull(qInfo.getRegion());
                qi = qInfo;
              } catch (StandardException se) {
                fail("Test failed becaus eof exception=" + se);
              }
            }

            @Override
            public void beforeQueryExecution(EmbedStatement stmt,
                Activation activation) {
              try {
                if( primaryKeys[index] == null ) {
                  assertTrue(dk == null);
                }
                else {
                  validatePrimaryKey(dk.getEvaluatedPrimaryKey(activation,
                      (GemFireContainer)qi.getRegion().getUserAttribute(),
                      false), primaryKeys[index]);
                }
                ++index;
              } catch (StandardException e) {
                fail("Unexpected exception while validating key " + e);
              }
            }
          });
      // Creating a statement object that we can use for running various
      // SQL statements commands against the database.
      Statement s = conn.createStatement();
      for (int i = 0; i < queries.length; ++i) {
        try {
          s.executeQuery(queries[i]);
        } catch (SQLException e) {
          throw new SQLException(e.toString()
              + " Exception in executing query = " + queries[i], e
              .getSQLState());
        }
      }
      assertTrue(this.callbackInvoked);
    } finally {
      if (old != null) {
        GemFireXDQueryObserverHolder.setInstance(old);
      }
    }
  }

  /**
   * Sanity check that a range query still parses a QueryInfo with a region
   * attached even though it is not GET-convertible.
   */
  public void testInconvertibleToGetQuery() throws Exception {
    Connection conn = getConnection();
    createTableWithPrimaryKey(conn);
    String[] queries = new String[] { "Select * from orders where id > 8"
    /* "Select * from orders where id = 8 and cust_name = 'asif'" */};
    GemFireXDQueryObserver old = null;
    try {
      old = GemFireXDQueryObserverHolder
          .setInstance(new GemFireXDQueryObserverAdapter() {
            // NOTE(review): `index` is incremented but never read here.
            private int index = 0;

            @Override
            public void queryInfoObjectFromOptmizedParsedTree(QueryInfo qInfo,
                GenericPreparedStatement gps, LanguageConnectionContext lcc) {
              QueryInfoTest.this.callbackInvoked = true;
              assertNotNull(qInfo.getRegion());
              ++index;
            }
          });
      // Creating a statement object that we can use for running various
      // SQL statements commands against the database.
      Statement s = conn.createStatement();
      for (int i = 0; i < queries.length; ++i) {
        try {
          s.executeQuery(queries[i]);
        } catch (SQLException e) {
          throw new SQLException(e.toString()
              + " Exception in executing query = " + queries[i], e
              .getSQLState());
        }
      }
      assertTrue(this.callbackInvoked);
    } finally {
      if (old != null) {
        GemFireXDQueryObserverHolder.setInstance(old);
      }
    }
  }

  /**
   * Asserts that the actual evaluated key equals the expected one; byte
   * arrays are compared element-wise, everything else via equals().
   */
  private void validatePrimaryKey(Object actual, Object expected) {
    if (expected instanceof byte[]) {
      assertTrue(actual instanceof byte[]);
      byte[] act = (byte[])actual;
      byte[] expct = (byte[])expected;
      assertTrue(act.length == expct.length);
      for (int i = 0; i < act.length; ++i) {
        assertEquals(act[i], expct[i]);
      }
    }
    else {
      assertEquals("got: " + actual, actual, expected);
    }
  }

  /** Creates the ORDERS table with a single-column primary key on id. */
  public void createTableWithPrimaryKey(Connection conn) throws SQLException {
    Statement s = conn.createStatement();
    s.execute("drop table if exists orders");
    // We create a table...
    s.execute("create table orders"
        + "(id int PRIMARY KEY, cust_name varchar(200), vol int, "
        + "security_id varchar(10), num int, addr varchar(100))");
    s.close();
  }

  /** Creates the ORDERS table with a composite (id, cust_name) primary key. */
  public void createTableWithCompositeKey(Connection conn) throws SQLException {
    Statement s = conn.createStatement();
    s.execute("drop table if exists orders");
    // We create a table...
    s.execute("create table orders"
        + "(id int , cust_name varchar(200), vol int, "
        + "security_id varchar(10), num int, addr varchar(100),"
        + " Primary Key (id, cust_name))");
    s.close();
  }

  /**
   * This is to test the sorting logic used in JunctionQueryInfo. The code is
   * copy pasted from the function
   *
   * @see JunctionQueryInfo.sortOperandInIncreasingColumnPosition Need to find a
   *      better way to test it out . Creating dummy JunctionQueryInfo and its
   *      other artifacts is a bit painful
   */
  public void testSortLogic() {
    // Each fixture is a permutation of 0..n-1; after sort() the array must
    // equal the identity permutation.
    int test1[] = new int[] { 0, 1, 2, 3 };
    this.sort(test1);
    for (int i = 0; i < 4; ++i) {
      assertEquals(i, test1[i]);
    }
    int test2[] = new int[] { 0 };
    this.sort(test2);
    for (int i = 0; i < 1; ++i) {
      assertEquals(i, test2[i]);
    }
    int test3[] = new int[] { 0, 1 };
    this.sort(test3);
    for (int i = 0; i < 2; ++i) {
      assertEquals(i, test3[i]);
    }
    int test4[] = new int[] { 1, 0 };
    this.sort(test4);
    for (int i = 0; i < 2; ++i) {
      assertEquals(i, test4[i]);
    }
    int test5[] = new int[] { 1, 2, 0 };
    this.sort(test5);
    for (int i = 0; i < 3; ++i) {
      assertEquals(i, test5[i]);
    }
    int test6[] = new int[] { 1, 2, 0, 5, 4, 3, 9, 6, 7, 8 };
    this.sort(test6);
    for (int i = 0; i < 10; ++i) {
      assertEquals(i, test6[i]);
    }
  }

  /**
   * In-place insertion sort (ascending), copied from
   * JunctionQueryInfo.sortOperandInIncreasingColumnPosition so the shift
   * logic can be exercised on plain ints.
   *
   * @see JunctionQueryInfo.sortOperandInIncreasingColumnPosition Need to find a
   *      better way to test it out . Creating dummy JunctionQueryInfo and its
   *      other artifacts is a bit painful
   */
  private void sort(int[] test) {
    // The checks before this function is invoked
    // have ensured that all the operands are of type ComparisonQueryInfo
    // and of the form var = constant. Also need for sorting will not arise
    // if there are only two operands
    int len = test.length;
    // NOTE(review): the `outer` label is never referenced.
    outer: for (int j = 0; j < len; ++j) {
      int toSort = test[j];
      inner: for (int i = j - 1; i > -1; --i) {
        int currSorted = test[i];
        if (currSorted < toSort) {
          // Found the position
          // Pick the next to sort
          if (i + 1 != j) {
            test[i + 1] = toSort;
          }
          break inner;
        }
        else {
          // Advance the current sorted to next & create an hole
          test[i + 1] = currSorted;
          if (i == 0) {
            //Reached the end just set the toSort at 0
            test[0] = toSort;
          }
        }
      }
    }
  }

  /** Resets the callback flag between tests before delegating cleanup. */
  @Override
  public void tearDown() throws Exception {
    this.callbackInvoked = false;
    super.tearDown();
  }
}
/*
 * Copyright 2014-2016 CyberVision, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kaaproject.kaa.common.dto;

import java.io.Serializable;

/**
 * Base DTO for versioned, activatable structures (e.g. configurations or
 * profile filters) attached to an application and endpoint group.
 *
 * <p>Note: {@link #equals(Object)} and {@link #hashCode()} deliberately use
 * only a subset of fields (applicationId, endpointGroupId, sequenceNumber,
 * lastModifyTime, body, status) — id, timestamps, usernames, description,
 * endpointCount and version are NOT part of equality.
 */
public abstract class AbstractStructureDto implements HasId, Serializable {

    private static final long serialVersionUID = -983834466038615147L;

    protected String id;
    protected String applicationId;
    protected String endpointGroupId;
    // Monotonically growing change counter; see incrementSeqNum().
    protected int sequenceNumber;
    protected String description;
    // All times below are epoch-millis style longs (raw values; no TZ info).
    protected long createdTime;
    protected long lastModifyTime;
    protected long activatedTime;
    protected long deactivatedTime;
    protected String createdUsername;
    protected String modifiedUsername;
    protected String activatedUsername;
    protected String deactivatedUsername;
    // Serialized structure payload.
    protected String body;
    protected UpdateStatus status;
    protected long endpointCount;
    protected Long version;

    public AbstractStructureDto() {
    }

    /**
     * Field-by-field copy constructor (shallow — strings and Long are
     * immutable, so this is effectively a full copy).
     */
    public AbstractStructureDto(AbstractStructureDto other) {
        this.id = other.id;
        this.applicationId = other.applicationId;
        this.endpointGroupId = other.endpointGroupId;
        this.sequenceNumber = other.sequenceNumber;
        this.description = other.description;
        this.createdTime = other.createdTime;
        this.lastModifyTime = other.lastModifyTime;
        this.activatedTime = other.activatedTime;
        this.deactivatedTime = other.deactivatedTime;
        this.createdUsername = other.createdUsername;
        this.modifiedUsername = other.modifiedUsername;
        this.activatedUsername = other.activatedUsername;
        this.deactivatedUsername = other.deactivatedUsername;
        this.body = other.body;
        this.status = other.status;
        this.endpointCount = other.endpointCount;
        this.version = other.version;
    }

    @Override
    public String getId() {
        return id;
    }

    @Override
    public void setId(String id) {
        this.id = id;
    }

    public String getApplicationId() {
        return applicationId;
    }

    public void setApplicationId(String applicationId) {
        this.applicationId = applicationId;
    }

    public String getEndpointGroupId() {
        return endpointGroupId;
    }

    public void setEndpointGroupId(String endpointGroupId) {
        this.endpointGroupId = endpointGroupId;
    }

    public int getSequenceNumber() {
        return sequenceNumber;
    }

    public void setSequenceNumber(int sequenceNumber) {
        this.sequenceNumber = sequenceNumber;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public long getCreatedTime() {
        return createdTime;
    }

    public void setCreatedTime(long createdTime) {
        this.createdTime = createdTime;
    }

    public long getLastModifyTime() {
        return lastModifyTime;
    }

    public void setLastModifyTime(long lastModifyTime) {
        this.lastModifyTime = lastModifyTime;
    }

    public long getActivatedTime() {
        return activatedTime;
    }

    public void setActivatedTime(long activatedTime) {
        this.activatedTime = activatedTime;
    }

    public long getDeactivatedTime() {
        return deactivatedTime;
    }

    public void setDeactivatedTime(long deactivatedTime) {
        this.deactivatedTime = deactivatedTime;
    }

    public String getCreatedUsername() {
        return createdUsername;
    }

    public void setCreatedUsername(String createdUsername) {
        this.createdUsername = createdUsername;
    }

    public String getModifiedUsername() {
        return modifiedUsername;
    }

    public void setModifiedUsername(String modifiedUsername) {
        this.modifiedUsername = modifiedUsername;
    }

    public String getActivatedUsername() {
        return activatedUsername;
    }

    public void setActivatedUsername(String activatedUsername) {
        this.activatedUsername = activatedUsername;
    }

    public String getDeactivatedUsername() {
        return deactivatedUsername;
    }

    public void setDeactivatedUsername(String deactivatedUsername) {
        this.deactivatedUsername = deactivatedUsername;
    }

    public void setBody(String body) {
        this.body = body;
    }

    public String getBody() {
        return body;
    }

    public UpdateStatus getStatus() {
        return status;
    }

    public void setStatus(UpdateStatus status) {
        this.status = status;
    }

    public long getEndpointCount() {
        return endpointCount;
    }

    public void setEndpointCount(long endpointCount) {
        this.endpointCount = endpointCount;
    }

    public Long getVersion() {
        return version;
    }

    public void setVersion(Long version) {
        this.version = version;
    }

    // Equality intentionally excludes id/timestamps/usernames — see class doc.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        AbstractStructureDto that = (AbstractStructureDto) o;

        if (lastModifyTime != that.lastModifyTime) {
            return false;
        }
        if (sequenceNumber != that.sequenceNumber) {
            return false;
        }
        if (applicationId != null ? !applicationId.equals(that.applicationId) : that.applicationId != null) {
            return false;
        }
        if (body != null ? !body.equals(that.body) : that.body != null) {
            return false;
        }
        if (endpointGroupId != null ? !endpointGroupId.equals(that.endpointGroupId) : that.endpointGroupId != null) {
            return false;
        }
        if (status != that.status) {
            return false;
        }

        return true;
    }

    // Consistent with equals(): hashes the same field subset.
    @Override
    public int hashCode() {
        int result = applicationId != null ? applicationId.hashCode() : 0;
        result = 31 * result + (endpointGroupId != null ? endpointGroupId.hashCode() : 0);
        result = 31 * result + sequenceNumber;
        result = 31 * result + (int) (lastModifyTime ^ (lastModifyTime >>> 32));
        result = 31 * result + (body != null ? body.hashCode() : 0);
        result = 31 * result + (status != null ? status.hashCode() : 0);
        return result;
    }

    @Override
    public String toString() {
        return "AbstractStructureDto{"
                + "id='" + id + '\''
                + ", applicationId='" + applicationId + '\''
                + ", endpointGroupId='" + endpointGroupId + '\''
                + ", sequenceNumber=" + sequenceNumber
                + ", lastModifyTime=" + lastModifyTime
                + ", status=" + status
                + ", version=" + version
                + '}';
    }

    /**
     * Pre-increments and returns the sequence number (i.e. the NEW value).
     */
    public int incrementSeqNum() {
        return ++sequenceNumber;
    }
}
//
//  ========================================================================
//  Copyright (c) 1995-2014 Mort Bay Consulting Pty. Ltd.
//  ------------------------------------------------------------------------
//  All rights reserved. This program and the accompanying materials
//  are made available under the terms of the Eclipse Public License v1.0
//  and Apache License v2.0 which accompanies this distribution.
//
//      The Eclipse Public License is available at
//      http://www.eclipse.org/legal/epl-v10.html
//
//      The Apache License v2.0 is available at
//      http://www.opensource.org/licenses/apache2.0.php
//
//  You may elect to redistribute this code under either of these licenses.
//  ========================================================================
//

package org.eclipse.jetty.server.handler;

import java.io.IOException;
import java.lang.reflect.Array;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.HandlerContainer;
import org.eclipse.jetty.server.HttpChannelState;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.util.ArrayTernaryTrie;
import org.eclipse.jetty.util.ArrayUtil;
import org.eclipse.jetty.util.Trie;
import org.eclipse.jetty.util.annotation.ManagedObject;
import org.eclipse.jetty.util.annotation.ManagedOperation;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;

/* ------------------------------------------------------------ */
/** ContextHandlerCollection.
 *
 * This {@link org.eclipse.jetty.server.handler.HandlerCollection} creates a
 * mapping (a trie of context paths) to its contained handlers based
 * on the context path and virtual hosts of any contained {@link org.eclipse.jetty.server.handler.ContextHandler}s.
 * The contexts do not need to be directly contained, only children of the contained handlers.
 * Multiple contexts may have the same context path and they are called in order until one
 * handles the request.
 */
@ManagedObject("Context Handler Collection")
public class ContextHandlerCollection extends HandlerCollection
{
    private static final Logger LOG = Log.getLogger(ContextHandlerCollection.class);

    // Maps each discovered ContextHandler back to the top-level handler branch
    // that contains it (used for async dispatch shortcut in handle()).
    private final ConcurrentMap<ContextHandler,Handler> _contextBranches = new ConcurrentHashMap<>();
    // Trie keyed by context path (without leading '/'); rebuilt wholesale by
    // mapContexts() and swapped in atomically (volatile).
    private volatile Trie<Map.Entry<String,Branch[]>> _pathBranches;
    private Class<? extends ContextHandler> _contextClass = ContextHandler.class;

    /* ------------------------------------------------------------ */
    public ContextHandlerCollection()
    {
        super(true);
    }

    /* ------------------------------------------------------------ */
    /**
     * Remap the context paths.
     */
    @ManagedOperation("update the mapping of context path to context")
    public void mapContexts()
    {
        _contextBranches.clear();

        if (getHandlers()==null)
        {
            _pathBranches=new ArrayTernaryTrie<>(false,16);
            return;
        }

        // Create map of contextPath to handler Branch
        Map<String,Branch[]> map = new HashMap<>();
        for (Handler handler:getHandlers())
        {
            Branch branch=new Branch(handler);
            for (String contextPath : branch.getContextPaths())
            {
                Branch[] branches=map.get(contextPath);
                map.put(contextPath, ArrayUtil.addToArray(branches, branch, Branch.class));
            }

            for (ContextHandler context : branch.getContextHandlers())
                _contextBranches.putIfAbsent(context, branch.getHandler());
        }

        // Sort the branches so those with virtual hosts are considered before those without
        for (Map.Entry<String,Branch[]> entry: map.entrySet())
        {
            Branch[] branches=entry.getValue();
            Branch[] sorted=new Branch[branches.length];
            int i=0;
            for (Branch branch:branches)
                if (branch.hasVirtualHost())
                    sorted[i++]=branch;
            for (Branch branch:branches)
                if (!branch.hasVirtualHost())
                    sorted[i++]=branch;
            entry.setValue(sorted);
        }

        // Loop until we have a big enough trie to hold all the context paths
        // (ArrayTernaryTrie has fixed capacity; grow by 512 and retry on overflow).
        int capacity=512;
        Trie<Map.Entry<String,Branch[]>> trie;
        loop: while(true)
        {
            trie=new ArrayTernaryTrie<>(false,capacity);
            for (Map.Entry<String,Branch[]> entry: map.entrySet())
            {
                if (!trie.put(entry.getKey().substring(1),entry))
                {
                    capacity+=512;
                    continue loop;
                }
            }
            break loop;
        }

        if (LOG.isDebugEnabled())
        {
            for (String ctx : trie.keySet())
                LOG.debug("{}->{}",ctx,Arrays.asList(trie.get(ctx).getValue()));
        }
        _pathBranches=trie;
    }

    /* ------------------------------------------------------------ */
    /*
     * @see org.eclipse.jetty.server.server.handler.HandlerCollection#setHandlers(org.eclipse.jetty.server.server.Handler[])
     */
    @Override
    public void setHandlers(Handler[] handlers)
    {
        super.setHandlers(handlers);
        // Only remap immediately if running; otherwise doStart() will map.
        if (isStarted())
            mapContexts();
    }

    /* ------------------------------------------------------------ */
    @Override
    protected void doStart() throws Exception
    {
        mapContexts();
        super.doStart();
    }

    /* ------------------------------------------------------------ */
    /*
     * @see org.eclipse.jetty.server.server.Handler#handle(java.lang.String, javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse, int)
     */
    @Override
    public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException
    {
        Handler[] handlers = getHandlers();
        if (handlers==null || handlers.length==0)
            return;

        HttpChannelState async = baseRequest.getHttpChannelState();
        // Async re-dispatch: route straight back to the branch (or context)
        // that originally handled the request.
        if (async.isAsync())
        {
            ContextHandler context=async.getContextHandler();
            if (context!=null)
            {
                Handler branch = _contextBranches.get(context);

                if (branch==null)
                    context.handle(target,baseRequest,request, response);
                else
                    branch.handle(target, baseRequest, request, response);
                return;
            }
        }

        // data structure which maps a request to a context; first-best match wins
        // { context path => [ context ] }
        // }
        if (target.startsWith("/"))
        {
            // Best-match then backtrack: try the longest matching context path
            // first, shortening the search limit on each miss.
            int limit = target.length()-1;
            while (limit>=0)
            {
                // Get best match
                Map.Entry<String,Branch[]> branches = _pathBranches.getBest(target,1,limit);

                if (branches==null)
                    break;

                int l=branches.getKey().length();
                // Accept only matches at a path-segment boundary ('/' root,
                // exact length, or next char is '/').
                if (l==1 || target.length()==l || target.charAt(l)=='/')
                {
                    for (Branch branch : branches.getValue())
                    {
                        branch.getHandler().handle(target,baseRequest, request, response);
                        if (baseRequest.isHandled())
                            return;
                    }
                }

                limit=l-2;
            }
        }
        else
        {
            // This may not work in all circumstances... but then I think it should never be called
            for (int i=0;i<handlers.length;i++)
            {
                handlers[i].handle(target,baseRequest, request, response);
                if ( baseRequest.isHandled())
                    return;
            }
        }
    }

    /* ------------------------------------------------------------ */
    /** Add a context handler.
     * @param contextPath The context path to add
     * @param resourceBase the base (root) resource for the new context
     * @return the ContextHandler just added
     */
    public ContextHandler addContext(String contextPath,String resourceBase)
    {
        try
        {
            ContextHandler context = _contextClass.newInstance();
            context.setContextPath(contextPath);
            context.setResourceBase(resourceBase);
            addHandler(context);
            return context;
        }
        catch (Exception e)
        {
            LOG.debug(e);
            throw new Error(e);
        }
    }

    /* ------------------------------------------------------------ */
    /**
     * @return The class to use to add new Contexts
     */
    public Class<?> getContextClass()
    {
        return _contextClass;
    }

    /* ------------------------------------------------------------ */
    /**
     * @param contextClass The class to use to add new Contexts
     */
    public void setContextClass(Class<? extends ContextHandler> contextClass)
    {
        if (contextClass ==null || !(ContextHandler.class.isAssignableFrom(contextClass)))
            throw new IllegalArgumentException();
        _contextClass = contextClass;
    }

    /* ------------------------------------------------------------ */
    /* ------------------------------------------------------------ */
    /* ------------------------------------------------------------ */
    /**
     * One top-level handler plus all ContextHandlers discovered beneath it
     * (directly, or via HandlerContainer children).
     */
    private final static class Branch
    {
        private final Handler _handler;
        private final ContextHandler[] _contexts;

        Branch(Handler handler)
        {
            _handler=handler;

            if (handler instanceof ContextHandler)
            {
                _contexts = new ContextHandler[]{(ContextHandler)handler};
            }
            else if (handler instanceof HandlerContainer)
            {
                Handler[] contexts=((HandlerContainer)handler).getChildHandlersByClass(ContextHandler.class);
                _contexts = new ContextHandler[contexts.length];
                System.arraycopy(contexts, 0, _contexts, 0, contexts.length);
            }
            else
                _contexts = new ContextHandler[0];
        }

        /** @return the distinct context paths served by this branch */
        Set<String> getContextPaths()
        {
            Set<String> set = new HashSet<String>();
            for (ContextHandler context:_contexts)
                set.add(context.getContextPath());
            return set;
        }

        /** @return true if any contained context declares virtual hosts */
        boolean hasVirtualHost()
        {
            for (ContextHandler context:_contexts)
                if (context.getVirtualHosts()!=null && context.getVirtualHosts().length>0)
                    return true;
            return false;
        }

        ContextHandler[] getContextHandlers()
        {
            return _contexts;
        }

        Handler getHandler()
        {
            return _handler;
        }

        @Override
        public String toString()
        {
            return String.format("{%s,%s}",_handler,Arrays.asList(_contexts));
        }
    }
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.favoritesTreeView;

import com.intellij.ide.IdeBundle;
import com.intellij.ide.favoritesTreeView.actions.AddToFavoritesAction;
import com.intellij.ide.projectView.impl.*;
import com.intellij.ide.projectView.impl.nodes.LibraryGroupElement;
import com.intellij.ide.projectView.impl.nodes.NamedLibraryElement;
import com.intellij.ide.util.treeView.AbstractTreeNode;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.ProjectComponent;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.ui.InputValidator;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.*;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.util.ArrayUtil;
import com.intellij.util.Consumer;
import com.intellij.util.SmartList;
import com.intellij.util.TreeItem;
import com.intellij.util.containers.ContainerUtil;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.File;
import java.util.*;
import java.util.function.Function;

import static com.intellij.ide.favoritesTreeView.FavoritesListProvider.EP_NAME;

/**
 * Project-level component that stores the user's named "Favorites" lists.
 * Each list maps to a tree of {@code (AbstractUrl, node-class-name)} pairs; roots are
 * serialized to and from the project's JDOM configuration via {@link JDOMExternalizable}.
 * List order is kept separately from the (name-sorted) map so the UI order survives renames.
 */
public final class FavoritesManager implements ProjectComponent, JDOMExternalizable {
  // fav list name -> list of (root: root url, root class)
  private final Map<String, List<TreeItem<Pair<AbstractUrl, String>>>> myName2FavoritesRoots = new TreeMap<>();
  // Display order of the lists; maintained separately because the map above sorts by name.
  private final List<String> myFavoritesRootsOrder = new ArrayList<>();
  // Optional per-list description shown in the tree (e.g. "auto-added").
  private final Map<String, String> myDescriptions = new HashMap<>();
  private final Project myProject;
  // Copy-on-write so listeners may be (un)registered while a notification is in flight.
  private final List<FavoritesListener> myListeners = ContainerUtil.createLockFreeCopyOnWriteList();
  private final FavoritesViewSettings myViewSettings = new FavoritesViewSettings();
  // Lazily populated by getProviders(); null until first access.
  private Map<String, FavoritesListProvider> myProviders;

  /**
   * Lazily builds the provider map from the {@code EP_NAME} extension point.
   * Side effects on first call (outside unit-test mode): registers a PSI tree listener
   * to keep favorite URLs in sync with renames/moves, and auto-creates a list named
   * after the project when no lists exist yet.
   */
  @NotNull
  private Map<String, FavoritesListProvider> getProviders() {
    if (myProviders != null) {
      return myProviders;
    }
    myProviders = new HashMap<>();
    if (!ApplicationManager.getApplication().isUnitTestMode()) {
      final FavoritesListProvider[] providers = EP_NAME.getExtensions(myProject);
      for (FavoritesListProvider provider : providers) {
        myProviders.put(provider.getListName(myProject), provider);
      }
      final MyRootsChangeAdapter myPsiTreeChangeAdapter = new MyRootsChangeAdapter();
      // Listener is scoped to the project (myProject is the parent Disposable).
      PsiManager.getInstance(myProject).addPsiTreeChangeListener(myPsiTreeChangeAdapter, myProject);
      if (myName2FavoritesRoots.isEmpty()) {
        myDescriptions.put(myProject.getName(), "auto-added");
        createNewList(myProject.getName());
      }
    }
    return myProviders;
  }

  // Notify all listeners that the content of some list changed.
  private void rootsChanged() {
    for (FavoritesListener listener : myListeners) {
      listener.rootsChanged();
    }
  }

  private void listAdded(@NotNull String listName) {
    for (FavoritesListener listener : myListeners) {
      listener.listAdded(listName);
    }
  }

  private void listRemoved(@NotNull String listName) {
    for (FavoritesListener listener : myListeners) {
      listener.listRemoved(listName);
    }
  }

  /**
   * Shows a modal rename dialog for the given list and applies the rename.
   * The validator rejects blank names and names already taken by a list or a provider.
   */
  public void renameList(final Project project, @NotNull String listName) {
    final String newName = Messages
      .showInputDialog(project, IdeBundle.message("prompt.input.favorites.list.new.name", listName),
                       IdeBundle.message("title.rename.favorites.list"),
                       Messages.getInformationIcon(), listName, new InputValidator() {
        @Override
        public boolean checkInput(String inputString) {
          return inputString != null && inputString.trim().length() > 0;
        }

        @Override
        public boolean canClose(String inputString) {
          inputString = inputString.trim();
          // Name collisions with existing lists or provider-backed lists are rejected.
          if (myName2FavoritesRoots.keySet().contains(inputString) || getProviders().keySet().contains(inputString)) {
            Messages.showErrorDialog(project, IdeBundle.message("error.favorites.list.already.exists", inputString.trim()),
                                     IdeBundle.message("title.unable.to.add.favorites.list"));
            return false;
          }
          return !inputString.isEmpty();
        }
      });
    if (newName != null && renameFavoritesList(listName, newName)) {
      rootsChanged();
    }
  }

  /**
   * Registers a listener (auto-removed when {@code parent} is disposed) and immediately
   * fires {@code rootsChanged()} on it so it can initialize from current state.
   */
  public void addFavoritesListener(final FavoritesListener listener, @NotNull Disposable parent) {
    myListeners.add(listener);
    listener.rootsChanged();
    Disposer.register(parent, new Disposable() {
      @Override
      public void dispose() {
        myListeners.remove(listener);
      }
    });
  }

  /** Builds the top-level nodes of the Favorites view: user lists first, then sorted provider lists. */
  List<AbstractTreeNode<?>> createRootNodes() {
    List<AbstractTreeNode<?>> result = new ArrayList<>();
    for (String listName : myFavoritesRootsOrder) {
      result.add(new FavoritesListNode(myProject, listName, myDescriptions.get(listName)));
    }
    ArrayList<FavoritesListProvider> providers = new ArrayList<>(getProviders().values());
    Collections.sort(providers);
    for (FavoritesListProvider provider : providers) {
      result.add(provider.createFavoriteListNode(myProject));
    }
    return result;
  }

  public static FavoritesManager getInstance(Project project) {
    return project.getComponent(FavoritesManager.class);
  }

  public FavoritesManager(Project project) {
    myProject = project;
  }

  /** @return a defensive copy of the list names in display order. */
  @NotNull
  public List<String> getAvailableFavoritesListNames() {
    return new ArrayList<>(myFavoritesRootsOrder);
  }

  public synchronized void createNewList(@NotNull String listName) {
    myName2FavoritesRoots.put(listName, new ArrayList<>());
    myFavoritesRootsOrder.add(listName);
    listAdded(listName);
  }

  // NOTE(review): listName is ignored; all listeners get a blanket rootsChanged().
  public synchronized void fireListeners(@NotNull final String listName) {
    rootsChanged();
  }

  @NotNull
  public FavoritesViewSettings getViewSettings() {
    return myViewSettings;
  }

  public synchronized void removeFavoritesList(@NotNull String name) {
    myName2FavoritesRoots.remove(name);
    myFavoritesRootsOrder.remove(name);
    myDescriptions.remove(name);
    listRemoved(name);
  }

  /**
   * @return the live root list for {@code name}, or a fresh empty list if the name is
   * unknown. Note: the empty list is NOT stored, so mutations on it are lost.
   */
  @NotNull
  public List<TreeItem<Pair<AbstractUrl, String>>> getFavoritesListRootUrls(@NotNull String name) {
    final List<TreeItem<Pair<AbstractUrl, String>>> pairs = myName2FavoritesRoots.get(name);
    return pairs == null ? new ArrayList<>() : pairs;
  }

  /** Converts arbitrary {@code elements} into favorite nodes and adds them to the named list. */
  public synchronized boolean addRoots(@NotNull String name, Module moduleContext, @NotNull Object elements) {
    Collection<AbstractTreeNode<?>> nodes = AddToFavoritesAction.createNodes(myProject, moduleContext, elements, true, getViewSettings());
    return !nodes.isEmpty() && addRoots(name, nodes);
  }

  // Providers double as comparators for their own lists; null for plain user lists.
  public synchronized Comparator<FavoriteTreeNodeDescriptor> getCustomComparator(@NotNull final String name) {
    return getProviders().get(name);
  }

  /** @return (url, node-class-name) for the node's value, or null if no URL can represent it. */
  private Pair<AbstractUrl, String> createPairForNode(AbstractTreeNode node) {
    final String className = node.getClass().getName();
    final Object value = node.getValue();
    final AbstractUrl url = createUrlByElement(value, myProject);
    if (url == null) return null;
    return Pair.create(url, className);
  }

  /**
   * Adds the given nodes (with their subtrees) as roots of the named list,
   * skipping roots whose URL is already present. Always returns true.
   */
  public boolean addRoots(final String name, final Collection<? extends AbstractTreeNode<?>> nodes) {
    final Collection<TreeItem<Pair<AbstractUrl, String>>> list = getFavoritesListRootUrls(name);
    // De-duplicate by URL only (the class-name half of the pair is ignored).
    final HashSet<AbstractUrl> set = new HashSet<>(ContainerUtil.map(list, item -> item.getData().getFirst()));
    for (AbstractTreeNode node : nodes) {
      final Pair<AbstractUrl, String> pair = createPairForNode(node);
      if (pair != null) {
        if (set.contains(pair.getFirst())) continue;
        final TreeItem<Pair<AbstractUrl, String>> treeItem = new TreeItem<>(pair);
        list.add(treeItem);
        set.add(pair.getFirst());
        appendChildNodes(node, treeItem);
      }
    }
    rootsChanged();
    return true;
  }

  /** @return true if at least one of {@code nodes} would actually be added (new URL). */
  public boolean canAddRoots(@NotNull String name, @NotNull Collection<? extends AbstractTreeNode<?>> nodes) {
    final Collection<TreeItem<Pair<AbstractUrl, String>>> list = getFavoritesListRootUrls(name);
    final HashSet<AbstractUrl> set = new HashSet<>(ContainerUtil.map(list, item -> item.getData().getFirst()));
    for (AbstractTreeNode node : nodes) {
      final Pair<AbstractUrl, String> pair = createPairForNode(node);
      if (pair != null && !set.contains(pair.getFirst())) return true;
    }
    return false;
  }

  // Recursively mirrors the node's subtree into the TreeItem structure.
  private void appendChildNodes(AbstractTreeNode node, TreeItem<Pair<AbstractUrl, String>> treeItem) {
    final Collection<? extends AbstractTreeNode<?>> children = node.getChildren();
    for (AbstractTreeNode child : children) {
      final TreeItem<Pair<AbstractUrl, String>> childTreeItem = new TreeItem<>(createPairForNode(child));
      treeItem.addChild(childTreeItem);
      appendChildNodes(child, childTreeItem);
    }
  }

  /**
   * Walks down the named list following the URL path formed by {@code elements}
   * (all but the last element select nested children) and removes the item matching
   * the last element's URL. Returns true if something was removed.
   */
  private <T> boolean findListToRemoveFrom(@NotNull String name, @NotNull final List<T> elements,
                                           final Function<? super T, ? extends AbstractUrl> convertor) {
    Collection<TreeItem<Pair<AbstractUrl, String>>> list = getFavoritesListRootUrls(name);
    if (elements.size() > 1) {
      final List<T> sublist = elements.subList(0, elements.size() - 1);
      for (T obj : sublist) {
        AbstractUrl objUrl = convertor.apply(obj);
        final TreeItem<Pair<AbstractUrl, String>> item = findNextItem(objUrl, list);
        if (item == null || item.getChildren() == null) return false;
        list = item.getChildren();
      }
    }

    TreeItem<Pair<AbstractUrl, String>> found = null;
    AbstractUrl url = convertor.apply(elements.get(elements.size() - 1));
    if (url == null) return false;
    for (TreeItem<Pair<AbstractUrl, String>> pair : list) {
      if (url.equals(pair.getData().getFirst())) {
        found = pair;
        break;
      }
    }

    if (found != null) {
      list.remove(found);
      rootsChanged();
      return true;
    }
    return false;
  }

  /** Removes each element's path from the named list; true only if every removal succeeded. */
  public synchronized boolean removeRoot(@NotNull String name, @NotNull List<? extends AbstractTreeNode<?>> elements) {
    Function<AbstractTreeNode, AbstractUrl> convertor = obj -> createUrlByElement(obj.getValue(), myProject);
    boolean result = true;
    for (AbstractTreeNode element : elements) {
      final List<AbstractTreeNode<?>> path = TaskDefaultFavoriteListProvider.getPathToUsualNode(element);
      // subList(1, ...) drops the synthetic root of the path.
      result &= findListToRemoveFrom(name, path.subList(1, path.size()), convertor);
    }
    return result;
  }

  // Linear scan for the tree item whose URL equals the given one.
  private static TreeItem<Pair<AbstractUrl, String>> findNextItem(AbstractUrl url,
                                                                  Collection<? extends TreeItem<Pair<AbstractUrl, String>>> list) {
    for (TreeItem<Pair<AbstractUrl, String>> pair : list) {
      if (url.equals(pair.getData().getFirst())) {
        return pair;
      }
    }
    return null;
  }

  /**
   * Renames a list, preserving its position in the display order and its description.
   * @return true if the old list existed and the new name is non-empty.
   */
  private boolean renameFavoritesList(@NotNull String oldName, @NotNull String newName) {
    List<TreeItem<Pair<AbstractUrl, String>>> list = myName2FavoritesRoots.remove(oldName);
    int index = myFavoritesRootsOrder.indexOf(oldName);
    if (index != -1 && newName.length() > 0) {
      myFavoritesRootsOrder.remove(oldName);
      // Drop any stale entry under the new name before inserting at the old position.
      myFavoritesRootsOrder.remove(newName);
      myFavoritesRootsOrder.add(index, newName);
    }
    if (list != null && newName.length() > 0) {
      myName2FavoritesRoots.put(newName, list);
      String description = myDescriptions.remove(oldName);
      if (description != null) {
        myDescriptions.put(newName, description);
      }
      rootsChanged();
      return true;
    }
    return false;
  }

  /**
   * Moves {@code nameToOrder} directly above/below {@code anchorName} in the display order.
   * Insert-then-remove: if the old position was after the insertion point it has shifted by one,
   * hence the {@code toRemove + 1} correction.
   */
  public void setOrder(@NotNull String nameToOrder, String anchorName, boolean above) {
    if (!canReorder(nameToOrder, anchorName, above)) return;
    int index = myFavoritesRootsOrder.indexOf(anchorName);
    int toRemove = myFavoritesRootsOrder.indexOf(nameToOrder);
    myFavoritesRootsOrder.add(above ? index : index + 1, nameToOrder);
    myFavoritesRootsOrder.remove(toRemove > index ? toRemove + 1 : toRemove);
    rootsChanged();
  }

  /** @return false when either name is unknown or the move would be a no-op. */
  public boolean canReorder(String nameToOrder, String anchorName, boolean above) {
    int index = myFavoritesRootsOrder.indexOf(anchorName);
    int toReorder = myFavoritesRootsOrder.indexOf(nameToOrder);
    if (index == -1 || toReorder == -1 || index == toReorder) return false;
    // Already immediately above/below the anchor.
    if (toReorder == index - 1 && above) return false;
    if (toReorder == index + 1 && !above) return false;
    return true;
  }

  @Override
  @NotNull
  public String getComponentName() {
    return "FavoritesManager";
  }

  @Nullable
  public FavoritesListProvider getListProvider(@Nullable String name) {
    return getProviders().get(name);
  }

  /** Restores all favorites lists (names, order, root trees) from the project's JDOM state. */
  @Override
  public void readExternal(Element element) throws InvalidDataException {
    myName2FavoritesRoots.clear();
    for (Element list : element.getChildren(ELEMENT_FAVORITES_LIST)) {
      final String name = list.getAttributeValue(ATTRIBUTE_NAME);
      List<TreeItem<Pair<AbstractUrl, String>>> roots = readRoots(list, myProject);
      myName2FavoritesRoots.put(name, roots);
      myFavoritesRootsOrder.add(name);
    }
    DefaultJDOMExternalizer.readExternal(this, element);
  }

  // XML element/attribute names used by read/writeExternal.
  @NonNls private static final String CLASS_NAME = "klass";
  @NonNls private static final String FAVORITES_ROOT = "favorite_root";
  @NonNls private static final String ELEMENT_FAVORITES_LIST = "favorites_list";
  @NonNls private static final String ATTRIBUTE_NAME = "name";

  private static List<TreeItem<Pair<AbstractUrl, String>>> readRoots(final Element list, Project project) {
    List<TreeItem<Pair<AbstractUrl, String>>> result = new ArrayList<>();
    readFavoritesOneLevel(list, project, result);
    return result;
  }

  // Recursively deserializes nested <favorite_root> elements; entries whose URL
  // cannot be reconstructed are silently dropped (including their subtrees).
  private static void readFavoritesOneLevel(Element list, Project project,
                                            Collection<? super TreeItem<Pair<AbstractUrl, String>>> result) {
    for (Element favorite : list.getChildren(FAVORITES_ROOT)) {
      final String className = favorite.getAttributeValue(CLASS_NAME);
      final AbstractUrl abstractUrl = readUrlFromElement(favorite, project);
      if (abstractUrl != null) {
        final TreeItem<Pair<AbstractUrl, String>> treeItem = new TreeItem<>(Pair.create(abstractUrl, className));
        result.add(treeItem);
        readFavoritesOneLevel(favorite, project, treeItem.getChildren());
      }
    }
  }

  // Prototype URL instances; each one recognizes and re-creates its own "type" during read.
  private static final ArrayList<AbstractUrl> ourAbstractUrlProviders = new ArrayList<>();

  static {
    ourAbstractUrlProviders.add(new ModuleUrl(null, null));
    ourAbstractUrlProviders.add(new DirectoryUrl(null, null));

    ourAbstractUrlProviders.add(new ModuleGroupUrl(null));

    ourAbstractUrlProviders.add(new PsiFileUrl(null));
    ourAbstractUrlProviders.add(new LibraryModuleGroupUrl(null));
    ourAbstractUrlProviders.add(new NamedLibraryUrl(null, null));
  }

  @NonNls private static final String ATTRIBUTE_TYPE = "type";
  @NonNls private static final String ATTRIBUTE_URL = "url";
  @NonNls private static final String ATTRIBUTE_MODULE = "module";

  // Extension-point providers take precedence over the built-in URL prototypes.
  @Nullable
  private static AbstractUrl readUrlFromElement(Element element, Project project) {
    final String type = element.getAttributeValue(ATTRIBUTE_TYPE);
    final String urlValue = element.getAttributeValue(ATTRIBUTE_URL);
    final String moduleName = element.getAttributeValue(ATTRIBUTE_MODULE);

    for (FavoriteNodeProvider nodeProvider : FavoriteNodeProvider.EP_NAME.getExtensions(project)) {
      if (nodeProvider.getFavoriteTypeId().equals(type)) {
        return new AbstractUrlFavoriteAdapter(urlValue, moduleName, nodeProvider);
      }
    }

    for (AbstractUrl urlProvider : ourAbstractUrlProviders) {
      AbstractUrl url = urlProvider.createUrl(type, moduleName, urlValue);
      if (url != null) return url;
    }
    return null;
  }

  /** Persists every list (in display order) with its full root tree into the project's JDOM state. */
  @Override
  public void writeExternal(Element element) throws WriteExternalException {
    for (final String name : myFavoritesRootsOrder) {
      Element list = new Element(ELEMENT_FAVORITES_LIST);
      list.setAttribute(ATTRIBUTE_NAME, name);
      writeRoots(list, myName2FavoritesRoots.get(name));
      element.addContent(list);
    }
    DefaultJDOMExternalizer.writeExternal(this, element);
  }

  /** Resolves any project element to an {@link AbstractUrl}; pointers are dereferenced first. */
  @Nullable
  public static AbstractUrl createUrlByElement(Object element, final Project project) {
    if (element instanceof SmartPsiElementPointer) element = ((SmartPsiElementPointer)element).getElement();

    for (FavoriteNodeProvider nodeProvider : FavoriteNodeProvider.EP_NAME.getExtensions(project)) {
      String url = nodeProvider.getElementUrl(element);
      if (url != null) {
        return new AbstractUrlFavoriteAdapter(url, nodeProvider.getElementModuleName(element), nodeProvider);
      }
    }

    for (AbstractUrl urlProvider : ourAbstractUrlProviders) {
      AbstractUrl url = urlProvider.createUrlByElement(element);
      if (url != null) return url;
    }
    return null;
  }

  // Recursive inverse of readFavoritesOneLevel; items with a null URL are skipped.
  private static void writeRoots(Element element, Collection<? extends TreeItem<Pair<AbstractUrl, String>>> roots) {
    for (TreeItem<Pair<AbstractUrl, String>> root : roots) {
      final AbstractUrl url = root.getData().getFirst();
      if (url == null) continue;
      final Element list = new Element(FAVORITES_ROOT);
      url.write(list);
      list.setAttribute(CLASS_NAME, root.getData().getSecond());
      element.addContent(list);
      final List<TreeItem<Pair<AbstractUrl, String>>> children = root.getChildren();
      if (children != null && !children.isEmpty()) {
        writeRoots(list, children);
      }
    }
  }

  /**
   * @return the name of a list containing {@code vFile}, preferring {@code currentSubId}
   * if it still matches; null when no list contains the file.
   */
  public String getFavoriteListName(@Nullable final String currentSubId, @NotNull final VirtualFile vFile) {
    if (currentSubId != null && contains(currentSubId, vFile)) {
      return currentSubId;
    }
    for (String listName : myName2FavoritesRoots.keySet()) {
      if (contains(listName, vFile)) {
        return listName;
      }
    }
    return null;
  }

  // currently only one level here..
  /**
   * @return true if the named list contains {@code vFile}, either directly as a root
   * or anywhere under a root directory/module/library. Directory/module membership is
   * detected by iterating their content into the {@code find} marker set.
   */
  public boolean contains(@NotNull String name, @NotNull final VirtualFile vFile) {
    final ProjectFileIndex projectFileIndex = ProjectRootManager.getInstance(myProject).getFileIndex();

    final Set<Boolean> find = new HashSet<>();
    final ContentIterator contentIterator = fileOrDir -> {
      if (fileOrDir.getPath().equals(vFile.getPath())) {
        find.add(Boolean.TRUE);
      }
      return true;
    };

    Collection<TreeItem<Pair<AbstractUrl, String>>> urls = getFavoritesListRootUrls(name);
    for (TreeItem<Pair<AbstractUrl, String>> pair : urls) {
      AbstractUrl abstractUrl = pair.getData().getFirst();
      if (abstractUrl == null) {
        continue;
      }
      final Object[] path = abstractUrl.createPath(myProject);
      if (path == null || path.length < 1 || path[0] == null) {
        continue;
      }
      Object element = path[path.length - 1];
      if (element instanceof SmartPsiElementPointer) {
        final VirtualFile virtualFile = PsiUtilCore.getVirtualFile(((SmartPsiElementPointer)element).getElement());
        if (virtualFile == null) continue;
        if (vFile.getPath().equals(virtualFile.getPath())) {
          return true;
        }
        if (!virtualFile.isDirectory()) {
          continue;
        }
        projectFileIndex.iterateContentUnderDirectory(virtualFile, contentIterator);
      }

      if (element instanceof PsiElement) {
        final VirtualFile virtualFile = PsiUtilCore.getVirtualFile((PsiElement)element);
        if (virtualFile == null) continue;
        if (vFile.getPath().equals(virtualFile.getPath())) {
          return true;
        }
        if (!virtualFile.isDirectory()) {
          continue;
        }
        projectFileIndex.iterateContentUnderDirectory(virtualFile, contentIterator);
      }
      if (element instanceof Module) {
        ModuleRootManager.getInstance((Module)element).getFileIndex().iterateContent(contentIterator);
      }
      if (element instanceof LibraryGroupElement) {
        final boolean inLibrary =
          ModuleRootManager.getInstance(((LibraryGroupElement)element).getModule()).getFileIndex().isInContent(vFile) &&
          projectFileIndex.isInLibraryClasses(vFile);
        if (inLibrary) {
          return true;
        }
      }
      if (element instanceof NamedLibraryElement) {
        NamedLibraryElement namedLibraryElement = (NamedLibraryElement)element;
        final VirtualFile[] files = namedLibraryElement.getOrderEntry().getRootFiles(OrderRootType.CLASSES);
        if (ArrayUtil.find(files, vFile) > -1) {
          return true;
        }
      }
      if (element instanceof ModuleGroup) {
        ModuleGroup group = (ModuleGroup)element;
        final Collection<Module> modules = group.modulesInGroup(myProject, true);
        for (Module module : modules) {
          ModuleRootManager.getInstance(module).getFileIndex().iterateContent(contentIterator);
        }
      }

      for (FavoriteNodeProvider provider : FavoriteNodeProvider.EP_NAME.getExtensions(myProject)) {
        if (provider.elementContainsFile(element, vFile)) {
          return true;
        }
      }

      if (!find.isEmpty()) {
        return true;
      }
    }
    return false;
  }

  // Breadth-first traversal over the item forest, applying the consumer to every item.
  private static void iterateTreeItems(final Collection<? extends TreeItem<Pair<AbstractUrl, String>>> coll,
                                       Consumer<? super TreeItem<Pair<AbstractUrl, String>>> consumer) {
    final ArrayDeque<TreeItem<Pair<AbstractUrl, String>>> queue = new ArrayDeque<>(coll);
    while (!queue.isEmpty()) {
      final TreeItem<Pair<AbstractUrl, String>> item = queue.removeFirst();
      consumer.consume(item);
      final List<TreeItem<Pair<AbstractUrl, String>>> children = item.getChildren();
      if (children != null && !children.isEmpty()) {
        queue.addAll(children);
      }
    }
  }

  /**
   * Collects the (non-directory) files of a user list; provider-backed lists yield nothing.
   * With {@code recursively} set, the whole item tree is scanned, otherwise only the roots.
   */
  protected Collection<VirtualFile> getVirtualFiles(String listName, boolean recursively) {
    if (getListProvider(listName) != null) return Collections.emptyList();
    Collection<VirtualFile> result = new SmartList<>();
    final List<TreeItem<Pair<AbstractUrl, String>>> roots = myName2FavoritesRoots.get(listName);
    if (!recursively) {
      for (TreeItem<Pair<AbstractUrl, String>> item : roots) {
        VirtualFile file = getVirtualFile(item);
        if (file != null) {
          result.add(file);
        }
      }
    }
    else {
      iterateTreeItems(roots, item -> {
        VirtualFile file = getVirtualFile(item);
        if (file != null) {
          result.add(file);
        }
      });
    }
    return result;
  }

  // Resolves an item to a plain file; only single-element PsiFile/File paths qualify.
  @Nullable
  private VirtualFile getVirtualFile(TreeItem<Pair<AbstractUrl, String>> item) {
    Pair<AbstractUrl, String> data = item.getData();
    Object[] path = data.first.createPath(myProject);
    if (path != null && path.length == 1) {
      if (path[0] instanceof PsiFile) {
        VirtualFile virtualFile = ((PsiFile)path[0]).getVirtualFile();
        if (virtualFile != null && !virtualFile.isDirectory()) {
          return virtualFile;
        }
      }
      if (path[0] instanceof File) {
        VirtualFile virtualFile = VfsUtil.findFileByIoFile((File)path[0], false);
        if (virtualFile != null && !virtualFile.isDirectory()) {
          return virtualFile;
        }
      }
    }
    return null;
  }

  /**
   * Keeps stored favorite URLs valid when PSI files/directories are moved or renamed,
   * rewriting the affected items in every list before the change is committed.
   */
  private class MyRootsChangeAdapter extends PsiTreeChangeAdapter {
    @Override
    public void beforeChildMovement(@NotNull final PsiTreeChangeEvent event) {
      final PsiElement oldParent = event.getOldParent();
      final PsiElement newParent = event.getNewParent();
      final PsiElement child = event.getChild();
      if (newParent instanceof PsiDirectory) {
        final Module module = ModuleUtilCore.findModuleForPsiElement(newParent);
        if (module == null) return;
        AbstractUrl childUrl = null;
        // Compute the URL the child will have after the move completes.
        if (child instanceof PsiFile) {
          childUrl = new PsiFileUrl(((PsiDirectory)newParent).getVirtualFile().getUrl() + "/" + ((PsiFile)child).getName());
        }
        else if (child instanceof PsiDirectory) {
          childUrl =
            new DirectoryUrl(((PsiDirectory)newParent).getVirtualFile().getUrl() + "/" + ((PsiDirectory)child).getName(),
                             module.getName());
        }

        for (String listName : myFavoritesRootsOrder) {
          final List<TreeItem<Pair<AbstractUrl, String>>> roots = myName2FavoritesRoots.get(listName);
          final AbstractUrl finalChildUrl = childUrl;
          iterateTreeItems(roots, item -> {
            final Pair<AbstractUrl, String> root = item.getData();
            final Object[] path = root.first.createPath(myProject);
            if (path == null || path.length < 1 || path[0] == null) {
              return;
            }
            final Object element = path[path.length - 1];
            if (element == child && finalChildUrl != null) {
              // Item points at the moved element itself: retarget to its new URL.
              item.setData(Pair.create(finalChildUrl, root.second));
            }
            else {
              if (element == oldParent) {
                // Item points at the old parent: re-derive its URL from the new parent.
                item.setData(Pair.create(root.first.createUrlByElement(newParent), root.second));
              }
            }
          });
        }
      }
    }

    @Override
    public void beforePropertyChange(@NotNull final PsiTreeChangeEvent event) {
      if (event.getPropertyName().equals(PsiTreeChangeEvent.PROP_FILE_NAME) ||
          event.getPropertyName().equals(PsiTreeChangeEvent.PROP_DIRECTORY_NAME)) {
        final PsiElement psiElement = event.getChild();
        if (psiElement instanceof PsiFile || psiElement instanceof PsiDirectory) {
          final Module module = ModuleUtilCore.findModuleForPsiElement(psiElement);
          if (module == null) return;
          // URL of the element under its upcoming (new) name.
          final String url = ((PsiDirectory)psiElement.getParent()).getVirtualFile().getUrl() + "/" + event.getNewValue();
          final AbstractUrl childUrl =
            psiElement instanceof PsiFile ? new PsiFileUrl(url) : new DirectoryUrl(url, module.getName());

          for (String listName : myFavoritesRootsOrder) {
            final List<TreeItem<Pair<AbstractUrl, String>>> roots = myName2FavoritesRoots.get(listName);
            iterateTreeItems(roots, item -> {
              final Pair<AbstractUrl, String> root = item.getData();
              final Object[] path = root.first.createPath(myProject);
              if (path == null || path.length < 1 || path[0] == null) {
                return;
              }
              final Object element = path[path.length - 1];
              if (element == psiElement && psiElement instanceof PsiFile) {
                item.setData(Pair.create(childUrl, root.second));
              }
              else {
                item.setData(root);
              }
            });
          }
        }
      }
    }
  }
}
/* * Copyright (c) 2021, Peter Abeles. All Rights Reserved. * * This file is part of BoofCV (http://boofcv.org). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package boofcv.alg.interpolate.impl; import boofcv.BoofTesting; import boofcv.alg.interpolate.InterpolatePixelS; import boofcv.alg.misc.GImageMiscOps; import boofcv.core.image.border.FactoryImageBorder; import boofcv.struct.border.ImageBorder; import boofcv.struct.image.ImageGray; import boofcv.testing.BoofStandardJUnit; import org.ejml.UtilEjml; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.*; /** * Several standardized tests that ensure correct implementations of {@link boofcv.alg.interpolate.InterpolatePixelS}. * * @author Peter Abeles */ public abstract class GeneralChecksInterpolationPixelS<T extends ImageGray<T>> extends BoofStandardJUnit { protected int width = 320; protected int height = 240; protected boolean exceptionOutside = true; protected abstract T createImage( int width , int height ); protected abstract InterpolatePixelS<T> wrap(T image, int minValue, int maxValue); /** * Checks value returned by get() against values computed using * an alternative approach. 
*/ @Test void get() { T img = createImage(width, height); GImageMiscOps.fillUniform(img, rand, 0, 100); BoofTesting.checkSubImage(this, "get", false, img); } public void get(T img) { InterpolatePixelS<T> interp = wrap(img, 0, 100); assertEquals(compute(img, 10, 10), interp.get(10, 10), UtilEjml.TEST_F32); assertEquals(compute(img, 10.1f, 10), interp.get(10.1f, 10), UtilEjml.TEST_F32); assertEquals(compute(img, 10, 10.6f), interp.get(10, 10.6f), UtilEjml.TEST_F32); assertEquals(compute(img, 10.8f, 10.6f), interp.get(10.8f, 10.6f), UtilEjml.TEST_F32); } /** * See if accessing the image edge causes it to blow up. */ @Test void get_edges() { T img = createImage(width, height); GImageMiscOps.fillUniform(img, rand, 0, 100); BoofTesting.checkSubImage(this, "get_edges", false, img); } public void get_edges(T img) { InterpolatePixelS<T> interp = wrap(img, 0, 100); int borderX0 = interp.getFastBorderX(); int borderX1 = interp.getFastBorderX(); int borderY0 = interp.getFastBorderY(); int borderY1 = interp.getFastBorderY(); compare(interp,img, width-borderX1-1, height/2); compare(interp,img, borderX0, height/2); compare(interp,img, width/2, height-borderY1-1); compare(interp,img, width/2, borderY0); compare(interp,img, borderX0, borderY0); compare(interp,img, width - borderX1-1, height - borderY1-1); } protected void compare( InterpolatePixelS<T> interp , T img , float x , float y ) { assertEquals(compute(img, x, y), interp.get(x, y), 1e-5f); } /** * Compute the interpolation manually using independently written code. For * example, easy to write but inefficient. 
*/ protected abstract float compute(T img, float x, float y); /** * Sees if get throws an exception if it is out of bounds */ @Test void get_outside_noborder() { T img = createImage(width, height); InterpolatePixelS<T> interp = wrap(img, 0, 100); checkOutside(interp,-0.1f,0); checkOutside(interp,0,-0.1f); checkOutside(interp,width-0.99f,0); checkOutside(interp,0,height-0.99f); } private void checkOutside(InterpolatePixelS<T> interp, float x , float y) { try { interp.get(x, y); if( exceptionOutside ) fail("Didn't throw an exception when accessing an outside pixel"); } catch( RuntimeException e ) {} } /** * Compare get_fast against the value returned by get() */ @Test void get_fast() { T img = createImage(width, height); GImageMiscOps.fillUniform(img, rand, 0, 100); BoofTesting.checkSubImage(this, "get_fast", false, img); } public void get_fast(T img) { InterpolatePixelS<T> interp = wrap(img, 0, 100); assertEquals(interp.get(10, 10), interp.get_fast(10, 10), 1e-6); assertEquals(interp.get(10.1f, 10), interp.get_fast(10.1f, 10), 1e-6); assertEquals(interp.get(10, 10.6f), interp.get_fast(10, 10.6f), 1e-6); assertEquals(interp.get(10.8f, 10.6f), interp.get_fast(10.8f, 10.6f), 1e-6); } /** * If a border is specified it should handle everything just fine */ @Test void get_outside_border() { T img = createImage(width, height); GImageMiscOps.fillUniform(img, rand, 0, 100); BoofTesting.checkSubImage(this, "get_outside_border", false, img); } public void get_outside_border(T img) { InterpolatePixelS<T> interp = wrap(img, 0, 100); ImageBorder<T> border = (ImageBorder)FactoryImageBorder.singleValue(5, img.getClass()); interp.setBorder(border); interp.setImage(img); // outside the image it should work just fine assertEquals(5,interp.get(-10, 23),1e-6); assertEquals(5,interp.get(0,2330),1e-6); } @Test void getImage() { T img = createImage(width, height); InterpolatePixelS<T> interp = wrap(img, 0, 100); assertTrue(img == interp.getImage()); } /** * Scans through the whole image 
and for each pixel which is "safe" it compares the safe * value to the unsafe value. */ @Test void isInFastBounds() { T img = createImage(width, height); GImageMiscOps.fillUniform(img, rand, 0, 100); InterpolatePixelS<T> interp = wrap(img, 0, 100); for( int y = 0; y < height; y++ ) { for( int x = 0; x < width; x++ ) { if( interp.isInFastBounds(x, y)) { float a = interp.get(x,y); float b = interp.get_fast(x, y); assertEquals(a,b,1e-4); } } } } /** * Pixels out of the image are clearly not in the fast bounds */ @Test void isInFastBounds_outOfBounds() { T img = createImage(width, height); InterpolatePixelS<T> interp = wrap(img, 0, 100); assertFalse(interp.isInFastBounds(-0.1f,0)); assertFalse(interp.isInFastBounds(0, -0.1f)); assertFalse(interp.isInFastBounds(width-0.99f,0)); assertFalse(interp.isInFastBounds(0,height-0.99f)); } @Test void getFastBorder() { T img = createImage(width, height); InterpolatePixelS<T> interp = wrap(img, 0, 100); // create a region with positive cases int x0 = interp.getFastBorderX(); int x1 = width - interp.getFastBorderX(); int y0 = interp.getFastBorderX(); int y1 = height - interp.getFastBorderX(); for( int y = 0; y < height; y++ ) { for( int x = 0; x < width; x++ ) { if( x >= x0 && x < x1 && y >= y0 && y < y1 ) { assertTrue(interp.isInFastBounds(x,y)); } else { // stuff outside of the border does not need to be outside the fast bounds // this is a crude test to avoid checking the bounds every time through a loop // assertFalse(interp.isInFastBounds(x,y)); } } } } /** * Interpolates the whole image and sees if the values returned are within the specified bounds */ @Test void checkPixelValueBoundsHonored() { T img = createImage(20, 30); GImageMiscOps.fillUniform(img, rand, 0, 100); InterpolatePixelS<T> interp = wrap(img, 0, 100); interp.setBorder(FactoryImageBorder.singleValue(0, img)); for( int off = 0; off < 5; off++ ) { float frac = off/5.0f; for( int y = 0; y < img.height; y++ ) { for( int x = 0; x < img.width; x++ ) { float v = 
interp.get(x+frac,y+frac); assertTrue( v >= 0 && v <= 100 ); } } } } /** * Should produce identical results when given a sub-image. */ @Test void checkSubImage() { T imgA = createImage(30, 40); GImageMiscOps.fillUniform(imgA, rand, 0, 100); InterpolatePixelS<T> interpA = wrap(imgA, 0, 100); T imgB = BoofTesting.createSubImageOf(imgA); InterpolatePixelS<T> interpB = wrap(imgB, 0, 100); interpA.setBorder(FactoryImageBorder.singleValue(0, imgA)); interpB.setBorder(FactoryImageBorder.singleValue(0, imgB)); for (int y = 0; y < 40; y++) { for (int x = 0; x < 30; x++) { float dx = rand.nextFloat()*2-1f; float dy = rand.nextFloat()*2-1f; float xx = x + dx; float yy = y + dy; // ,make sure it is inside the image bound if( yy < 0 ) yy = 0; else if( yy > 39 ) yy = 39; if( xx < 0 ) xx = 0; else if( xx > 29 ) xx = 29; assertTrue(interpA.get(xx, yy) == interpB.get(xx,yy),"( " + x + " , " + y + " )"); } } } @Test void copy() { T img = createImage(30, 40); GImageMiscOps.fillUniform(img, rand, 0, 100); ImageBorder<T> border = (ImageBorder)FactoryImageBorder.singleValue(5, img.getClass()); InterpolatePixelS<T> interpA = wrap(img, 0, 100); interpA.setBorder(border); InterpolatePixelS<T> interpB = interpA.copy(); assertNotSame(interpA.getBorder(), interpB.getBorder()); } }
/*
 * generated by Xtext
 */
package at.ac.univie.cs.swa.soa.services;

import com.google.inject.Singleton;
import com.google.inject.Inject;

import java.util.List;
import org.eclipse.xtext.*;
import org.eclipse.xtext.service.GrammarProvider;
import org.eclipse.xtext.service.AbstractElementFinder.*;
import org.eclipse.xtext.common.services.TerminalsGrammarAccess;

// NOTE(review): Xtext-generated grammar access — presumably overwritten on regeneration,
// so avoid editing by hand.
@Singleton
public class SDLGrammarAccess extends AbstractGrammarElementFinder {

	/**
	 * Typed access to the elements of the {@code SDL} parser rule (the module root:
	 * name, imports, and any mix of services, data elements and nodes).
	 */
	public class SDLElements extends AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "SDL");
		private final Group cGroup = (Group)rule.eContents().get(1);
		private final Action cSDLAction_0 = (Action)cGroup.eContents().get(0);
		private final Keyword cModuleKeyword_1 = (Keyword)cGroup.eContents().get(1);
		private final Assignment cNameAssignment_2 = (Assignment)cGroup.eContents().get(2);
		private final RuleCall cNameFQNParserRuleCall_2_0 = (RuleCall)cNameAssignment_2.eContents().get(0);
		private final Assignment cImportsAssignment_3 = (Assignment)cGroup.eContents().get(3);
		private final RuleCall cImportsImportParserRuleCall_3_0 = (RuleCall)cImportsAssignment_3.eContents().get(0);
		private final Alternatives cAlternatives_4 = (Alternatives)cGroup.eContents().get(4);
		private final Assignment cServicesAssignment_4_0 = (Assignment)cAlternatives_4.eContents().get(0);
		private final RuleCall cServicesServiceParserRuleCall_4_0_0 = (RuleCall)cServicesAssignment_4_0.eContents().get(0);
		private final Assignment cDataAssignment_4_1 = (Assignment)cAlternatives_4.eContents().get(1);
		private final RuleCall cDataDataElementParserRuleCall_4_1_0 = (RuleCall)cDataAssignment_4_1.eContents().get(0);
		private final Assignment cNodesAssignment_4_2 = (Assignment)cAlternatives_4.eContents().get(2);
		private final RuleCall cNodesNodeParserRuleCall_4_2_0 = (RuleCall)cNodesAssignment_4_2.eContents().get(0);

		//SDL:
		//	{SDL} "module" name=FQN imports+=Import* (services+=Service | data+=DataElement | nodes+=Node)*;
		public ParserRule getRule() { return rule; }

		//{SDL} "module" name=FQN imports+=Import* (services+=Service | data+=DataElement | nodes+=Node)*
		public Group getGroup() { return cGroup; }

		//{SDL}
		public Action getSDLAction_0() { return cSDLAction_0; }

		//"module"
		public Keyword getModuleKeyword_1() { return cModuleKeyword_1; }

		//name=FQN
		public Assignment getNameAssignment_2() { return cNameAssignment_2; }

		//FQN
		public RuleCall getNameFQNParserRuleCall_2_0() { return cNameFQNParserRuleCall_2_0; }

		//imports+=Import*
		public Assignment getImportsAssignment_3() { return cImportsAssignment_3; }

		//Import
		public RuleCall getImportsImportParserRuleCall_3_0() { return cImportsImportParserRuleCall_3_0; }

		//(services+=Service | data+=DataElement | nodes+=Node)*
		public Alternatives getAlternatives_4() { return cAlternatives_4; }

		//services+=Service
		public Assignment getServicesAssignment_4_0() { return cServicesAssignment_4_0; }

		//Service
		public RuleCall getServicesServiceParserRuleCall_4_0_0() { return cServicesServiceParserRuleCall_4_0_0; }

		//data+=DataElement
		public Assignment getDataAssignment_4_1() { return cDataAssignment_4_1; }

		//DataElement
		public RuleCall getDataDataElementParserRuleCall_4_1_0() { return cDataDataElementParserRuleCall_4_1_0; }

		//nodes+=Node
		public Assignment getNodesAssignment_4_2() { return cNodesAssignment_4_2; }

		//Node
		public RuleCall getNodesNodeParserRuleCall_4_2_0() { return cNodesNodeParserRuleCall_4_2_0; }
	}

	/**
	 * Typed access to the elements of the {@code Service} parser rule.
	 */
	public class ServiceElements extends AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "Service");
		private final Group cGroup = (Group)rule.eContents().get(1);
		private final Action cServiceAction_0 = (Action)cGroup.eContents().get(0);
		private final Keyword cServiceKeyword_1 = (Keyword)cGroup.eContents().get(1);
		private final Assignment cNameAssignment_2 = (Assignment)cGroup.eContents().get(2);
		private final RuleCall cNameIDTerminalRuleCall_2_0 =
			(RuleCall)cNameAssignment_2.eContents().get(0);
		private final Keyword cLeftCurlyBracketKeyword_3 = (Keyword)cGroup.eContents().get(3);
		private final Group cGroup_4 = (Group)cGroup.eContents().get(4);
		private final Keyword cOperationKeyword_4_0 = (Keyword)cGroup_4.eContents().get(0);
		private final Assignment cOperationsAssignment_4_1 = (Assignment)cGroup_4.eContents().get(1);
		private final RuleCall cOperationsOperationParserRuleCall_4_1_0 = (RuleCall)cOperationsAssignment_4_1.eContents().get(0);
		private final Keyword cRightCurlyBracketKeyword_5 = (Keyword)cGroup.eContents().get(5);

		//Service:
		//	{Service} "Service" name=ID "{" ("operation" operations+=Operation)* "}";
		public ParserRule getRule() { return rule; }

		//{Service} "Service" name=ID "{" ("operation" operations+=Operation)* "}"
		public Group getGroup() { return cGroup; }

		//{Service}
		public Action getServiceAction_0() { return cServiceAction_0; }

		//"Service"
		public Keyword getServiceKeyword_1() { return cServiceKeyword_1; }

		//name=ID
		public Assignment getNameAssignment_2() { return cNameAssignment_2; }

		//ID
		public RuleCall getNameIDTerminalRuleCall_2_0() { return cNameIDTerminalRuleCall_2_0; }

		//"{"
		public Keyword getLeftCurlyBracketKeyword_3() { return cLeftCurlyBracketKeyword_3; }

		//("operation" operations+=Operation)*
		public Group getGroup_4() { return cGroup_4; }

		//"operation"
		public Keyword getOperationKeyword_4_0() { return cOperationKeyword_4_0; }

		//operations+=Operation
		public Assignment getOperationsAssignment_4_1() { return cOperationsAssignment_4_1; }

		//Operation
		public RuleCall getOperationsOperationParserRuleCall_4_1_0() { return cOperationsOperationParserRuleCall_4_1_0; }

		//"}"
		public Keyword getRightCurlyBracketKeyword_5() { return cRightCurlyBracketKeyword_5; }
	}

	/**
	 * Typed access to the elements of the {@code Operation} parser rule
	 * (a named operation with optional input and output declarations).
	 */
	public class OperationElements extends AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "Operation");
		private final Group cGroup = (Group)rule.eContents().get(1);
		private final Action cOperationAction_0 = (Action)cGroup.eContents().get(0);
		private final Assignment cNameAssignment_1 = (Assignment)cGroup.eContents().get(1);
		private final RuleCall cNameIDTerminalRuleCall_1_0 = (RuleCall)cNameAssignment_1.eContents().get(0);
		private final Keyword cLeftCurlyBracketKeyword_2 = (Keyword)cGroup.eContents().get(2);
		private final Group cGroup_3 = (Group)cGroup.eContents().get(3);
		private final Keyword cInputKeyword_3_0 = (Keyword)cGroup_3.eContents().get(0);
		private final Assignment cInputNameAssignment_3_1 = (Assignment)cGroup_3.eContents().get(1);
		private final RuleCall cInputNameSTRINGTerminalRuleCall_3_1_0 = (RuleCall)cInputNameAssignment_3_1.eContents().get(0);
		private final Keyword cColonKeyword_3_2 = (Keyword)cGroup_3.eContents().get(2);
		private final Assignment cInputTypeAssignment_3_3 = (Assignment)cGroup_3.eContents().get(3);
		private final CrossReference cInputTypeDataElementCrossReference_3_3_0 = (CrossReference)cInputTypeAssignment_3_3.eContents().get(0);
		private final RuleCall cInputTypeDataElementFQNParserRuleCall_3_3_0_1 = (RuleCall)cInputTypeDataElementCrossReference_3_3_0.eContents().get(1);
		private final Group cGroup_4 = (Group)cGroup.eContents().get(4);
		private final Keyword cOutputKeyword_4_0 = (Keyword)cGroup_4.eContents().get(0);
		private final Assignment cOutputNameAssignment_4_1 = (Assignment)cGroup_4.eContents().get(1);
		private final RuleCall cOutputNameSTRINGTerminalRuleCall_4_1_0 = (RuleCall)cOutputNameAssignment_4_1.eContents().get(0);
		private final Keyword cColonKeyword_4_2 = (Keyword)cGroup_4.eContents().get(2);
		private final Assignment cOutputTypeAssignment_4_3 = (Assignment)cGroup_4.eContents().get(3);
		private final CrossReference cOutputTypeDataElementCrossReference_4_3_0 = (CrossReference)cOutputTypeAssignment_4_3.eContents().get(0);
		private final RuleCall cOutputTypeDataElementFQNParserRuleCall_4_3_0_1 = (RuleCall)cOutputTypeDataElementCrossReference_4_3_0.eContents().get(1);
		private final Keyword cRightCurlyBracketKeyword_5 = (Keyword)cGroup.eContents().get(5);

		//Operation:
		//	{Operation} name=ID "{" ("input" inputName=STRING ":" inputType=[DataElement|FQN])? ("output" outputName=STRING ":"
		//	outputType=[DataElement|FQN])? "}";
		public ParserRule getRule() { return rule; }

		//{Operation} name=ID "{" ("input" inputName=STRING ":" inputType=[DataElement|FQN])? ("output" outputName=STRING ":"
		//outputType=[DataElement|FQN])? "}"
		public Group getGroup() { return cGroup; }

		//{Operation}
		public Action getOperationAction_0() { return cOperationAction_0; }

		//name=ID
		public Assignment getNameAssignment_1() { return cNameAssignment_1; }

		//ID
		public RuleCall getNameIDTerminalRuleCall_1_0() { return cNameIDTerminalRuleCall_1_0; }

		//"{"
		public Keyword getLeftCurlyBracketKeyword_2() { return cLeftCurlyBracketKeyword_2; }

		//("input" inputName=STRING ":" inputType=[DataElement|FQN])?
		public Group getGroup_3() { return cGroup_3; }

		//"input"
		public Keyword getInputKeyword_3_0() { return cInputKeyword_3_0; }

		//inputName=STRING
		public Assignment getInputNameAssignment_3_1() { return cInputNameAssignment_3_1; }

		//STRING
		public RuleCall getInputNameSTRINGTerminalRuleCall_3_1_0() { return cInputNameSTRINGTerminalRuleCall_3_1_0; }

		//":"
		public Keyword getColonKeyword_3_2() { return cColonKeyword_3_2; }

		//inputType=[DataElement|FQN]
		public Assignment getInputTypeAssignment_3_3() { return cInputTypeAssignment_3_3; }

		//[DataElement|FQN]
		public CrossReference getInputTypeDataElementCrossReference_3_3_0() { return cInputTypeDataElementCrossReference_3_3_0; }

		//FQN
		public RuleCall getInputTypeDataElementFQNParserRuleCall_3_3_0_1() { return cInputTypeDataElementFQNParserRuleCall_3_3_0_1; }

		//("output" outputName=STRING ":" outputType=[DataElement|FQN])?
		public Group getGroup_4() { return cGroup_4; }

		//"output"
		public Keyword getOutputKeyword_4_0() { return cOutputKeyword_4_0; }

		//outputName=STRING
		public Assignment getOutputNameAssignment_4_1() { return cOutputNameAssignment_4_1; }

		//STRING
		public RuleCall getOutputNameSTRINGTerminalRuleCall_4_1_0() { return cOutputNameSTRINGTerminalRuleCall_4_1_0; }

		//":"
		public Keyword getColonKeyword_4_2() { return cColonKeyword_4_2; }

		//outputType=[DataElement|FQN]
		public Assignment getOutputTypeAssignment_4_3() { return cOutputTypeAssignment_4_3; }

		//[DataElement|FQN]
		public CrossReference getOutputTypeDataElementCrossReference_4_3_0() { return cOutputTypeDataElementCrossReference_4_3_0; }

		//FQN
		public RuleCall getOutputTypeDataElementFQNParserRuleCall_4_3_0_1() { return cOutputTypeDataElementFQNParserRuleCall_4_3_0_1; }

		//"}"
		public Keyword getRightCurlyBracketKeyword_5() { return cRightCurlyBracketKeyword_5; }
	}

	/**
	 * Typed access to the elements of the {@code Node} parser rule
	 * (a deployment node with a base URI hosting one or more services).
	 */
	public class NodeElements extends AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "Node");
		private final Group cGroup = (Group)rule.eContents().get(1);
		private final Action cNodeAction_0 = (Action)cGroup.eContents().get(0);
		private final Keyword cNodeKeyword_1 = (Keyword)cGroup.eContents().get(1);
		private final Assignment cNameAssignment_2 = (Assignment)cGroup.eContents().get(2);
		private final RuleCall cNameIDTerminalRuleCall_2_0 = (RuleCall)cNameAssignment_2.eContents().get(0);
		private final Keyword cUriKeyword_3 = (Keyword)cGroup.eContents().get(3);
		private final Keyword cEqualsSignKeyword_4 = (Keyword)cGroup.eContents().get(4);
		private final Assignment cBaseURIAssignment_5 = (Assignment)cGroup.eContents().get(5);
		private final RuleCall cBaseURISTRINGTerminalRuleCall_5_0 = (RuleCall)cBaseURIAssignment_5.eContents().get(0);
		private final Keyword cHostsKeyword_6 = (Keyword)cGroup.eContents().get(6);
		private final Keyword cLeftCurlyBracketKeyword_7 = (Keyword)cGroup.eContents().get(7);
		private final Assignment cServicesAssignment_8 = (Assignment)cGroup.eContents().get(8);
		private final CrossReference cServicesServiceCrossReference_8_0 = (CrossReference)cServicesAssignment_8.eContents().get(0);
		private final RuleCall cServicesServiceFQNParserRuleCall_8_0_1 = (RuleCall)cServicesServiceCrossReference_8_0.eContents().get(1);
		private final Group cGroup_9 = (Group)cGroup.eContents().get(9);
		private final Keyword cCommaKeyword_9_0 = (Keyword)cGroup_9.eContents().get(0);
		private final Assignment cServicesAssignment_9_1 = (Assignment)cGroup_9.eContents().get(1);
		private final CrossReference cServicesServiceCrossReference_9_1_0 = (CrossReference)cServicesAssignment_9_1.eContents().get(0);
		private final RuleCall cServicesServiceFQNParserRuleCall_9_1_0_1 = (RuleCall)cServicesServiceCrossReference_9_1_0.eContents().get(1);
		private final Keyword cRightCurlyBracketKeyword_10 = (Keyword)cGroup.eContents().get(10);

		//Node:
		//	{Node} "Node" name=ID "uri" "=" baseURI=STRING "hosts" "{" services+=[Service|FQN] ("," services+=[Service|FQN])* "}";
		public ParserRule getRule() { return rule; }

		//{Node} "Node" name=ID "uri" "=" baseURI=STRING "hosts" "{" services+=[Service|FQN] ("," services+=[Service|FQN])* "}"
		public Group getGroup() { return cGroup; }

		//{Node}
		public Action getNodeAction_0() { return cNodeAction_0; }

		//"Node"
		public Keyword getNodeKeyword_1() { return cNodeKeyword_1; }

		//name=ID
		public Assignment getNameAssignment_2() { return cNameAssignment_2; }

		//ID
		public RuleCall getNameIDTerminalRuleCall_2_0() { return cNameIDTerminalRuleCall_2_0; }

		//"uri"
		public Keyword getUriKeyword_3() { return cUriKeyword_3; }

		//"="
		public Keyword getEqualsSignKeyword_4() { return cEqualsSignKeyword_4; }

		//baseURI=STRING
		public Assignment getBaseURIAssignment_5() { return cBaseURIAssignment_5; }

		//STRING
		public RuleCall getBaseURISTRINGTerminalRuleCall_5_0() { return cBaseURISTRINGTerminalRuleCall_5_0; }

		//"hosts"
		public Keyword getHostsKeyword_6() { return cHostsKeyword_6; }

		//"{"
		public Keyword getLeftCurlyBracketKeyword_7() { return cLeftCurlyBracketKeyword_7; }

		//services+=[Service|FQN]
		public Assignment getServicesAssignment_8() { return cServicesAssignment_8; }

		//[Service|FQN]
		public CrossReference getServicesServiceCrossReference_8_0() { return cServicesServiceCrossReference_8_0; }

		//FQN
		public RuleCall getServicesServiceFQNParserRuleCall_8_0_1() { return cServicesServiceFQNParserRuleCall_8_0_1; }

		//("," services+=[Service|FQN])*
		public Group getGroup_9() { return cGroup_9; }

		//","
		public Keyword getCommaKeyword_9_0() { return cCommaKeyword_9_0; }

		//services+=[Service|FQN]
		public Assignment getServicesAssignment_9_1() { return cServicesAssignment_9_1; }

		//[Service|FQN]
		public CrossReference getServicesServiceCrossReference_9_1_0() { return cServicesServiceCrossReference_9_1_0; }

		//FQN
		public RuleCall getServicesServiceFQNParserRuleCall_9_1_0_1() { return cServicesServiceFQNParserRuleCall_9_1_0_1; }

		//"}"
		public Keyword getRightCurlyBracketKeyword_10() { return cRightCurlyBracketKeyword_10; }
	}

	/**
	 * Typed access to the elements of the {@code DataElement} parser rule
	 * (either a simple or a complex element).
	 */
	public class DataElementElements extends AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "DataElement");
		private final Alternatives cAlternatives = (Alternatives)rule.eContents().get(1);
		private final RuleCall cSimpleElementParserRuleCall_0 = (RuleCall)cAlternatives.eContents().get(0);
		private final RuleCall cComplexElementParserRuleCall_1 = (RuleCall)cAlternatives.eContents().get(1);

		//DataElement:
		//	SimpleElement | ComplexElement;
		public ParserRule getRule() { return rule; }

		//SimpleElement | ComplexElement
		public Alternatives getAlternatives() { return cAlternatives; }

		//SimpleElement
		public RuleCall getSimpleElementParserRuleCall_0() { return cSimpleElementParserRuleCall_0; }

		//ComplexElement
		public RuleCall getComplexElementParserRuleCall_1() { return cComplexElementParserRuleCall_1; }
	}

	/**
	 * Typed access to the elements of the {@code ComplexElement} parser rule
	 * (a named record of simple elements).
	 */
	public class ComplexElementElements extends AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "ComplexElement");
		private final Group cGroup = (Group)rule.eContents().get(1);
		private final Action cComplexElementAction_0 = (Action)cGroup.eContents().get(0);
		private final Keyword cComplexDataKeyword_1 = (Keyword)cGroup.eContents().get(1);
		private final Assignment cNameAssignment_2 = (Assignment)cGroup.eContents().get(2);
		private final RuleCall cNameIDTerminalRuleCall_2_0 = (RuleCall)cNameAssignment_2.eContents().get(0);
		private final Keyword cLeftCurlyBracketKeyword_3 = (Keyword)cGroup.eContents().get(3);
		private final Assignment cElementsAssignment_4 = (Assignment)cGroup.eContents().get(4);
		private final RuleCall cElementsSimpleElementParserRuleCall_4_0 = (RuleCall)cElementsAssignment_4.eContents().get(0);
		private final Keyword cRightCurlyBracketKeyword_5 = (Keyword)cGroup.eContents().get(5);

		//ComplexElement:
		//	{ComplexElement} "complex-data" name=ID "{" elements+=SimpleElement* "}";
		public ParserRule getRule() { return rule; }

		//{ComplexElement} "complex-data" name=ID "{" elements+=SimpleElement* "}"
		public Group getGroup() { return cGroup; }

		//{ComplexElement}
		public Action getComplexElementAction_0() { return cComplexElementAction_0; }

		//"complex-data"
		public Keyword getComplexDataKeyword_1() { return cComplexDataKeyword_1; }

		//name=ID
		public Assignment getNameAssignment_2() { return cNameAssignment_2; }

		//ID
		public RuleCall getNameIDTerminalRuleCall_2_0() { return cNameIDTerminalRuleCall_2_0; }

		//"{"
		public Keyword getLeftCurlyBracketKeyword_3() { return cLeftCurlyBracketKeyword_3; }

		//elements+=SimpleElement*
		public Assignment getElementsAssignment_4() { return cElementsAssignment_4; }

		//SimpleElement
		public RuleCall getElementsSimpleElementParserRuleCall_4_0() { return cElementsSimpleElementParserRuleCall_4_0; }

		//"}"
		public Keyword getRightCurlyBracketKeyword_5() { return cRightCurlyBracketKeyword_5; }
	}

	/**
	 * Typed access to the elements of the {@code SimpleElement} parser rule
	 * (a typed field, either a built-in DATATYPE or a reference to a ComplexElement,
	 * with an optional multiplicity).
	 */
	public class SimpleElementElements extends AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "SimpleElement");
		private final Group cGroup = (Group)rule.eContents().get(1);
		private final Action cSimpleElementAction_0 = (Action)cGroup.eContents().get(0);
		private final Assignment cNameAssignment_1 = (Assignment)cGroup.eContents().get(1);
		private final RuleCall cNameIDTerminalRuleCall_1_0 = (RuleCall)cNameAssignment_1.eContents().get(0);
		private final Keyword cColonKeyword_2 = (Keyword)cGroup.eContents().get(2);
		private final Alternatives cAlternatives_3 = (Alternatives)cGroup.eContents().get(3);
		private final Assignment cTypeAssignment_3_0 = (Assignment)cAlternatives_3.eContents().get(0);
		private final RuleCall cTypeDATATYPEEnumRuleCall_3_0_0 = (RuleCall)cTypeAssignment_3_0.eContents().get(0);
		private final Assignment cRefAssignment_3_1 = (Assignment)cAlternatives_3.eContents().get(1);
		private final CrossReference cRefComplexElementCrossReference_3_1_0 = (CrossReference)cRefAssignment_3_1.eContents().get(0);
		private final RuleCall cRefComplexElementFQNParserRuleCall_3_1_0_1 = (RuleCall)cRefComplexElementCrossReference_3_1_0.eContents().get(1);
		private final Assignment cMultiplicityAssignment_4 = (Assignment)cGroup.eContents().get(4);
		private final RuleCall cMultiplicityMULTIPLICITYEnumRuleCall_4_0 = (RuleCall)cMultiplicityAssignment_4.eContents().get(0);

		//SimpleElement:
		//	{SimpleElement} name=ID ":" (type=DATATYPE | ref=[ComplexElement|FQN]) multiplicity=MULTIPLICITY?;
		public ParserRule getRule() { return rule; }

		//{SimpleElement} name=ID ":" (type=DATATYPE | ref=[ComplexElement|FQN]) multiplicity=MULTIPLICITY?
		public Group getGroup() { return cGroup; }

		//{SimpleElement}
		public Action getSimpleElementAction_0() { return cSimpleElementAction_0; }

		//name=ID
		public Assignment getNameAssignment_1() { return cNameAssignment_1; }

		//ID
		public RuleCall getNameIDTerminalRuleCall_1_0() { return cNameIDTerminalRuleCall_1_0; }

		//":"
		public Keyword getColonKeyword_2() { return cColonKeyword_2; }

		//type=DATATYPE | ref=[ComplexElement|FQN]
		public Alternatives getAlternatives_3() { return cAlternatives_3; }

		//type=DATATYPE
		public Assignment getTypeAssignment_3_0() { return cTypeAssignment_3_0; }

		//DATATYPE
		public RuleCall getTypeDATATYPEEnumRuleCall_3_0_0() { return cTypeDATATYPEEnumRuleCall_3_0_0; }

		//ref=[ComplexElement|FQN]
		public Assignment getRefAssignment_3_1() { return cRefAssignment_3_1; }

		//[ComplexElement|FQN]
		public CrossReference getRefComplexElementCrossReference_3_1_0() { return cRefComplexElementCrossReference_3_1_0; }

		//FQN
		public RuleCall getRefComplexElementFQNParserRuleCall_3_1_0_1() { return cRefComplexElementFQNParserRuleCall_3_1_0_1; }

		//multiplicity=MULTIPLICITY?
		public Assignment getMultiplicityAssignment_4() { return cMultiplicityAssignment_4; }

		//MULTIPLICITY
		public RuleCall getMultiplicityMULTIPLICITYEnumRuleCall_4_0() { return cMultiplicityMULTIPLICITYEnumRuleCall_4_0; }
	}

	/**
	 * Typed access to the elements of the {@code FQN} parser rule
	 * (a dot-separated qualified name).
	 */
	public class FQNElements extends AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "FQN");
		private final Group cGroup = (Group)rule.eContents().get(1);
		private final RuleCall cIDTerminalRuleCall_0 = (RuleCall)cGroup.eContents().get(0);
		private final Group cGroup_1 = (Group)cGroup.eContents().get(1);
		private final Keyword cFullStopKeyword_1_0 = (Keyword)cGroup_1.eContents().get(0);
		private final RuleCall cIDTerminalRuleCall_1_1 = (RuleCall)cGroup_1.eContents().get(1);

		//FQN:
		//	ID ("." ID)*;
		public ParserRule getRule() { return rule; }

		//ID ("." ID)*
		public Group getGroup() { return cGroup; }

		//ID
		public RuleCall getIDTerminalRuleCall_0() { return cIDTerminalRuleCall_0; }

		//("." ID)*
		public Group getGroup_1() { return cGroup_1; }

		//"."
		public Keyword getFullStopKeyword_1_0() { return cFullStopKeyword_1_0; }

		//ID
		public RuleCall getIDTerminalRuleCall_1_1() { return cIDTerminalRuleCall_1_1; }
	}

	/**
	 * Typed access to the elements of the {@code FqnWildcard} parser rule
	 * (a qualified name with an optional ".*" wildcard suffix).
	 */
	public class FqnWildcardElements extends AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "FqnWildcard");
		private final Group cGroup = (Group)rule.eContents().get(1);
		private final RuleCall cFQNParserRuleCall_0 = (RuleCall)cGroup.eContents().get(0);
		private final Keyword cFullStopAsteriskKeyword_1 = (Keyword)cGroup.eContents().get(1);

		//FqnWildcard:
		//	FQN ".*"?;
		public ParserRule getRule() { return rule; }

		//FQN ".*"?
		public Group getGroup() { return cGroup; }

		//FQN
		public RuleCall getFQNParserRuleCall_0() { return cFQNParserRuleCall_0; }

		//".*"?
		public Keyword getFullStopAsteriskKeyword_1() { return cFullStopAsteriskKeyword_1; }
	}

	/**
	 * Typed access to the elements of the {@code Import} parser rule.
	 */
	public class ImportElements extends AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "Import");
		private final Group cGroup = (Group)rule.eContents().get(1);
		private final Keyword cImportKeyword_0 = (Keyword)cGroup.eContents().get(0);
		private final Assignment cImportedNamespaceAssignment_1 = (Assignment)cGroup.eContents().get(1);
		private final RuleCall cImportedNamespaceFqnWildcardParserRuleCall_1_0 = (RuleCall)cImportedNamespaceAssignment_1.eContents().get(0);

		//Import:
		//	"import" importedNamespace=FqnWildcard;
		public ParserRule getRule() { return rule; }

		//"import" importedNamespace=FqnWildcard
		public Group getGroup() { return cGroup; }

		//"import"
		public Keyword getImportKeyword_0() { return cImportKeyword_0; }

		//importedNamespace=FqnWildcard
		public Assignment getImportedNamespaceAssignment_1() { return cImportedNamespaceAssignment_1; }
		//FqnWildcard
		public RuleCall getImportedNamespaceFqnWildcardParserRuleCall_1_0() { return cImportedNamespaceFqnWildcardParserRuleCall_1_0; }
	}

	/**
	 * Typed access to the literals of the {@code DATATYPE} enum rule
	 * (the built-in simple data types of the language).
	 */
	public class DATATYPEElements extends AbstractEnumRuleElementFinder {
		private final EnumRule rule = (EnumRule) GrammarUtil.findRuleForName(getGrammar(), "DATATYPE");
		private final Alternatives cAlternatives = (Alternatives)rule.eContents().get(1);
		private final EnumLiteralDeclaration cStringEnumLiteralDeclaration_0 = (EnumLiteralDeclaration)cAlternatives.eContents().get(0);
		private final Keyword cStringStringKeyword_0_0 = (Keyword)cStringEnumLiteralDeclaration_0.eContents().get(0);
		private final EnumLiteralDeclaration cIntegerEnumLiteralDeclaration_1 = (EnumLiteralDeclaration)cAlternatives.eContents().get(1);
		private final Keyword cIntegerIntegerKeyword_1_0 = (Keyword)cIntegerEnumLiteralDeclaration_1.eContents().get(0);
		private final EnumLiteralDeclaration cFloatEnumLiteralDeclaration_2 = (EnumLiteralDeclaration)cAlternatives.eContents().get(2);
		private final Keyword cFloatFloatKeyword_2_0 = (Keyword)cFloatEnumLiteralDeclaration_2.eContents().get(0);
		private final EnumLiteralDeclaration cDateEnumLiteralDeclaration_3 = (EnumLiteralDeclaration)cAlternatives.eContents().get(3);
		private final Keyword cDateDateKeyword_3_0 = (Keyword)cDateEnumLiteralDeclaration_3.eContents().get(0);
		private final EnumLiteralDeclaration cTimeEnumLiteralDeclaration_4 = (EnumLiteralDeclaration)cAlternatives.eContents().get(4);
		private final Keyword cTimeTimeKeyword_4_0 = (Keyword)cTimeEnumLiteralDeclaration_4.eContents().get(0);
		private final EnumLiteralDeclaration cDatetimeEnumLiteralDeclaration_5 = (EnumLiteralDeclaration)cAlternatives.eContents().get(5);
		private final Keyword cDatetimeDateTimeKeyword_5_0 = (Keyword)cDatetimeEnumLiteralDeclaration_5.eContents().get(0);
		private final EnumLiteralDeclaration cBinaryEnumLiteralDeclaration_6 = (EnumLiteralDeclaration)cAlternatives.eContents().get(6);
		private final Keyword cBinaryBinaryKeyword_6_0 = (Keyword)cBinaryEnumLiteralDeclaration_6.eContents().get(0);
		private final EnumLiteralDeclaration cBooleanEnumLiteralDeclaration_7 = (EnumLiteralDeclaration)cAlternatives.eContents().get(7);
		private final Keyword cBooleanBooleanKeyword_7_0 = (Keyword)cBooleanEnumLiteralDeclaration_7.eContents().get(0);

		//enum DATATYPE:
		//	string | integer | float | date | time | datetime="dateTime" | binary | boolean;
		public EnumRule getRule() { return rule; }

		//string | integer | float | date | time | datetime="dateTime" | binary | boolean
		public Alternatives getAlternatives() { return cAlternatives; }

		//string
		public EnumLiteralDeclaration getStringEnumLiteralDeclaration_0() { return cStringEnumLiteralDeclaration_0; }

		//"string"
		public Keyword getStringStringKeyword_0_0() { return cStringStringKeyword_0_0; }

		//integer
		public EnumLiteralDeclaration getIntegerEnumLiteralDeclaration_1() { return cIntegerEnumLiteralDeclaration_1; }

		//"integer"
		public Keyword getIntegerIntegerKeyword_1_0() { return cIntegerIntegerKeyword_1_0; }

		//float
		public EnumLiteralDeclaration getFloatEnumLiteralDeclaration_2() { return cFloatEnumLiteralDeclaration_2; }

		//"float"
		public Keyword getFloatFloatKeyword_2_0() { return cFloatFloatKeyword_2_0; }

		//date
		public EnumLiteralDeclaration getDateEnumLiteralDeclaration_3() { return cDateEnumLiteralDeclaration_3; }

		//"date"
		public Keyword getDateDateKeyword_3_0() { return cDateDateKeyword_3_0; }

		//time
		public EnumLiteralDeclaration getTimeEnumLiteralDeclaration_4() { return cTimeEnumLiteralDeclaration_4; }

		//"time"
		public Keyword getTimeTimeKeyword_4_0() { return cTimeTimeKeyword_4_0; }

		//datetime="dateTime"
		public EnumLiteralDeclaration getDatetimeEnumLiteralDeclaration_5() { return cDatetimeEnumLiteralDeclaration_5; }

		//"dateTime"
		public Keyword getDatetimeDateTimeKeyword_5_0() { return cDatetimeDateTimeKeyword_5_0; }

		//binary
		public EnumLiteralDeclaration getBinaryEnumLiteralDeclaration_6() { return cBinaryEnumLiteralDeclaration_6; }

		//"binary"
		public Keyword getBinaryBinaryKeyword_6_0() { return cBinaryBinaryKeyword_6_0; }

		//boolean
		public EnumLiteralDeclaration getBooleanEnumLiteralDeclaration_7() { return cBooleanEnumLiteralDeclaration_7; }

		//"boolean"
		public Keyword getBooleanBooleanKeyword_7_0() { return cBooleanBooleanKeyword_7_0; }
	}

	/**
	 * Typed access to the literals of the {@code MULTIPLICITY} enum rule
	 * ("?", "+" or "*").
	 */
	public class MULTIPLICITYElements extends AbstractEnumRuleElementFinder {
		private final EnumRule rule = (EnumRule) GrammarUtil.findRuleForName(getGrammar(), "MULTIPLICITY");
		private final Alternatives cAlternatives = (Alternatives)rule.eContents().get(1);
		private final EnumLiteralDeclaration cOPTIONALEnumLiteralDeclaration_0 = (EnumLiteralDeclaration)cAlternatives.eContents().get(0);
		private final Keyword cOPTIONALQuestionMarkKeyword_0_0 = (Keyword)cOPTIONALEnumLiteralDeclaration_0.eContents().get(0);
		private final EnumLiteralDeclaration cPLUSEnumLiteralDeclaration_1 = (EnumLiteralDeclaration)cAlternatives.eContents().get(1);
		private final Keyword cPLUSPlusSignKeyword_1_0 = (Keyword)cPLUSEnumLiteralDeclaration_1.eContents().get(0);
		private final EnumLiteralDeclaration cSTAREnumLiteralDeclaration_2 = (EnumLiteralDeclaration)cAlternatives.eContents().get(2);
		private final Keyword cSTARAsteriskKeyword_2_0 = (Keyword)cSTAREnumLiteralDeclaration_2.eContents().get(0);

		//enum MULTIPLICITY:
		//	OPTIONAL="?" | PLUS="+" | STAR="*";
		public EnumRule getRule() { return rule; }

		//OPTIONAL="?" | PLUS="+" | STAR="*"
		public Alternatives getAlternatives() { return cAlternatives; }

		//OPTIONAL="?"
		public EnumLiteralDeclaration getOPTIONALEnumLiteralDeclaration_0() { return cOPTIONALEnumLiteralDeclaration_0; }

		//"?"
public Keyword getOPTIONALQuestionMarkKeyword_0_0() { return cOPTIONALQuestionMarkKeyword_0_0; } //PLUS="+" public EnumLiteralDeclaration getPLUSEnumLiteralDeclaration_1() { return cPLUSEnumLiteralDeclaration_1; } //"+" public Keyword getPLUSPlusSignKeyword_1_0() { return cPLUSPlusSignKeyword_1_0; } //STAR="*" public EnumLiteralDeclaration getSTAREnumLiteralDeclaration_2() { return cSTAREnumLiteralDeclaration_2; } //"*" public Keyword getSTARAsteriskKeyword_2_0() { return cSTARAsteriskKeyword_2_0; } } private SDLElements pSDL; private ServiceElements pService; private OperationElements pOperation; private NodeElements pNode; private DataElementElements pDataElement; private ComplexElementElements pComplexElement; private SimpleElementElements pSimpleElement; private DATATYPEElements unknownRuleDATATYPE; private MULTIPLICITYElements unknownRuleMULTIPLICITY; private FQNElements pFQN; private FqnWildcardElements pFqnWildcard; private ImportElements pImport; private final Grammar grammar; private TerminalsGrammarAccess gaTerminals; @Inject public SDLGrammarAccess(GrammarProvider grammarProvider, TerminalsGrammarAccess gaTerminals) { this.grammar = internalFindGrammar(grammarProvider); this.gaTerminals = gaTerminals; } protected Grammar internalFindGrammar(GrammarProvider grammarProvider) { Grammar grammar = grammarProvider.getGrammar(this); while (grammar != null) { if ("at.ac.univie.cs.swa.soa.SDL".equals(grammar.getName())) { return grammar; } List<Grammar> grammars = grammar.getUsedGrammars(); if (!grammars.isEmpty()) { grammar = grammars.iterator().next(); } else { return null; } } return grammar; } public Grammar getGrammar() { return grammar; } public TerminalsGrammarAccess getTerminalsGrammarAccess() { return gaTerminals; } //SDL: // {SDL} "module" name=FQN imports+=Import* (services+=Service | data+=DataElement | nodes+=Node)*; public SDLElements getSDLAccess() { return (pSDL != null) ? 
pSDL : (pSDL = new SDLElements()); } public ParserRule getSDLRule() { return getSDLAccess().getRule(); } //Service: // {Service} "Service" name=ID "{" ("operation" operations+=Operation)* "}"; public ServiceElements getServiceAccess() { return (pService != null) ? pService : (pService = new ServiceElements()); } public ParserRule getServiceRule() { return getServiceAccess().getRule(); } //Operation: // {Operation} name=ID "{" ("input" inputName=STRING ":" inputType=[DataElement|FQN])? ("output" outputName=STRING ":" // outputType=[DataElement|FQN])? "}"; public OperationElements getOperationAccess() { return (pOperation != null) ? pOperation : (pOperation = new OperationElements()); } public ParserRule getOperationRule() { return getOperationAccess().getRule(); } //Node: // {Node} "Node" name=ID "uri" "=" baseURI=STRING "hosts" "{" services+=[Service|FQN] ("," services+=[Service|FQN])* "}"; public NodeElements getNodeAccess() { return (pNode != null) ? pNode : (pNode = new NodeElements()); } public ParserRule getNodeRule() { return getNodeAccess().getRule(); } //DataElement: // SimpleElement | ComplexElement; public DataElementElements getDataElementAccess() { return (pDataElement != null) ? pDataElement : (pDataElement = new DataElementElements()); } public ParserRule getDataElementRule() { return getDataElementAccess().getRule(); } //ComplexElement: // {ComplexElement} "complex-data" name=ID "{" elements+=SimpleElement* "}"; public ComplexElementElements getComplexElementAccess() { return (pComplexElement != null) ? pComplexElement : (pComplexElement = new ComplexElementElements()); } public ParserRule getComplexElementRule() { return getComplexElementAccess().getRule(); } //SimpleElement: // {SimpleElement} name=ID ":" (type=DATATYPE | ref=[ComplexElement|FQN]) multiplicity=MULTIPLICITY?; public SimpleElementElements getSimpleElementAccess() { return (pSimpleElement != null) ? 
pSimpleElement : (pSimpleElement = new SimpleElementElements()); } public ParserRule getSimpleElementRule() { return getSimpleElementAccess().getRule(); } //enum DATATYPE: // string | integer | float | date | time | datetime="dateTime" | binary | boolean; public DATATYPEElements getDATATYPEAccess() { return (unknownRuleDATATYPE != null) ? unknownRuleDATATYPE : (unknownRuleDATATYPE = new DATATYPEElements()); } public EnumRule getDATATYPERule() { return getDATATYPEAccess().getRule(); } //enum MULTIPLICITY: // OPTIONAL="?" | PLUS="+" | STAR="*"; public MULTIPLICITYElements getMULTIPLICITYAccess() { return (unknownRuleMULTIPLICITY != null) ? unknownRuleMULTIPLICITY : (unknownRuleMULTIPLICITY = new MULTIPLICITYElements()); } public EnumRule getMULTIPLICITYRule() { return getMULTIPLICITYAccess().getRule(); } //FQN: // ID ("." ID)*; public FQNElements getFQNAccess() { return (pFQN != null) ? pFQN : (pFQN = new FQNElements()); } public ParserRule getFQNRule() { return getFQNAccess().getRule(); } //FqnWildcard: // FQN ".*"?; public FqnWildcardElements getFqnWildcardAccess() { return (pFqnWildcard != null) ? pFqnWildcard : (pFqnWildcard = new FqnWildcardElements()); } public ParserRule getFqnWildcardRule() { return getFqnWildcardAccess().getRule(); } //Import: // "import" importedNamespace=FqnWildcard; public ImportElements getImportAccess() { return (pImport != null) ? pImport : (pImport = new ImportElements()); } public ParserRule getImportRule() { return getImportAccess().getRule(); } //terminal ID: // "^"? 
("a".."z" | "A".."Z" | "_") ("a".."z" | "A".."Z" | "_" | "0".."9")*; public TerminalRule getIDRule() { return gaTerminals.getIDRule(); } //terminal INT returns ecore::EInt: // "0".."9"+; public TerminalRule getINTRule() { return gaTerminals.getINTRule(); } //terminal STRING: // "\"" ("\\" ("b" | "t" | "n" | "f" | "r" | "u" | "\"" | "\'" | "\\") | !("\\" | "\""))* "\"" | "\'" ("\\" ("b" | "t" | // "n" | "f" | "r" | "u" | "\"" | "\'" | "\\") | !("\\" | "\'"))* "\'"; public TerminalRule getSTRINGRule() { return gaTerminals.getSTRINGRule(); } //terminal ML_COMMENT: // "/ *"->"* /"; public TerminalRule getML_COMMENTRule() { return gaTerminals.getML_COMMENTRule(); } //terminal SL_COMMENT: // "//" !("\n" | "\r")* ("\r"? "\n")?; public TerminalRule getSL_COMMENTRule() { return gaTerminals.getSL_COMMENTRule(); } //terminal WS: // (" " | "\t" | "\r" | "\n")+; public TerminalRule getWSRule() { return gaTerminals.getWSRule(); } //terminal ANY_OTHER: // .; public TerminalRule getANY_OTHERRule() { return gaTerminals.getANY_OTHERRule(); } }
package me.vilsol.nmswrapper.wraps.unparsed; import com.mojang.authlib.GameProfile; import me.vilsol.nmswrapper.NMSWrapper; import me.vilsol.nmswrapper.reflections.ReflectiveClass; import me.vilsol.nmswrapper.reflections.ReflectiveMethod; import me.vilsol.nmswrapper.wraps.NMSItemStack; import me.vilsol.nmswrapper.wraps.NMSWorld; import org.bukkit.WeatherType; @ReflectiveClass(name = "EntityPlayer") public class NMSEntityPlayer extends NMSEntityHuman implements NMSICrafting { public NMSEntityPlayer(Object nmsObject){ super(nmsObject); } public NMSEntityPlayer(NMSMinecraftServer minecraftServer, NMSWorldServer worldServer, GameProfile gameProfile, NMSPlayerInteractManager playerInteractManager){ super("EntityPlayer", new Object[]{NMSMinecraftServer.class, NMSWorldServer.class, GameProfile.class, NMSPlayerInteractManager.class}, new Object[]{minecraftServer, worldServer, gameProfile, playerInteractManager}); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityPlayer#B() */ @ReflectiveMethod(name = "B", types = {}) public void B(){ NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityPlayer#C() */ @ReflectiveMethod(name = "C", types = {}) public NMSEntity C(){ return new NMSEntity(NMSWrapper.getInstance().exec(nmsObject)); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityPlayer#D() */ @ReflectiveMethod(name = "D", types = {}) public long D(){ return (long) NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityPlayer#ad() */ @ReflectiveMethod(name = "ad", types = {}) public boolean ad(){ return (boolean) NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityPlayer#ae() */ @ReflectiveMethod(name = "ae", types = {}) public boolean ae(){ return (boolean) NMSWrapper.getInstance().exec(nmsObject); } /** * @see 
net.minecraft.server.v1_9_R1.EntityPlayer#attack(net.minecraft.server.v1_9_R1.Entity) */ @ReflectiveMethod(name = "attack", types = {NMSEntity.class}) public void attack(NMSEntity entity){ NMSWrapper.getInstance().exec(nmsObject, entity); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityPlayer#b(net.minecraft.server.v1_9_R1.Entity) */ @ReflectiveMethod(name = "b", types = {NMSEntity.class}) public void b(NMSEntity entity){ NMSWrapper.getInstance().exec(nmsObject, entity); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#broadcastCarriedItem() */ @ReflectiveMethod(name = "broadcastCarriedItem", types = {}) public void broadcastCarriedItem(){ NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityPlayer#c(net.minecraft.server.v1_9_R1.Entity) */ @ReflectiveMethod(name = "c", types = {NMSEntity.class}) public void c(NMSEntity entity){ NMSWrapper.getInstance().exec(nmsObject, entity); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#closeInventory() */ @ReflectiveMethod(name = "closeInventory", types = {}) public void closeInventory(){ NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#copyTo(net.minecraft.server.v1_9_R1.EntityHuman, boolean) */ @ReflectiveMethod(name = "copyTo", types = {NMSEntityHuman.class, boolean.class}) public void copyTo(NMSEntityHuman entityHuman, boolean b){ NMSWrapper.getInstance().exec(nmsObject, entityHuman, b); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityPlayer#d(net.minecraft.server.v1_9_R1.Entity) */ @ReflectiveMethod(name = "d", types = {NMSEntity.class}) public void d(NMSEntity entity){ NMSWrapper.getInstance().exec(nmsObject, entity); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#damageEntity(net.minecraft.server.v1_9_R1.DamageSource, float) */ @ReflectiveMethod(name = "damageEntity", types = {NMSDamageSource.class, float.class}) public boolean 
damageEntity(NMSDamageSource damageSource, float f){ return (boolean) NMSWrapper.getInstance().exec(nmsObject, damageSource, f); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#die(net.minecraft.server.v1_9_R1.DamageSource) */ @ReflectiveMethod(name = "die", types = {NMSDamageSource.class}) public void die(NMSDamageSource damageSource){ NMSWrapper.getInstance().exec(nmsObject, damageSource); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#enderTeleportTo(double, double, double) */ @ReflectiveMethod(name = "enderTeleportTo", types = {double.class, double.class, double.class}) public void enderTeleportTo(double d, double d1, double d2){ NMSWrapper.getInstance().exec(nmsObject, d, d1, d2); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#enterCombat() */ @ReflectiveMethod(name = "enterCombat", types = {}) public void enterCombat(){ NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#exitCombat() */ @ReflectiveMethod(name = "exitCombat", types = {}) public void exitCombat(){ NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#getChatFlags() */ @ReflectiveMethod(name = "getChatFlags", types = {}) public NMSEnumChatVisibility getChatFlags(){ return new NMSEnumChatVisibility(NMSWrapper.getInstance().exec(nmsObject)); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#getChunkCoordinates() */ @ReflectiveMethod(name = "getChunkCoordinates", types = {}) public NMSBlockPosition getChunkCoordinates(){ return new NMSBlockPosition(NMSWrapper.getInstance().exec(nmsObject)); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#getPlayerListName() */ @ReflectiveMethod(name = "getPlayerListName", types = {}) public NMSIChatBaseComponent getPlayerListName(){ return (NMSIChatBaseComponent) NMSWrapper.getInstance().createApplicableObject(NMSWrapper.getInstance().exec(nmsObject)); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#getPlayerTime() */ 
@ReflectiveMethod(name = "getPlayerTime", types = {}) public long getPlayerTime(){ return (long) NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#getPlayerWeather() */ @ReflectiveMethod(name = "getPlayerWeather", types = {}) public WeatherType getPlayerWeather(){ return (WeatherType) NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#getStatisticManager() */ @ReflectiveMethod(name = "getStatisticManager", types = {}) public NMSServerStatisticManager getStatisticManager(){ return new NMSServerStatisticManager(NMSWrapper.getInstance().exec(nmsObject)); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityPlayer#i_() */ @ReflectiveMethod(name = "i_", types = {}) public void i_(){ NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#isSpectator() */ @ReflectiveMethod(name = "isSpectator", types = {}) public boolean isSpectator(){ return (boolean) NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityPlayer#l() */ @ReflectiveMethod(name = "l", types = {}) public void l(){ NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#levelDown(int) */ @ReflectiveMethod(name = "levelDown", types = {int.class}) public void levelDown(int i){ NMSWrapper.getInstance().exec(nmsObject, i); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#mount(net.minecraft.server.v1_9_R1.Entity) */ @ReflectiveMethod(name = "mount", types = {NMSEntity.class}) public void mount(NMSEntity entity){ NMSWrapper.getInstance().exec(nmsObject, entity); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#nextContainerCounter() */ @ReflectiveMethod(name = "nextContainerCounter", types = {}) public int nextContainerCounter(){ return (int) NMSWrapper.getInstance().exec(nmsObject); } /** * @see 
net.minecraft.server.v1_9_R1.EntityPlayer#openBook(net.minecraft.server.v1_9_R1.ItemStack) */ @ReflectiveMethod(name = "openBook", types = {NMSItemStack.class}) public void openBook(NMSItemStack itemStack){ NMSWrapper.getInstance().exec(nmsObject, itemStack); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#openContainer(net.minecraft.server.v1_9_R1.IInventory) */ @ReflectiveMethod(name = "openContainer", types = {NMSIInventory.class}) public void openContainer(NMSIInventory iInventory){ NMSWrapper.getInstance().exec(nmsObject, iInventory); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#openHorseInventory(net.minecraft.server.v1_9_R1.EntityHorse, net.minecraft.server.v1_9_R1.IInventory) */ @ReflectiveMethod(name = "openHorseInventory", types = {NMSEntityHorse.class, NMSIInventory.class}) public void openHorseInventory(NMSEntityHorse entityHorse, NMSIInventory iInventory){ NMSWrapper.getInstance().exec(nmsObject, entityHorse, iInventory); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#openSign(net.minecraft.server.v1_9_R1.TileEntitySign) */ @ReflectiveMethod(name = "openSign", types = {NMSTileEntitySign.class}) public void openSign(NMSTileEntitySign tileEntitySign){ NMSWrapper.getInstance().exec(nmsObject, tileEntitySign); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#openTileEntity(net.minecraft.server.v1_9_R1.ITileEntityContainer) */ @ReflectiveMethod(name = "openTileEntity", types = {NMSITileEntityContainer.class}) public void openTileEntity(NMSITileEntityContainer iTileEntityContainer){ NMSWrapper.getInstance().exec(nmsObject, iTileEntityContainer); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#openTrade(net.minecraft.server.v1_9_R1.IMerchant) */ @ReflectiveMethod(name = "openTrade", types = {NMSIMerchant.class}) public void openTrade(NMSIMerchant iMerchant){ NMSWrapper.getInstance().exec(nmsObject, iMerchant); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityPlayer#p() */ @ReflectiveMethod(name = 
"p", types = {}) public void p(){ NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityPlayer#q() */ @ReflectiveMethod(name = "q", types = {}) public void q(){ NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#receive(net.minecraft.server.v1_9_R1.Entity, int) */ @ReflectiveMethod(name = "receive", types = {NMSEntity.class, int.class}) public void receive(NMSEntity entity, int i){ NMSWrapper.getInstance().exec(nmsObject, entity, i); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#reset() */ @ReflectiveMethod(name = "reset", types = {}) public void reset(){ NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#resetIdleTimer() */ @ReflectiveMethod(name = "resetIdleTimer", types = {}) public void resetIdleTimer(){ NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#resetPlayerWeather() */ @ReflectiveMethod(name = "resetPlayerWeather", types = {}) public void resetPlayerWeather(){ NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityPlayer#s() */ @ReflectiveMethod(name = "s", types = {}) public void s(){ NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#sendMessage(net.minecraft.server.v1_9_R1.IChatBaseComponent) */ @ReflectiveMethod(name = "sendMessage", types = {NMSIChatBaseComponent.class}) public void sendMessage(NMSIChatBaseComponent iChatBaseComponent){ NMSWrapper.getInstance().exec(nmsObject, iChatBaseComponent); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#setContainerData(net.minecraft.server.v1_9_R1.Container, net.minecraft.server.v1_9_R1.IInventory) */ @ReflectiveMethod(name = "setContainerData", types = {NMSContainer.class, NMSIInventory.class}) public void setContainerData(NMSContainer container, NMSIInventory iInventory){ 
NMSWrapper.getInstance().exec(nmsObject, container, iInventory); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#setPlayerWeather(org.bukkit.WeatherType, boolean) */ @ReflectiveMethod(name = "setPlayerWeather", types = {WeatherType.class, boolean.class}) public void setPlayerWeather(WeatherType weatherType, boolean b){ NMSWrapper.getInstance().exec(nmsObject, weatherType, b); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#setResourcePack(java.lang.String, java.lang.String) */ @ReflectiveMethod(name = "setResourcePack", types = {String.class, String.class}) public void setResourcePack(String s, String s1){ NMSWrapper.getInstance().exec(nmsObject, s, s1); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#setSpectatorTarget(net.minecraft.server.v1_9_R1.Entity) */ @ReflectiveMethod(name = "setSpectatorTarget", types = {NMSEntity.class}) public void setSpectatorTarget(NMSEntity entity){ NMSWrapper.getInstance().exec(nmsObject, entity); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#spawnIn(net.minecraft.server.v1_9_R1.World) */ @ReflectiveMethod(name = "spawnIn", types = {NMSWorld.class}) public void spawnIn(NMSWorld world){ NMSWrapper.getInstance().exec(nmsObject, world); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#syncInventory() */ @ReflectiveMethod(name = "syncInventory", types = {}) public void syncInventory(){ NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityPlayer#t_() */ @ReflectiveMethod(name = "t_", types = {}) public void t_(){ NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#tickWeather() */ @ReflectiveMethod(name = "tickWeather", types = {}) public void tickWeather(){ NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#toString() */ @ReflectiveMethod(name = "toString", types = {}) public String toString(){ return (String) NMSWrapper.getInstance().exec(nmsObject); 
} /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#triggerHealthUpdate() */ @ReflectiveMethod(name = "triggerHealthUpdate", types = {}) public void triggerHealthUpdate(){ NMSWrapper.getInstance().exec(nmsObject); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityPlayer#u() */ @ReflectiveMethod(name = "u", types = {}) public NMSWorldServer u(){ return new NMSWorldServer(NMSWrapper.getInstance().exec(nmsObject)); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#updateAbilities() */ @ReflectiveMethod(name = "updateAbilities", types = {}) public void updateAbilities(){ NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#updateInventory(net.minecraft.server.v1_9_R1.Container) */ @ReflectiveMethod(name = "updateInventory", types = {NMSContainer.class}) public void updateInventory(NMSContainer container){ NMSWrapper.getInstance().exec(nmsObject, container); } /** * @see net.minecraft.server.v1_9_R1.EntityPlayer#updateWeather(float, float, float, float) */ @ReflectiveMethod(name = "updateWeather", types = {float.class, float.class, float.class, float.class}) public void updateWeather(float f, float f1, float f2, float f3){ NMSWrapper.getInstance().exec(nmsObject, f, f1, f2, f3); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntityPlayer#w() */ @ReflectiveMethod(name = "w", types = {}) public String w(){ return (String) NMSWrapper.getInstance().exec(nmsObject); } }
package com.swfarm.biz.chain.dto;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.Iterator;
import java.util.List;

import org.apache.commons.lang.StringUtils;

import com.swfarm.pub.framework.OrderItem;
import com.swfarm.pub.utils.DateUtils;

/**
 * Search criteria DTO for querying return-purchase tasks.
 *
 * <p>Holds user-entered filter values (task numbers, vendor, article numbers,
 * date range, process steps, price range) plus the current sort order
 * ({@link OrderItem} list), and renders several of them as JavaScript array
 * literals for the search UI.
 */
public class SearchReturnPurchaseTasksCriteria implements Serializable {

    private static final long serialVersionUID = 1L;

    private String returnPurchaseTaskNo;
    private List<String> returnPurchaseTaskNos = new ArrayList<String>();
    private String assistantProductManagerNo;
    private String vendorName;
    private String articleNumber;
    // Raw List kept in the public interface for backward compatibility.
    private List articleNumbers = new ArrayList();
    private Date startDate;
    private Date endDate;
    private String[] returnPurchaseTaskProcessSteps;
    private String exceptionalProcessStep;
    private Double lowerTotalPrice;
    private Double higherTotalPrice;
    private List orderItemList = new ArrayList();

    public String getReturnPurchaseTaskNo() {
        return returnPurchaseTaskNo;
    }

    public void setReturnPurchaseTaskNo(String returnPurchaseTaskNo) {
        this.returnPurchaseTaskNo = returnPurchaseTaskNo;
    }

    public void setReturnPurchaseTaskNos(List<String> returnPurchaseTaskNos) {
        this.returnPurchaseTaskNos = returnPurchaseTaskNos;
    }

    public List<String> getReturnPurchaseTaskNos() {
        return returnPurchaseTaskNos;
    }

    public String getAssistantProductManagerNo() {
        return assistantProductManagerNo;
    }

    public void setAssistantProductManagerNo(String assistantProductManagerNo) {
        this.assistantProductManagerNo = assistantProductManagerNo;
    }

    public String getVendorName() {
        return vendorName;
    }

    public void setVendorName(String vendorName) {
        this.vendorName = vendorName;
    }

    public String getArticleNumber() {
        return articleNumber;
    }

    public void setArticleNumber(String articleNumber) {
        this.articleNumber = articleNumber;
    }

    public Date getStartDate() {
        return startDate;
    }

    public void setStartDate(Date startDate) {
        this.startDate = startDate;
    }

    public Date getEndDate() {
        return endDate;
    }

    public void setEndDate(Date endDate) {
        this.endDate = endDate;
    }

    /** Lazily (re-)initializes the list so callers never see {@code null}. */
    public List getArticleNumbers() {
        if (articleNumbers == null) {
            articleNumbers = new ArrayList();
        }
        return articleNumbers;
    }

    public void setArticleNumbers(List articleNumbers) {
        this.articleNumbers = articleNumbers;
    }

    public String[] getReturnPurchaseTaskProcessSteps() {
        return returnPurchaseTaskProcessSteps;
    }

    public void setReturnPurchaseTaskProcessSteps(String[] returnPurchaseTaskProcessSteps) {
        this.returnPurchaseTaskProcessSteps = returnPurchaseTaskProcessSteps;
    }

    public String getExceptionalProcessStep() {
        return exceptionalProcessStep;
    }

    public void setExceptionalProcessStep(String exceptionalProcessStep) {
        this.exceptionalProcessStep = exceptionalProcessStep;
    }

    public Double getLowerTotalPrice() {
        return lowerTotalPrice;
    }

    public void setLowerTotalPrice(Double lowerTotalPrice) {
        this.lowerTotalPrice = lowerTotalPrice;
    }

    public Double getHigherTotalPrice() {
        return higherTotalPrice;
    }

    public void setHigherTotalPrice(Double higherTotalPrice) {
        this.higherTotalPrice = higherTotalPrice;
    }

    /** @return start date as {@code yyyy/MM/dd}, or "" when unset. */
    public String getStartDateString() {
        if (startDate != null) {
            return DateUtils.dateToString(startDate, "yyyy/MM/dd");
        } else {
            return StringUtils.EMPTY;
        }
    }

    /** @return end date as {@code yyyy/MM/dd}, or "" when unset. */
    public String getEndDateString() {
        if (endDate != null) {
            return DateUtils.dateToString(endDate, "yyyy/MM/dd");
        } else {
            return StringUtils.EMPTY;
        }
    }

    /**
     * Renders {@link #returnPurchaseTaskProcessSteps} as a JS array literal,
     * e.g. {@code ['STEP_A','STEP_B']}; {@code []} when empty or unset.
     *
     * <p>Shared by {@link #getPurchaseTaskProcessStepJsArr()} and
     * {@link #getReturnPurchaseTaskProcessStepJsArr()}, which previously
     * carried two identical copies of this code.
     */
    private String buildProcessStepJsArr() {
        // StringBuilder instead of StringBuffer: local use, no synchronization needed.
        StringBuilder jsBuffer = new StringBuilder();
        jsBuffer.append("[");
        if (this.returnPurchaseTaskProcessSteps != null && this.returnPurchaseTaskProcessSteps.length > 0) {
            for (int i = 0; i < this.returnPurchaseTaskProcessSteps.length; i++) {
                jsBuffer.append("'");
                jsBuffer.append(this.returnPurchaseTaskProcessSteps[i]);
                jsBuffer.append("'");
                if (i < this.returnPurchaseTaskProcessSteps.length - 1) {
                    jsBuffer.append(",");
                }
            }
        }
        jsBuffer.append("]");
        return jsBuffer.toString();
    }

    /** @return the selected process steps as a JS array literal. */
    public String getPurchaseTaskProcessStepJsArr() {
        return buildProcessStepJsArr();
    }

    /** @return the selected process steps as a JS array literal (duplicate accessor kept for existing callers). */
    public String getReturnPurchaseTaskProcessStepJsArr() {
        return buildProcessStepJsArr();
    }

    /**
     * Returns the order-item list sorted by {@link OrderItem#getIndex()}.
     * Sorting happens on every call, mirroring the original behavior.
     */
    public List getOrderItemList() {
        if (orderItemList == null) {
            orderItemList = new ArrayList();
        }
        Collections.sort(orderItemList, new Comparator() {
            public int compare(Object o1, Object o2) {
                OrderItem orderItem1 = (OrderItem) o1;
                OrderItem orderItem2 = (OrderItem) o2;
                return orderItem1.getIndex().compareTo(orderItem2.getIndex());
            }
        });
        return orderItemList;
    }

    /** Adds the item unless an equal one is already present. */
    public void addOrderItem(OrderItem orderItem) {
        List orderItemList = getOrderItemList();
        if (!orderItemList.contains(orderItem)) {
            orderItemList.add(orderItem);
        }
    }

    /** Removes the item if present. */
    public void removeOrderItem(OrderItem orderItem) {
        List orderItemList = getOrderItemList();
        if (orderItemList.contains(orderItem)) {
            orderItemList.remove(orderItem);
        }
    }

    /**
     * Renders the (sorted) order items as a nested JS array literal:
     * {@code [['field','status','index'], ...]}.
     */
    public String getOrderItemJsString() {
        getOrderItemList(); // side effect: lazily init + sort the backing list
        StringBuilder sb = new StringBuilder();
        sb.append("[");
        for (Iterator iter = orderItemList.iterator(); iter.hasNext();) {
            OrderItem orderItem = (OrderItem) iter.next();
            sb.append("[");
            sb.append("'" + orderItem.getField() + "'");
            sb.append(",");
            sb.append("'" + orderItem.getStatus() + "'");
            sb.append(",");
            sb.append("'" + orderItem.getIndex() + "'");
            sb.append("]");
            if (iter.hasNext()) {
                sb.append(",");
            }
        }
        sb.append("]");
        return sb.toString();
    }

    public void setOrderItemList(List orderItemList) {
        this.orderItemList = orderItemList;
    }
}
import java.util.Random; import java.util.Scanner; public class Battleship { protected final static int boardSize = 10; static Board user; static Board cpu; static Ship patrolBoat; static Ship submarine; static Ship destroyer; static Ship battleship; static Ship carrier; static Scanner in; static Random random; static int count = 0; static int cpuDificulty; static double cpuDifRate; static String cpuSecondShotMessage = ""; public static void main(String[] args){ in = new Scanner(System.in); random = new Random(); System.out.println("\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nYour ship's position's have been randomly generated.\n\nSometimes the CPU will fire two shots at once since it's not taking time to aim and fires completely randomly.\n\nFor optimal gameplay please set your console's height to at least 31 lines and width to at least 76 lines.\n\nBEGIN GAME!"); System.out.println("\nEnter a number for the cpu dificulty (1 super easy - 5 expert):"); cpuDificulty = Integer.valueOf(checkValidDificulty(in.nextLine())); switch(cpuDificulty){ case 1: cpuDifRate = .15; break; case 2: cpuDifRate = .25; break; case 3: cpuDifRate = .4; break; case 4: cpuDifRate = .55; break; case 5: cpuDifRate = .7; break; } System.out.println("\nPress enter to start!"); in.nextLine(); cpu = new Board("CPU Board"); user = new Board("User Board"); while(hasNextTurn(cpu) && hasNextTurn(user)){ count++; printGame(); System.out.println(cpuSecondShotMessage); int x; int y; System.out.println("Enter X-Coordinate (1-10) then press enter: " ); x = userInput(in.nextLine()); System.out.println("Enter Y-Coordinate (1-10) then press enter: " ); y = userInput(in.nextLine()); boolean stop1 = false; while (!stop1){ if (cpu.getBoard()[x][y] % 100 != 0){ printGame(); System.out.println("Enter a new location!"); System.out.println("Enter X-Coordinate (1-10) then press enter: " ); x = userInput(in.nextLine()); System.out.println("Enter Y-Coordinate (1-10) then press 
enter: " ); y = userInput(in.nextLine()); } else stop1 = true; } cpu.fireShot(x, y); int cpux = random.nextInt(10); int cpuy = random.nextInt(10); boolean stop = false; while (!stop){ if (user.getBoard()[cpux][cpuy] % 100 != 0){ cpux = random.nextInt(10); cpuy = random.nextInt(10); } else stop = true; } user.fireShot(cpux, cpuy); double cpuSecondShot = random.nextDouble(); if (cpuSecondShot < cpuDifRate){ if (cpuSecondShot < .2 * cpuDifRate){ cpuSecondShotMessage = "Uh oh! The CPU got a second shot off!\n"; } else if (cpuSecondShot < .4 * cpuDifRate){ cpuSecondShotMessage = "Watch out! The CPU got a second shot off!\n"; } else if (cpuSecondShot < .6 * cpuDifRate){ cpuSecondShotMessage = "Be careful! The CPU got a second shot off!\n"; } else if (cpuSecondShot < .8 * cpuDifRate){ cpuSecondShotMessage = "Ahhhh! The CPU got a second shot off!\n"; } else{ cpuSecondShotMessage = "Duck for cover! The CPU got a second shot off!\n"; } cpux = random.nextInt(10); cpuy = random.nextInt(10); stop = false; while (!stop){ if (user.getBoard()[cpux][cpuy] % 100 != 0){ cpux = random.nextInt(10); cpuy = random.nextInt(10); } else stop = true; } user.fireShot(cpux, cpuy); } else{ cpuSecondShotMessage = ""; } } in.close(); } private static int userInput(String i){ i = checkValidInput(i); return Integer.valueOf(i) - 1; } private static String checkValidInput(String check){ if(!check.matches("[0-9]+")){ System.out.println("Try again, with a number this time: "); check = checkValidInput(in.nextLine()); } if(Integer.valueOf(check) > 10 || Integer.valueOf(check) < 1){ System.out.println("Try again, with a number from 1-10: "); check = checkValidInput(in.nextLine()); } return check; } private static String checkValidDificulty(String check){ if(!check.matches("[1-5]+")){ System.out.println("Try again, with a number between 1 and 5: "); check = checkValidDificulty(in.nextLine()); } return check; } private static boolean hasNextTurn(Board board){ for(int i = boardSize - 1; i >= 0; i--){ for(int 
j = 0; j < boardSize; j++){ if(board.getBoard()[j][i] == 100) return true; } System.out.println(); } String winner = ""; if(board.toString().equals("CPU Board")) winner = "User"; else winner = "CPU"; printGame(); System.out.printf("%s Wins!!!\n\n", winner); return false; } private static void printBoard(Board board, boolean hide){ System.out.println(board.toString()); int i; for(i = boardSize - 1; i >= 0; i--){ for(int j = 0; j < boardSize; j++){ int key = board.getBoard()[j][i]; if (key == 0) System.out.printf(".... "); else if (key == 100 && hide == true) System.out.printf(".... "); else if (key == 100 && hide == false) System.out.printf("|||| "); else if (key == 1) System.out.printf("miss "); else if (key == 101) System.out.printf("hit! "); else System.out.printf("EROR "); } if (i == 9) System.out.printf(" Ship Status:"); if (i == 8) System.out.printf(" Aircraft Carrier: " + board.getCarrier().toString()); if (i == 7) System.out.printf(" Battleship: " + board.getBattleship().toString()); if (i == 6) System.out.printf(" Destroyer: " + board.getDestroyer().toString()); if (i == 5) System.out.printf(" Submarine: " + board.getSubmarine().toString()); if (i == 4) System.out.printf(" Patrol Boat: " + board.getPatrolBoat().toString()); System.out.println(); } System.out.println(); } private static void printGame(){ //clear console for(int i = 0; i < 100; i++) System.out.println(); printBoard(cpu, true); printBoard(user, false); } }
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.directory.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for the ListIpRoutes operation.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ds-2015-04-16/ListIpRoutes" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListIpRoutesRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** Identifier (ID) of the directory for which you want to retrieve the IP addresses. */
    private String directoryId;

    /** The <i>ListIpRoutes.NextToken</i> value from a previous call to ListIpRoutes; null on the first call. */
    private String nextToken;

    /** Maximum number of items to return; zero means "use the operation's own limit". */
    private Integer limit;

    /**
     * Sets the identifier (ID) of the directory for which to retrieve the IP addresses.
     *
     * @param directoryId the directory identifier
     */
    public void setDirectoryId(String directoryId) {
        this.directoryId = directoryId;
    }

    /**
     * @return the identifier (ID) of the directory for which to retrieve the IP addresses
     */
    public String getDirectoryId() {
        return this.directoryId;
    }

    /**
     * Fluent variant of {@link #setDirectoryId(String)}.
     *
     * @param directoryId the directory identifier
     * @return this request, for call chaining
     */
    public ListIpRoutesRequest withDirectoryId(String directoryId) {
        this.directoryId = directoryId;
        return this;
    }

    /**
     * Sets the <i>ListIpRoutes.NextToken</i> value from a previous call; pass null on the first call.
     *
     * @param nextToken the pagination token
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * @return the pagination token from a previous call, or null on the first call
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Fluent variant of {@link #setNextToken(String)}.
     *
     * @param nextToken the pagination token
     * @return this request, for call chaining
     */
    public ListIpRoutesRequest withNextToken(String nextToken) {
        this.nextToken = nextToken;
        return this;
    }

    /**
     * Sets the maximum number of items to return; zero defers to the operation's own limit.
     *
     * @param limit the page-size limit
     */
    public void setLimit(Integer limit) {
        this.limit = limit;
    }

    /**
     * @return the maximum number of items to return, or zero for the operation's own limit
     */
    public Integer getLimit() {
        return this.limit;
    }

    /**
     * Fluent variant of {@link #setLimit(Integer)}.
     *
     * @param limit the page-size limit
     * @return this request, for call chaining
     */
    public ListIpRoutesRequest withLimit(Integer limit) {
        this.limit = limit;
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder("{");
        if (getDirectoryId() != null) {
            buf.append("DirectoryId: ").append(getDirectoryId()).append(",");
        }
        if (getNextToken() != null) {
            buf.append("NextToken: ").append(getNextToken()).append(",");
        }
        if (getLimit() != null) {
            buf.append("Limit: ").append(getLimit());
        }
        return buf.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ListIpRoutesRequest)) {
            return false;
        }
        ListIpRoutesRequest other = (ListIpRoutesRequest) obj;
        return sameField(getDirectoryId(), other.getDirectoryId())
                && sameField(getNextToken(), other.getNextToken())
                && sameField(getLimit(), other.getLimit());
    }

    /** Null-tolerant field equality used by {@link #equals(Object)}. */
    private static boolean sameField(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    @Override
    public int hashCode() {
        // Same 31-based rolling formula as the generated original, so hash
        // codes are identical across both implementations.
        int hash = 1;
        for (Object field : new Object[] { getDirectoryId(), getNextToken(), getLimit() }) {
            hash = 31 * hash + (field == null ? 0 : field.hashCode());
        }
        return hash;
    }

    @Override
    public ListIpRoutesRequest clone() {
        return (ListIpRoutesRequest) super.clone();
    }

}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.clusterframework; import org.apache.flink.configuration.AkkaOptions; import org.apache.flink.configuration.ConfigConstants; import org.apache.flink.configuration.ConfigOption; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.CoreOptions; import org.apache.flink.runtime.akka.AkkaUtils; import org.apache.flink.util.NetUtils; import org.apache.flink.shaded.netty4.io.netty.channel.ChannelException; import akka.actor.ActorSystem; import com.typesafe.config.Config; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Option; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.Nonnull; import javax.annotation.Nullable; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; import java.net.BindException; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import scala.Some; import scala.Tuple2; import static org.apache.flink.configuration.ConfigOptions.key; /** * Tools for starting JobManager and TaskManager processes, including the * Actor 
Systems used to run the JobManager and TaskManager actors. */ public class BootstrapTools { /** * Internal option which says if default value is used for {@link CoreOptions#TMP_DIRS}. */ private static final ConfigOption<Boolean> USE_LOCAL_DEFAULT_TMP_DIRS = key("internal.io.tmpdirs.use-local-default") .defaultValue(false); private static final Logger LOG = LoggerFactory.getLogger(BootstrapTools.class); /** * Starts an ActorSystem with the given configuration listening at the address/ports. * @param configuration The Flink configuration * @param listeningAddress The address to listen at. * @param portRangeDefinition The port range to choose a port from. * @param logger The logger to output log information. * @return The ActorSystem which has been started * @throws Exception Thrown when actor system cannot be started in specified port range */ public static ActorSystem startActorSystem( Configuration configuration, String listeningAddress, String portRangeDefinition, Logger logger) throws Exception { return startActorSystem( configuration, listeningAddress, portRangeDefinition, logger, ForkJoinExecutorConfiguration.fromConfiguration(configuration)); } /** * Starts an ActorSystem with the given configuration listening at the address/ports. * * @param configuration The Flink configuration * @param listeningAddress The address to listen at. * @param portRangeDefinition The port range to choose a port from. * @param logger The logger to output log information. 
* @param actorSystemExecutorConfiguration configuration for the ActorSystem's underlying executor * @return The ActorSystem which has been started * @throws Exception Thrown when actor system cannot be started in specified port range */ public static ActorSystem startActorSystem( Configuration configuration, String listeningAddress, String portRangeDefinition, Logger logger, @Nonnull ActorSystemExecutorConfiguration actorSystemExecutorConfiguration) throws Exception { return startActorSystem( configuration, AkkaUtils.getFlinkActorSystemName(), listeningAddress, portRangeDefinition, logger, actorSystemExecutorConfiguration); } /** * Starts an ActorSystem with the given configuration listening at the address/ports. * * @param configuration The Flink configuration * @param actorSystemName Name of the started {@link ActorSystem} * @param listeningAddress The address to listen at. * @param portRangeDefinition The port range to choose a port from. * @param logger The logger to output log information. 
* @param actorSystemExecutorConfiguration configuration for the ActorSystem's underlying executor * @return The ActorSystem which has been started * @throws Exception Thrown when actor system cannot be started in specified port range */ public static ActorSystem startActorSystem( Configuration configuration, String actorSystemName, String listeningAddress, String portRangeDefinition, Logger logger, @Nonnull ActorSystemExecutorConfiguration actorSystemExecutorConfiguration) throws Exception { // parse port range definition and create port iterator Iterator<Integer> portsIterator; try { portsIterator = NetUtils.getPortRangeFromString(portRangeDefinition); } catch (Exception e) { throw new IllegalArgumentException("Invalid port range definition: " + portRangeDefinition); } while (portsIterator.hasNext()) { final int port = portsIterator.next(); try { return startActorSystem( configuration, actorSystemName, listeningAddress, port, logger, actorSystemExecutorConfiguration); } catch (Exception e) { // we can continue to try if this contains a netty channel exception Throwable cause = e.getCause(); if (!(cause instanceof org.jboss.netty.channel.ChannelException || cause instanceof java.net.BindException)) { throw e; } // else fall through the loop and try the next port } } // if we come here, we have exhausted the port range throw new BindException("Could not start actor system on any port in port range " + portRangeDefinition); } /** * Starts an Actor System at a specific port. * * @param configuration The Flink configuration. * @param listeningAddress The address to listen at. * @param listeningPort The port to listen at. * @param logger the logger to output log information. * @return The ActorSystem which has been started. 
* @throws Exception */ public static ActorSystem startActorSystem( Configuration configuration, String listeningAddress, int listeningPort, Logger logger) throws Exception { return startActorSystem( configuration, listeningAddress, listeningPort, logger, ForkJoinExecutorConfiguration.fromConfiguration(configuration)); } /** * Starts an Actor System at a specific port. * @param configuration The Flink configuration. * @param listeningAddress The address to listen at. * @param listeningPort The port to listen at. * @param logger the logger to output log information. * @param actorSystemExecutorConfiguration configuration for the ActorSystem's underlying executor * @return The ActorSystem which has been started. * @throws Exception */ public static ActorSystem startActorSystem( Configuration configuration, String listeningAddress, int listeningPort, Logger logger, ActorSystemExecutorConfiguration actorSystemExecutorConfiguration) throws Exception { return startActorSystem( configuration, AkkaUtils.getFlinkActorSystemName(), listeningAddress, listeningPort, logger, actorSystemExecutorConfiguration); } /** * Starts an Actor System at a specific port. * @param configuration The Flink configuration. * @param actorSystemName Name of the started {@link ActorSystem} * @param listeningAddress The address to listen at. * @param listeningPort The port to listen at. * @param logger the logger to output log information. * @param actorSystemExecutorConfiguration configuration for the ActorSystem's underlying executor * @return The ActorSystem which has been started. 
* @throws Exception */ public static ActorSystem startActorSystem( Configuration configuration, String actorSystemName, String listeningAddress, int listeningPort, Logger logger, ActorSystemExecutorConfiguration actorSystemExecutorConfiguration) throws Exception { String hostPortUrl = NetUtils.unresolvedHostAndPortToNormalizedString(listeningAddress, listeningPort); logger.info("Trying to start actor system at {}", hostPortUrl); try { Config akkaConfig = AkkaUtils.getAkkaConfig( configuration, new Some<>(new Tuple2<>(listeningAddress, listeningPort)), actorSystemExecutorConfiguration.getAkkaConfig()); logger.debug("Using akka configuration\n {}", akkaConfig); ActorSystem actorSystem = AkkaUtils.createActorSystem(actorSystemName, akkaConfig); logger.info("Actor system started at {}", AkkaUtils.getAddress(actorSystem)); return actorSystem; } catch (Throwable t) { if (t instanceof ChannelException) { Throwable cause = t.getCause(); if (cause != null && t.getCause() instanceof BindException) { throw new IOException("Unable to create ActorSystem at address " + hostPortUrl + " : " + cause.getMessage(), t); } } throw new Exception("Could not create actor system", t); } } /** * Writes a Flink YAML config file from a Flink Configuration object. * @param cfg The Flink config * @param file The File to write to * @throws IOException */ public static void writeConfiguration(Configuration cfg, File file) throws IOException { try (FileWriter fwrt = new FileWriter(file); PrintWriter out = new PrintWriter(fwrt)) { for (String key : cfg.keySet()) { String value = cfg.getString(key, null); out.print(key); out.print(": "); out.println(value); } } } /** * Sets the value of a new config key to the value of a deprecated config key. 
* @param config Config to write * @param deprecated The old config key * @param designated The new config key */ public static void substituteDeprecatedConfigKey(Configuration config, String deprecated, String designated) { // set the designated key only if it is not set already if (!config.containsKey(designated)) { final String valueForDeprecated = config.getString(deprecated, null); if (valueForDeprecated != null) { config.setString(designated, valueForDeprecated); } } } /** * Sets the value of a new config key to the value of a deprecated config key. Taking into * account the changed prefix. * @param config Config to write * @param deprecatedPrefix Old prefix of key * @param designatedPrefix New prefix of key */ public static void substituteDeprecatedConfigPrefix( Configuration config, String deprecatedPrefix, String designatedPrefix) { // set the designated key only if it is not set already final int prefixLen = deprecatedPrefix.length(); Configuration replacement = new Configuration(); for (String key : config.keySet()) { if (key.startsWith(deprecatedPrefix)) { String newKey = designatedPrefix + key.substring(prefixLen); if (!config.containsKey(newKey)) { replacement.setString(newKey, config.getString(key, null)); } } } config.addAll(replacement); } private static final String DYNAMIC_PROPERTIES_OPT = "D"; /** * Get an instance of the dynamic properties option. * * <p>Dynamic properties allow the user to specify additional configuration values with -D, such as * <tt> -Dfs.overwrite-files=true -Dtaskmanager.network.memory.min=536346624</tt> */ public static Option newDynamicPropertiesOption() { return new Option(DYNAMIC_PROPERTIES_OPT, true, "Dynamic properties"); } /** * Parse the dynamic properties (passed on the command line). 
*/ public static Configuration parseDynamicProperties(CommandLine cmd) { final Configuration config = new Configuration(); String[] values = cmd.getOptionValues(DYNAMIC_PROPERTIES_OPT); if (values != null) { for (String value : values) { String[] pair = value.split("=", 2); if (pair.length == 1) { config.setString(pair[0], Boolean.TRUE.toString()); } else if (pair.length == 2) { config.setString(pair[0], pair[1]); } } } return config; } /** * Generates the shell command to start a task manager. * @param flinkConfig The Flink configuration. * @param tmParams Parameters for the task manager. * @param configDirectory The configuration directory for the flink-conf.yaml * @param logDirectory The log directory. * @param hasLogback Uses logback? * @param hasLog4j Uses log4j? * @param mainClass The main class to start with. * @return A String containing the task manager startup command. */ public static String getTaskManagerShellCommand( Configuration flinkConfig, ContaineredTaskManagerParameters tmParams, String configDirectory, String logDirectory, boolean hasLogback, boolean hasLog4j, boolean hasKrb5, Class<?> mainClass) { final Map<String, String> startCommandValues = new HashMap<>(); startCommandValues.put("java", "$JAVA_HOME/bin/java"); ArrayList<String> params = new ArrayList<>(); params.add(String.format("-Xms%dm", tmParams.taskManagerHeapSizeMB())); params.add(String.format("-Xmx%dm", tmParams.taskManagerHeapSizeMB())); if (tmParams.taskManagerDirectMemoryLimitMB() >= 0) { params.add(String.format("-XX:MaxDirectMemorySize=%dm", tmParams.taskManagerDirectMemoryLimitMB())); } startCommandValues.put("jvmmem", StringUtils.join(params, ' ')); String javaOpts = flinkConfig.getString(CoreOptions.FLINK_JVM_OPTIONS); if (flinkConfig.getString(CoreOptions.FLINK_TM_JVM_OPTIONS).length() > 0) { javaOpts += " " + flinkConfig.getString(CoreOptions.FLINK_TM_JVM_OPTIONS); } //applicable only for YarnMiniCluster secure test run //krb5.conf file will be available as local resource 
in JM/TM container if (hasKrb5) { javaOpts += " -Djava.security.krb5.conf=krb5.conf"; } startCommandValues.put("jvmopts", javaOpts); String logging = ""; if (hasLogback || hasLog4j) { logging = "-Dlog.file=" + logDirectory + "/taskmanager.log"; if (hasLogback) { logging += " -Dlogback.configurationFile=file:" + configDirectory + "/logback.xml"; } if (hasLog4j) { logging += " -Dlog4j.configuration=file:" + configDirectory + "/log4j.properties"; } } startCommandValues.put("logging", logging); startCommandValues.put("class", mainClass.getName()); startCommandValues.put("redirects", "1> " + logDirectory + "/taskmanager.out " + "2> " + logDirectory + "/taskmanager.err"); startCommandValues.put("args", "--configDir " + configDirectory); final String commandTemplate = flinkConfig .getString(ConfigConstants.YARN_CONTAINER_START_COMMAND_TEMPLATE, ConfigConstants.DEFAULT_YARN_CONTAINER_START_COMMAND_TEMPLATE); String startCommand = getStartCommand(commandTemplate, startCommandValues); LOG.debug("TaskManager start command: " + startCommand); return startCommand; } // ------------------------------------------------------------------------ /** Private constructor to prevent instantiation. */ private BootstrapTools() {} /** * Replaces placeholders in the template start command with values from startCommandValues. 
* * <p>If the default template {@link ConfigConstants#DEFAULT_YARN_CONTAINER_START_COMMAND_TEMPLATE} * is used, the following keys must be present in the map or the resulting * command will still contain placeholders: * <ul> * <li><tt>java</tt> = path to the Java executable</li> * <li><tt>jvmmem</tt> = JVM memory limits and tweaks</li> * <li><tt>jvmopts</tt> = misc options for the Java VM</li> * <li><tt>logging</tt> = logging-related configuration settings</li> * <li><tt>class</tt> = main class to execute</li> * <li><tt>args</tt> = arguments for the main class</li> * <li><tt>redirects</tt> = output redirects</li> * </ul> * * @param template * a template start command with placeholders * @param startCommandValues * a replacement map <tt>placeholder -&gt; value</tt> * * @return the start command with placeholders filled in */ public static String getStartCommand(String template, Map<String, String> startCommandValues) { for (Map.Entry<String, String> variable : startCommandValues .entrySet()) { template = template .replace("%" + variable.getKey() + "%", variable.getValue()); } return template; } /** * Set temporary configuration directories if necessary. * * @param configuration flink config to patch * @param defaultDirs in case no tmp directories is set, next directories will be applied */ public static void updateTmpDirectoriesInConfiguration( Configuration configuration, @Nullable String defaultDirs) { if (configuration.contains(CoreOptions.TMP_DIRS)) { LOG.info("Overriding Fink's temporary file directories with those " + "specified in the Flink config: {}", configuration.getValue(CoreOptions.TMP_DIRS)); } else if (defaultDirs != null) { LOG.info("Setting directories for temporary files to: {}", defaultDirs); configuration.setString(CoreOptions.TMP_DIRS, defaultDirs); configuration.setBoolean(USE_LOCAL_DEFAULT_TMP_DIRS, true); } } /** * Clones the given configuration and resets instance specific config options. 
* * @param configuration to clone * @return Cloned configuration with reset instance specific config options */ public static Configuration cloneConfiguration(Configuration configuration) { final Configuration clonedConfiguration = new Configuration(configuration); if (clonedConfiguration.getBoolean(USE_LOCAL_DEFAULT_TMP_DIRS)){ clonedConfiguration.removeConfig(CoreOptions.TMP_DIRS); clonedConfiguration.removeConfig(USE_LOCAL_DEFAULT_TMP_DIRS); } return clonedConfiguration; } /** * Configuration interface for {@link ActorSystem} underlying executor. */ public interface ActorSystemExecutorConfiguration { /** * Create the executor {@link Config} for the respective executor. * * @return Akka config for the respective executor */ Config getAkkaConfig(); } /** * Configuration for a fork join executor. */ public static class ForkJoinExecutorConfiguration implements ActorSystemExecutorConfiguration { private final double parallelismFactor; private final int minParallelism; private final int maxParallelism; public ForkJoinExecutorConfiguration(double parallelismFactor, int minParallelism, int maxParallelism) { this.parallelismFactor = parallelismFactor; this.minParallelism = minParallelism; this.maxParallelism = maxParallelism; } public double getParallelismFactor() { return parallelismFactor; } public int getMinParallelism() { return minParallelism; } public int getMaxParallelism() { return maxParallelism; } @Override public Config getAkkaConfig() { return AkkaUtils.getForkJoinExecutorConfig(this); } public static ForkJoinExecutorConfiguration fromConfiguration(final Configuration configuration) { final double parallelismFactor = configuration.getDouble(AkkaOptions.FORK_JOIN_EXECUTOR_PARALLELISM_FACTOR); final int minParallelism = configuration.getInteger(AkkaOptions.FORK_JOIN_EXECUTOR_PARALLELISM_MIN); final int maxParallelism = configuration.getInteger(AkkaOptions.FORK_JOIN_EXECUTOR_PARALLELISM_MAX); return new ForkJoinExecutorConfiguration(parallelismFactor, 
minParallelism, maxParallelism); } } /** * Configuration for a fixed thread pool executor. */ public static class FixedThreadPoolExecutorConfiguration implements ActorSystemExecutorConfiguration { private final int minNumThreads; private final int maxNumThreads; private final int threadPriority; public FixedThreadPoolExecutorConfiguration(int minNumThreads, int maxNumThreads, int threadPriority) { if (threadPriority < Thread.MIN_PRIORITY || threadPriority > Thread.MAX_PRIORITY) { throw new IllegalArgumentException( String.format( "The thread priority must be within (%s, %s) but it was %s.", Thread.MIN_PRIORITY, Thread.MAX_PRIORITY, threadPriority)); } this.minNumThreads = minNumThreads; this.maxNumThreads = maxNumThreads; this.threadPriority = threadPriority; } public int getMinNumThreads() { return minNumThreads; } public int getMaxNumThreads() { return maxNumThreads; } public int getThreadPriority() { return threadPriority; } @Override public Config getAkkaConfig() { return AkkaUtils.getThreadPoolExecutorConfig(this); } } }
/******************************************************************************* * Copyright (c) 2000, 2007 IBM Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * IBM Corporation - initial API and implementation *******************************************************************************/ package org.eclipse.swt.examples.controlexample; import org.eclipse.swt.*; import org.eclipse.swt.layout.*; import org.eclipse.swt.widgets.*; import org.eclipse.swt.graphics.*; import org.eclipse.swt.events.*; class TreeTab extends ScrollableTab { /* Example widgets and groups that contain them */ Tree tree1, tree2; TreeItem textNode1, imageNode1; Group treeGroup, imageTreeGroup, itemGroup; /* Size widgets added to the "Size" group */ Button packColumnsButton; /* Style widgets added to the "Style" group */ Button noScrollButton, checkButton, fullSelectionButton; /* Other widgets added to the "Other" group */ Button multipleColumns, moveableColumns, resizableColumns, headerVisibleButton, sortIndicatorButton, headerImagesButton, subImagesButton, linesVisibleButton; /* Controls and resources added to the "Colors and Fonts" group */ static final int ITEM_FOREGROUND_COLOR = 3; static final int ITEM_BACKGROUND_COLOR = 4; static final int ITEM_FONT = 5; static final int CELL_FOREGROUND_COLOR = 6; static final int CELL_BACKGROUND_COLOR = 7; static final int CELL_FONT = 8; Color itemForegroundColor, itemBackgroundColor, cellForegroundColor, cellBackgroundColor; Font itemFont, cellFont; static String [] columnTitles = {ControlExample.getResourceString("TableTitle_0"), ControlExample.getResourceString("TableTitle_1"), ControlExample.getResourceString("TableTitle_2"), ControlExample.getResourceString("TableTitle_3")}; static String[][] tableData = { { 
ControlExample.getResourceString("TableLine0_0"), ControlExample.getResourceString("TableLine0_1"), ControlExample.getResourceString("TableLine0_2"), ControlExample.getResourceString("TableLine0_3") }, { ControlExample.getResourceString("TableLine1_0"), ControlExample.getResourceString("TableLine1_1"), ControlExample.getResourceString("TableLine1_2"), ControlExample.getResourceString("TableLine1_3") }, { ControlExample.getResourceString("TableLine2_0"), ControlExample.getResourceString("TableLine2_1"), ControlExample.getResourceString("TableLine2_2"), ControlExample.getResourceString("TableLine2_3") } }; Point menuMouseCoords; /** * Creates the Tab within a given instance of ControlExample. */ TreeTab(ControlExample instance) { super(instance); } /** * Creates the "Colors and Fonts" group. */ @Override void createColorAndFontGroup () { super.createColorAndFontGroup(); TableItem item = new TableItem(colorAndFontTable, SWT.None); item.setText(ControlExample.getResourceString ("Item_Foreground_Color")); item = new TableItem(colorAndFontTable, SWT.None); item.setText(ControlExample.getResourceString ("Item_Background_Color")); item = new TableItem(colorAndFontTable, SWT.None); item.setText(ControlExample.getResourceString ("Item_Font")); item = new TableItem(colorAndFontTable, SWT.None); item.setText(ControlExample.getResourceString ("Cell_Foreground_Color")); item = new TableItem(colorAndFontTable, SWT.None); item.setText(ControlExample.getResourceString ("Cell_Background_Color")); item = new TableItem(colorAndFontTable, SWT.None); item.setText(ControlExample.getResourceString ("Cell_Font")); shell.addDisposeListener(new DisposeListener() { public void widgetDisposed(DisposeEvent event) { if (itemBackgroundColor != null) itemBackgroundColor.dispose(); if (itemForegroundColor != null) itemForegroundColor.dispose(); if (itemFont != null) itemFont.dispose(); if (cellBackgroundColor != null) cellBackgroundColor.dispose(); if (cellForegroundColor != null) 
cellForegroundColor.dispose(); if (cellFont != null) cellFont.dispose(); itemBackgroundColor = null; itemForegroundColor = null; itemFont = null; cellBackgroundColor = null; cellForegroundColor = null; cellFont = null; } }); } @Override void changeFontOrColor(int index) { switch (index) { case ITEM_FOREGROUND_COLOR: { Color oldColor = itemForegroundColor; if (oldColor == null) oldColor = textNode1.getForeground (); colorDialog.setRGB(oldColor.getRGB()); RGB rgb = colorDialog.open(); if (rgb == null) return; oldColor = itemForegroundColor; itemForegroundColor = new Color (display, rgb); setItemForeground (); if (oldColor != null) oldColor.dispose (); } break; case ITEM_BACKGROUND_COLOR: { Color oldColor = itemBackgroundColor; if (oldColor == null) oldColor = textNode1.getBackground (); colorDialog.setRGB(oldColor.getRGB()); RGB rgb = colorDialog.open(); if (rgb == null) return; oldColor = itemBackgroundColor; itemBackgroundColor = new Color (display, rgb); setItemBackground (); if (oldColor != null) oldColor.dispose (); } break; case ITEM_FONT: { Font oldFont = itemFont; if (oldFont == null) oldFont = textNode1.getFont (); fontDialog.setFontList(oldFont.getFontData()); FontData fontData = fontDialog.open (); if (fontData == null) return; oldFont = itemFont; itemFont = new Font (display, fontData); setItemFont (); setExampleWidgetSize (); if (oldFont != null) oldFont.dispose (); } break; case CELL_FOREGROUND_COLOR: { Color oldColor = cellForegroundColor; if (oldColor == null) oldColor = textNode1.getForeground (1); colorDialog.setRGB(oldColor.getRGB()); RGB rgb = colorDialog.open(); if (rgb == null) return; oldColor = cellForegroundColor; cellForegroundColor = new Color (display, rgb); setCellForeground (); if (oldColor != null) oldColor.dispose (); } break; case CELL_BACKGROUND_COLOR: { Color oldColor = cellBackgroundColor; if (oldColor == null) oldColor = textNode1.getBackground (1); colorDialog.setRGB(oldColor.getRGB()); RGB rgb = colorDialog.open(); if (rgb == 
null) return; oldColor = cellBackgroundColor; cellBackgroundColor = new Color (display, rgb); setCellBackground (); if (oldColor != null) oldColor.dispose (); } break; case CELL_FONT: { Font oldFont = cellFont; if (oldFont == null) oldFont = textNode1.getFont (1); fontDialog.setFontList(oldFont.getFontData()); FontData fontData = fontDialog.open (); if (fontData == null) return; oldFont = cellFont; cellFont = new Font (display, fontData); setCellFont (); setExampleWidgetSize (); if (oldFont != null) oldFont.dispose (); } break; default: super.changeFontOrColor(index); } } /** * Creates the "Other" group. */ @Override void createOtherGroup () { super.createOtherGroup (); /* Create display controls specific to this example */ linesVisibleButton = new Button (otherGroup, SWT.CHECK); linesVisibleButton.setText (ControlExample.getResourceString("Lines_Visible")); multipleColumns = new Button (otherGroup, SWT.CHECK); multipleColumns.setText (ControlExample.getResourceString("Multiple_Columns")); headerVisibleButton = new Button (otherGroup, SWT.CHECK); headerVisibleButton.setText (ControlExample.getResourceString("Header_Visible")); sortIndicatorButton = new Button (otherGroup, SWT.CHECK); sortIndicatorButton.setText (ControlExample.getResourceString("Sort_Indicator")); moveableColumns = new Button (otherGroup, SWT.CHECK); moveableColumns.setText (ControlExample.getResourceString("Moveable_Columns")); resizableColumns = new Button (otherGroup, SWT.CHECK); resizableColumns.setText (ControlExample.getResourceString("Resizable_Columns")); headerImagesButton = new Button (otherGroup, SWT.CHECK); headerImagesButton.setText (ControlExample.getResourceString("Header_Images")); subImagesButton = new Button (otherGroup, SWT.CHECK); subImagesButton.setText (ControlExample.getResourceString("Sub_Images")); /* Add the listeners */ linesVisibleButton.addSelectionListener (new SelectionAdapter () { @Override public void widgetSelected (SelectionEvent event) { setWidgetLinesVisible (); 
} }); multipleColumns.addSelectionListener (new SelectionAdapter () { @Override public void widgetSelected (SelectionEvent event) { recreateExampleWidgets (); } }); headerVisibleButton.addSelectionListener (new SelectionAdapter () { @Override public void widgetSelected (SelectionEvent event) { setWidgetHeaderVisible (); } }); sortIndicatorButton.addSelectionListener (new SelectionAdapter () { @Override public void widgetSelected (SelectionEvent event) { setWidgetSortIndicator (); } }); moveableColumns.addSelectionListener (new SelectionAdapter () { @Override public void widgetSelected (SelectionEvent event) { setColumnsMoveable (); } }); resizableColumns.addSelectionListener (new SelectionAdapter () { @Override public void widgetSelected (SelectionEvent event) { setColumnsResizable (); } }); headerImagesButton.addSelectionListener (new SelectionAdapter () { @Override public void widgetSelected (SelectionEvent event) { recreateExampleWidgets (); } }); subImagesButton.addSelectionListener (new SelectionAdapter () { @Override public void widgetSelected (SelectionEvent event) { recreateExampleWidgets (); } }); } /** * Creates the "Example" group. */ @Override void createExampleGroup () { super.createExampleGroup (); /* Create a group for the text tree */ treeGroup = new Group (exampleGroup, SWT.NONE); treeGroup.setLayout (new GridLayout ()); treeGroup.setLayoutData (new GridData (SWT.FILL, SWT.FILL, true, true)); treeGroup.setText ("Tree"); /* Create a group for the image tree */ imageTreeGroup = new Group (exampleGroup, SWT.NONE); imageTreeGroup.setLayout (new GridLayout ()); imageTreeGroup.setLayoutData (new GridData (SWT.FILL, SWT.FILL, true, true)); imageTreeGroup.setText (ControlExample.getResourceString("Tree_With_Images")); } /** * Creates the "Example" widgets. 
*/ @Override void createExampleWidgets () { /* Compute the widget style */ int style = getDefaultStyle(); if (singleButton.getSelection()) style |= SWT.SINGLE; if (multiButton.getSelection()) style |= SWT.MULTI; if (horizontalButton.getSelection ()) style |= SWT.H_SCROLL; if (verticalButton.getSelection ()) style |= SWT.V_SCROLL; if (noScrollButton.getSelection()) style |= SWT.NO_SCROLL; if (checkButton.getSelection()) style |= SWT.CHECK; if (fullSelectionButton.getSelection ()) style |= SWT.FULL_SELECTION; if (borderButton.getSelection()) style |= SWT.BORDER; /* Create the text tree */ tree1 = new Tree (treeGroup, style); boolean multiColumn = multipleColumns.getSelection(); if (multiColumn) { for (int i = 0; i < columnTitles.length; i++) { TreeColumn treeColumn = new TreeColumn(tree1, SWT.NONE); treeColumn.setText(columnTitles[i]); treeColumn.setToolTipText(ControlExample.getResourceString("Tooltip", new String [] {columnTitles[i]})); } tree1.setSortColumn(tree1.getColumn(0)); } for (int i = 0; i < 4; i++) { TreeItem item = new TreeItem (tree1, SWT.NONE); setItemText(item, i, ControlExample.getResourceString("Node_" + (i + 1))); if (i < 3) { TreeItem subitem = new TreeItem (item, SWT.NONE); setItemText(subitem, i, ControlExample.getResourceString("Node_" + (i + 1) + "_1")); } } TreeItem treeRoots[] = tree1.getItems (); TreeItem item = new TreeItem (treeRoots[1], SWT.NONE); setItemText(item, 1, ControlExample.getResourceString("Node_2_2")); item = new TreeItem (item, SWT.NONE); setItemText(item, 1, ControlExample.getResourceString("Node_2_2_1")); textNode1 = treeRoots[0]; packColumns(tree1); try { TreeColumn column = tree1.getColumn(0); resizableColumns.setSelection (column.getResizable()); } catch (IllegalArgumentException ex) {} /* Create the image tree */ tree2 = new Tree (imageTreeGroup, style); Image image = instance.images[ControlExample.ciClosedFolder]; if (multiColumn) { for (int i = 0; i < columnTitles.length; i++) { TreeColumn treeColumn = new 
TreeColumn(tree2, SWT.NONE); treeColumn.setText(columnTitles[i]); treeColumn.setToolTipText(ControlExample.getResourceString("Tooltip", new String [] {columnTitles[i]})); if (headerImagesButton.getSelection()) treeColumn.setImage(instance.images [i % 3]); } } for (int i = 0; i < 4; i++) { item = new TreeItem (tree2, SWT.NONE); setItemText(item, i, ControlExample.getResourceString("Node_" + (i + 1))); if (multiColumn && subImagesButton.getSelection()) { for (int j = 0; j < columnTitles.length; j++) { item.setImage(j, image); } } else { item.setImage(image); } if (i < 3) { TreeItem subitem = new TreeItem (item, SWT.NONE); setItemText(subitem, i, ControlExample.getResourceString("Node_" + (i + 1) + "_1")); if (multiColumn && subImagesButton.getSelection()) { for (int j = 0; j < columnTitles.length; j++) { subitem.setImage(j, image); } } else { subitem.setImage(image); } } } treeRoots = tree2.getItems (); item = new TreeItem (treeRoots[1], SWT.NONE); setItemText(item, 1, ControlExample.getResourceString("Node_2_2")); if (multiColumn && subImagesButton.getSelection()) { for (int j = 0; j < columnTitles.length; j++) { item.setImage(j, image); } } else { item.setImage(image); } item = new TreeItem (item, SWT.NONE); setItemText(item, 1, ControlExample.getResourceString("Node_2_2_1")); if (multiColumn && subImagesButton.getSelection()) { for (int j = 0; j < columnTitles.length; j++) { item.setImage(j, image); } } else { item.setImage(image); } imageNode1 = treeRoots[0]; packColumns(tree2); } void setItemText(TreeItem item, int i, String node) { int index = i % 3; if (multipleColumns.getSelection()) { tableData [index][0] = node; item.setText (tableData [index]); } else { item.setText (node); } } /** * Creates the "Size" group. The "Size" group contains * controls that allow the user to change the size of * the example widgets. 
 */
@Override
void createSizeGroup () {
	super.createSizeGroup();

	// Extra push button that packs (auto-sizes) every column of both example trees on demand.
	packColumnsButton = new Button (sizeGroup, SWT.PUSH);
	packColumnsButton.setText (ControlExample.getResourceString("Pack_Columns"));
	packColumnsButton.addSelectionListener(new SelectionAdapter () {
		@Override
		public void widgetSelected (SelectionEvent event) {
			// Pack both trees, then let the tab recompute the preferred widget size.
			packColumns (tree1);
			packColumns (tree2);
			setExampleWidgetSize ();
		}
	});
}

/**
 * Creates the "Style" group.
 * Adds the tree-specific style checkboxes (NO_SCROLL, CHECK, FULL_SELECTION)
 * to the style group inherited from the superclass.
 */
@Override
void createStyleGroup() {
	super.createStyleGroup();

	/* Create the extra widgets */
	noScrollButton = new Button (styleGroup, SWT.CHECK);
	noScrollButton.setText ("SWT.NO_SCROLL");
	// Keep NO_SCROLL visually grouped with the scroll-related checkboxes, above BORDER.
	noScrollButton.moveAbove(borderButton);
	checkButton = new Button (styleGroup, SWT.CHECK);
	checkButton.setText ("SWT.CHECK");
	fullSelectionButton = new Button (styleGroup, SWT.CHECK);
	fullSelectionButton.setText ("SWT.FULL_SELECTION");
}

/**
 * Gets the "Example" widget children's items, if any.
 *
 * @return an array containing the example widget children's items
 */
@Override
Item [] getExampleWidgetItems () {
	/* Note: We do not bother collecting the tree items
	 * because tree items don't have any events. If events
	 * are ever added to TreeItem, then this needs to change.
	 */
	// Concatenate the columns of both trees into a single Item array.
	Item [] columns1 = tree1.getColumns();
	Item [] columns2 = tree2.getColumns();
	Item [] allItems = new Item [columns1.length + columns2.length];
	System.arraycopy(columns1, 0, allItems, 0, columns1.length);
	System.arraycopy(columns2, 0, allItems, columns1.length, columns2.length);
	return allItems;
}

/**
 * Gets the "Example" widget children.
 */
@Override
Widget [] getExampleWidgets () {
	return new Widget [] {tree1, tree2};
}

/**
 * Returns a list of set/get API method names (without the set/get prefix)
 * that can be used to set/get values in the example control(s).
 */
@Override
String[] getMethodNames() {
	// Property names exposed to the example's reflective set/get UI.
	return new String[] {"ColumnOrder", "Selection", "ToolTipText", "TopItem"};
}

/**
 * Converts a user-typed string value into a parameter of the requested type
 * for the reflective set/get UI. Supports a single TreeItem (looked up by its
 * text) and an array of TreeItems (comma-separated texts); everything else is
 * delegated to the superclass.
 */
@Override
Object[] parameterForType(String typeName, String value, Widget widget) {
	if (typeName.equals("org.eclipse.swt.widgets.TreeItem")) {
		// Resolve the item by matching its text anywhere in the tree.
		TreeItem item = findItem(value, ((Tree) widget).getItems());
		if (item != null) return new Object[] {item};
	}
	// JVM binary name for TreeItem[].
	if (typeName.equals("[Lorg.eclipse.swt.widgets.TreeItem;")) {
		String[] values = split(value, ',');
		TreeItem[] items = new TreeItem[values.length];
		for (int i = 0; i < values.length; i++) {
			TreeItem item = findItem(values[i], ((Tree) widget).getItems());
			// NOTE(review): on a miss the loop breaks, leaving trailing nulls in the
			// array that is still returned — presumably tolerated by the caller; confirm.
			if (item == null) break;
			items[i] = item;
		}
		return new Object[] {items};
	}
	return super.parameterForType(typeName, value, widget);
}

/**
 * Depth-first search for the first TreeItem whose text equals {@code value},
 * recursing into child items; returns null when no item matches.
 */
TreeItem findItem(String value, TreeItem[] items) {
	for (int i = 0; i < items.length; i++) {
		TreeItem item = items[i];
		if (item.getText().equals(value)) return item;
		// Recurse into this item's children before moving to the next sibling.
		item = findItem(value, item.getItems());
		if (item != null) return item;
	}
	return null;
}

/**
 * Gets the text for the tab folder item.
 */
@Override
String getTabText () {
	return "Tree";
}

/**
 * Packs (auto-sizes) every column of the given tree, but only when the
 * multiple-columns option is enabled (otherwise the tree has no columns).
 */
void packColumns (Tree tree) {
	if (multipleColumns.getSelection()) {
		int columnCount = tree.getColumnCount();
		for (int i = 0; i < columnCount; i++) {
			TreeColumn treeColumn = tree.getColumn(i);
			treeColumn.pack();
		}
	}
}

/**
 * Sets the moveable columns state of the "Example" widgets.
 */
void setColumnsMoveable () {
	boolean selection = moveableColumns.getSelection();
	// Apply the checkbox state to every column of both trees.
	TreeColumn[] columns1 = tree1.getColumns();
	for (int i = 0; i < columns1.length; i++) {
		columns1[i].setMoveable(selection);
	}
	TreeColumn[] columns2 = tree2.getColumns();
	for (int i = 0; i < columns2.length; i++) {
		columns2[i].setMoveable(selection);
	}
}

/**
 * Sets the resizable columns state of the "Example" widgets.
*/ void setColumnsResizable () { boolean selection = resizableColumns.getSelection(); TreeColumn[] columns1 = tree1.getColumns(); for (int i = 0; i < columns1.length; i++) { columns1[i].setResizable(selection); } TreeColumn[] columns2 = tree2.getColumns(); for (int i = 0; i < columns2.length; i++) { columns2[i].setResizable(selection); } } /** * Sets the foreground color, background color, and font * of the "Example" widgets to their default settings. * Also sets foreground and background color of the Node 1 * TreeItems to default settings. */ @Override void resetColorsAndFonts () { super.resetColorsAndFonts (); Color oldColor = itemForegroundColor; itemForegroundColor = null; setItemForeground (); if (oldColor != null) oldColor.dispose(); oldColor = itemBackgroundColor; itemBackgroundColor = null; setItemBackground (); if (oldColor != null) oldColor.dispose(); Font oldFont = font; itemFont = null; setItemFont (); if (oldFont != null) oldFont.dispose(); oldColor = cellForegroundColor; cellForegroundColor = null; setCellForeground (); if (oldColor != null) oldColor.dispose(); oldColor = cellBackgroundColor; cellBackgroundColor = null; setCellBackground (); if (oldColor != null) oldColor.dispose(); oldFont = font; cellFont = null; setCellFont (); if (oldFont != null) oldFont.dispose(); } /** * Sets the state of the "Example" widgets. 
*/ @Override void setExampleWidgetState () { setItemBackground (); setItemForeground (); setItemFont (); setCellBackground (); setCellForeground (); setCellFont (); if (!instance.startup) { setColumnsMoveable (); setColumnsResizable (); setWidgetHeaderVisible (); setWidgetSortIndicator (); setWidgetLinesVisible (); } super.setExampleWidgetState (); noScrollButton.setSelection ((tree1.getStyle () & SWT.NO_SCROLL) != 0); checkButton.setSelection ((tree1.getStyle () & SWT.CHECK) != 0); fullSelectionButton.setSelection ((tree1.getStyle () & SWT.FULL_SELECTION) != 0); try { TreeColumn column = tree1.getColumn(0); moveableColumns.setSelection (column.getMoveable()); resizableColumns.setSelection (column.getResizable()); } catch (IllegalArgumentException ex) {} headerVisibleButton.setSelection (tree1.getHeaderVisible()); linesVisibleButton.setSelection (tree1.getLinesVisible()); } /** * Sets the background color of the Node 1 TreeItems in column 1. */ void setCellBackground () { if (!instance.startup) { textNode1.setBackground (1, cellBackgroundColor); imageNode1.setBackground (1, cellBackgroundColor); } /* Set the background color item's image to match the background color of the cell. */ Color color = cellBackgroundColor; if (color == null) color = textNode1.getBackground (1); TableItem item = colorAndFontTable.getItem(CELL_BACKGROUND_COLOR); Image oldImage = item.getImage(); if (oldImage != null) oldImage.dispose(); item.setImage (colorImage(color)); } /** * Sets the foreground color of the Node 1 TreeItems in column 1. */ void setCellForeground () { if (!instance.startup) { textNode1.setForeground (1, cellForegroundColor); imageNode1.setForeground (1, cellForegroundColor); } /* Set the foreground color item's image to match the foreground color of the cell. 
*/ Color color = cellForegroundColor; if (color == null) color = textNode1.getForeground (1); TableItem item = colorAndFontTable.getItem(CELL_FOREGROUND_COLOR); Image oldImage = item.getImage(); if (oldImage != null) oldImage.dispose(); item.setImage (colorImage(color)); } /** * Sets the font of the Node 1 TreeItems in column 1. */ void setCellFont () { if (!instance.startup) { textNode1.setFont (1, cellFont); imageNode1.setFont (1, cellFont); } /* Set the font item's image to match the font of the item. */ Font ft = cellFont; if (ft == null) ft = textNode1.getFont (1); TableItem item = colorAndFontTable.getItem(CELL_FONT); Image oldImage = item.getImage(); if (oldImage != null) oldImage.dispose(); item.setImage (fontImage(ft)); item.setFont(ft); colorAndFontTable.layout (); } /** * Sets the background color of the Node 1 TreeItems. */ void setItemBackground () { if (!instance.startup) { textNode1.setBackground (itemBackgroundColor); imageNode1.setBackground (itemBackgroundColor); } /* Set the background button's color to match the background color of the item. */ Color color = itemBackgroundColor; if (color == null) color = textNode1.getBackground (); TableItem item = colorAndFontTable.getItem(ITEM_BACKGROUND_COLOR); Image oldImage = item.getImage(); if (oldImage != null) oldImage.dispose(); item.setImage (colorImage(color)); } /** * Sets the foreground color of the Node 1 TreeItems. */ void setItemForeground () { if (!instance.startup) { textNode1.setForeground (itemForegroundColor); imageNode1.setForeground (itemForegroundColor); } /* Set the foreground button's color to match the foreground color of the item. */ Color color = itemForegroundColor; if (color == null) color = textNode1.getForeground (); TableItem item = colorAndFontTable.getItem(ITEM_FOREGROUND_COLOR); Image oldImage = item.getImage(); if (oldImage != null) oldImage.dispose(); item.setImage (colorImage(color)); } /** * Sets the font of the Node 1 TreeItems. 
*/ void setItemFont () { if (!instance.startup) { textNode1.setFont (itemFont); imageNode1.setFont (itemFont); } /* Set the font item's image to match the font of the item. */ Font ft = itemFont; if (ft == null) ft = textNode1.getFont (); TableItem item = colorAndFontTable.getItem(ITEM_FONT); Image oldImage = item.getImage(); if (oldImage != null) oldImage.dispose(); item.setImage (fontImage(ft)); item.setFont(ft); colorAndFontTable.layout (); } /** * Sets the header visible state of the "Example" widgets. */ void setWidgetHeaderVisible () { tree1.setHeaderVisible (headerVisibleButton.getSelection ()); tree2.setHeaderVisible (headerVisibleButton.getSelection ()); } /** * Sets the sort indicator state of the "Example" widgets. */ void setWidgetSortIndicator () { if (sortIndicatorButton.getSelection ()) { initializeSortState (tree1); initializeSortState (tree2); } else { resetSortState (tree1); resetSortState (tree2); } } /** * Sets the initial sort indicator state and adds a listener * to cycle through sort states and columns. */ void initializeSortState (final Tree tree) { /* Reset to known state: 'down' on column 0. */ tree.setSortDirection (SWT.DOWN); TreeColumn [] columns = tree.getColumns(); for (int i = 0; i < columns.length; i++) { TreeColumn column = columns[i]; if (i == 0) tree.setSortColumn(column); SelectionListener listener = new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { int sortDirection = SWT.DOWN; if (e.widget == tree.getSortColumn()) { /* If the sort column hasn't changed, cycle down -> up -> none. 
*/ switch (tree.getSortDirection ()) { case SWT.DOWN: sortDirection = SWT.UP; break; case SWT.UP: sortDirection = SWT.NONE; break; } } else { tree.setSortColumn((TreeColumn)e.widget); } tree.setSortDirection (sortDirection); } }; column.addSelectionListener(listener); column.setData("SortListener", listener); //$NON-NLS-1$ } } void resetSortState (final Tree tree) { tree.setSortDirection (SWT.NONE); TreeColumn [] columns = tree.getColumns(); for (int i = 0; i < columns.length; i++) { SelectionListener listener = (SelectionListener)columns[i].getData("SortListener"); //$NON-NLS-1$ if (listener != null) columns[i].removeSelectionListener(listener); } } /** * Sets the lines visible state of the "Example" widgets. */ void setWidgetLinesVisible () { tree1.setLinesVisible (linesVisibleButton.getSelection ()); tree2.setLinesVisible (linesVisibleButton.getSelection ()); } @Override protected void specialPopupMenuItems(Menu menu, Event event) { MenuItem item = new MenuItem(menu, SWT.PUSH); item.setText("getItem(Point) on mouse coordinates"); final Tree t = (Tree) event.widget; menuMouseCoords = t.toControl(new Point(event.x, event.y)); item.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { eventConsole.append ("getItem(Point(" + menuMouseCoords + ")) returned: " + t.getItem(menuMouseCoords)); eventConsole.append ("\n"); } }); } }
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ package org.elasticsearch.xpack.core; import org.apache.lucene.util.SetOnce; import org.elasticsearch.SpecialPermission; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.inject.Binder; import org.elasticsearch.common.inject.multibindings.Multibinder; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.EngineFactory; import 
org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.shard.IndexSettingProvider; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.license.LicenseService; import org.elasticsearch.license.LicensesMetadata; import org.elasticsearch.license.Licensing; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.EnginePlugin; import org.elasticsearch.plugins.ExtensiblePlugin; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.RepositoryPlugin; import org.elasticsearch.protocol.xpack.XPackInfoRequest; import org.elasticsearch.protocol.xpack.XPackInfoResponse; import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.script.ScriptService; import org.elasticsearch.snapshots.SourceOnlySnapshotRepository; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xpack.cluster.routing.allocation.DataTierAllocationDecider; import org.elasticsearch.xpack.cluster.routing.allocation.mapper.DataTierFieldMapper; import org.elasticsearch.xpack.core.action.ReloadAnalyzerAction; import org.elasticsearch.xpack.core.action.TransportReloadAnalyzersAction; import org.elasticsearch.xpack.core.action.TransportXPackInfoAction; import org.elasticsearch.xpack.core.action.TransportXPackUsageAction; import org.elasticsearch.xpack.core.action.XPackInfoAction; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageResponse; import 
// NOTE(review): this chunk begins mid-statement — the leading "import" keyword of the first
// import below lies outside the visible region of the file.
org.elasticsearch.xpack.core.async.DeleteAsyncResultAction;
import org.elasticsearch.xpack.core.async.TransportDeleteAsyncResultAction;
import org.elasticsearch.xpack.core.ml.MlMetadata;
import org.elasticsearch.xpack.core.rest.action.RestReloadAnalyzersAction;
import org.elasticsearch.xpack.core.rest.action.RestXPackInfoAction;
import org.elasticsearch.xpack.core.rest.action.RestXPackUsageAction;
import org.elasticsearch.xpack.core.security.authc.TokenMetadata;
import org.elasticsearch.xpack.core.ssl.SSLConfiguration;
import org.elasticsearch.xpack.core.ssl.SSLConfigurationReloader;
import org.elasticsearch.xpack.core.ssl.SSLService;
import org.elasticsearch.xpack.core.transform.TransformMetadata;
import org.elasticsearch.xpack.core.termsenum.action.TermsEnumAction;
import org.elasticsearch.xpack.core.termsenum.action.TransportTermsEnumAction;
import org.elasticsearch.xpack.core.termsenum.rest.RestTermsEnumAction;
import org.elasticsearch.xpack.core.watcher.WatcherMetadata;
import org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshotsConstants;

import java.nio.file.Files;
import java.nio.file.Path;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.time.Clock;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.LongSupplier;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

/**
 * Core x-pack plugin. Based on the code below it wires licensing, SSL, data-tier allocation,
 * the terms-enum / reload-analyzers / delete-async-result actions and their REST handlers into
 * the node, and marks every x-pack-capable node with the {@code xpack.installed} node attribute
 * so that cluster-state readiness for x-pack custom metadata can be checked during rolling
 * upgrades.
 */
public class XPackPlugin extends XPackClientPlugin implements ExtensiblePlugin, RepositoryPlugin, EnginePlugin, ClusterPlugin, MapperPlugin {

    private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(XPackPlugin.class);

    // Name of the hidden index that async-search results are stored in.
    public static final String ASYNC_RESULTS_INDEX = ".async-search";

    // Node attribute advertised by every node running this plugin; read back by
    // nodesNotReadyForXPackCustomMetadata and set in additionalSettings below.
    public static final String XPACK_INSTALLED_NODE_ATTR = "xpack.installed";

    // TODO: clean up this library to not ask for write access to all system properties!
    static {
        // invoke this clinit in unbound with permissions to access all system properties
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            sm.checkPermission(new SpecialPermission());
        }
        try {
            // Force-load the unboundid LDAP SDK classes here, under doPrivileged, so their
            // static initializers run with this plugin's permissions rather than failing
            // later on a caller's (narrower) permissions.
            AccessController.doPrivileged(new PrivilegedAction<Void>() {
                @Override
                public Void run() {
                    try {
                        Class.forName("com.unboundid.util.Debug");
                        Class.forName("com.unboundid.ldap.sdk.LDAPConnectionOptions");
                    } catch (ClassNotFoundException e) {
                        throw new RuntimeException(e);
                    }
                    return null;
                }
            });
            // TODO: fix gradle to add all security resources (plugin metadata) to test classpath
            // of watcher plugin, which depends on it directly. This prevents these plugins
            // from being initialized correctly by the test framework, and means we have to
            // have this leniency.
        } catch (ExceptionInInitializerError bogus) {
            // Deliberate leniency: only SecurityException-caused init failures are tolerated
            // (see the gradle TODO above); anything else is a real bug and is rethrown.
            if (bogus.getCause() instanceof SecurityException == false) {
                throw bogus; // some other bug
            }
        }
    }

    protected final Settings settings;
    //private final Environment env;
    protected final Licensing licensing;

    // These should not be directly accessed as they cannot be overridden in tests. Please use the getters so they can be overridden.
    // NOTE(review): these are process-wide singletons (static SetOnce) — they can be set at
    // most once per JVM, and the set/get ordering across the node lifecycle is load-bearing.
    private static final SetOnce<XPackLicenseState> licenseState = new SetOnce<>();
    private static final SetOnce<SSLService> sslService = new SetOnce<>();
    private static final SetOnce<LicenseService> licenseService = new SetOnce<>();
    private static final SetOnce<LongSupplier> epochMillisSupplier = new SetOnce<>();

    /**
     * Creates the plugin and eagerly installs the shared {@link XPackLicenseState}.
     *
     * @param settings   node settings captured at construction time (see FIXME below)
     * @param configPath accepted but not used by this constructor
     */
    public XPackPlugin(
            final Settings settings,
            final Path configPath) {
        super(settings);
        // FIXME: The settings might be changed after this (e.g. from "additionalSettings" method in other plugins)
        // We should only depend on the settings from the Environment object passed to createComponents
        this.settings = settings;

        // The license state is created now but resolves "now" lazily through the shared
        // epoch-millis supplier, which is only set later in createComponents.
        setLicenseState(new XPackLicenseState(settings, () -> getEpochMillisSupplier().getAsLong()));

        this.licensing = new Licensing(settings);
    }

    // overridable by tests
    protected Clock getClock() {
        return Clock.systemUTC();
    }

    // Instance-level accessors that tests can override; by default they delegate to the
    // static shared singletons below.
    protected SSLService getSslService() {
        return getSharedSslService();
    }

    protected LicenseService getLicenseService() {
        return getSharedLicenseService();
    }

    protected XPackLicenseState getLicenseState() {
        return getSharedLicenseState();
    }

    protected LongSupplier getEpochMillisSupplier() {
        return getSharedEpochMillisSupplier();
    }

    // Setters write through to the static SetOnce holders; each may be called at most once
    // per JVM (SetOnce semantics).
    protected void setSslService(SSLService sslService) {
        XPackPlugin.sslService.set(sslService);
    }

    protected void setLicenseService(LicenseService licenseService) {
        XPackPlugin.licenseService.set(licenseService);
    }

    protected void setLicenseState(XPackLicenseState licenseState) {
        XPackPlugin.licenseState.set(licenseState);
    }

    protected void setEpochMillisSupplier(LongSupplier epochMillisSupplier) {
        XPackPlugin.epochMillisSupplier.set(epochMillisSupplier);
    }

    /**
     * Returns the shared SSL service.
     *
     * @throws IllegalStateException if called before {@code createComponents} has constructed it.
     *         NOTE(review): only this getter guards against the not-yet-set case; the three
     *         getters below return whatever {@code SetOnce.get()} yields when unset.
     */
    public static SSLService getSharedSslService() {
        final SSLService ssl = XPackPlugin.sslService.get();
        if (ssl == null) {
            throw new IllegalStateException("SSL Service is not constructed yet");
        }
        return ssl;
    }

    public static LicenseService getSharedLicenseService() {
        return licenseService.get();
    }

    public static XPackLicenseState getSharedLicenseState() {
        return licenseState.get();
    }

    public static LongSupplier getSharedEpochMillisSupplier() {
        return epochMillisSupplier.get();
    }

    /**
     * Checks if the cluster state allows this node to add x-pack metadata to the cluster state,
     * and throws an exception otherwise.
     * This check should be called before installing any x-pack metadata to the cluster state,
     * to ensure that the other nodes that are part of the cluster will be able to deserialize
     * that metadata. Note that if the cluster state already contains x-pack metadata, this
     * check assumes that the nodes are already ready to receive additional x-pack metadata.
     * Having this check properly in place everywhere allows to install x-pack into a cluster
     * using a rolling restart.
     */
    public static void checkReadyForXPackCustomMetadata(ClusterState clusterState) {
        if (alreadyContainsXPackCustomMetadata(clusterState)) {
            return;
        }
        List<DiscoveryNode> notReadyNodes = nodesNotReadyForXPackCustomMetadata(clusterState);
        if (notReadyNodes.isEmpty() == false) {
            throw new IllegalStateException("The following nodes are not ready yet for enabling x-pack custom metadata: " + notReadyNodes);
        }
    }

    /**
     * Checks if the cluster state allows this node to add x-pack metadata to the cluster state.
     * See {@link #checkReadyForXPackCustomMetadata} for more details.
     */
    public static boolean isReadyForXPackCustomMetadata(ClusterState clusterState) {
        return alreadyContainsXPackCustomMetadata(clusterState) || nodesNotReadyForXPackCustomMetadata(clusterState).isEmpty();
    }

    /**
     * Returns the list of nodes that won't allow this node from adding x-pack metadata to the cluster state.
     * See {@link #checkReadyForXPackCustomMetadata} for more details.
     */
    public static List<DiscoveryNode> nodesNotReadyForXPackCustomMetadata(ClusterState clusterState) {
        // check that all nodes would be capable of deserializing newly added x-pack metadata
        // A node without the attribute at all is treated as not ready (defaults to "false").
        final List<DiscoveryNode> notReadyNodes = StreamSupport.stream(clusterState.nodes().spliterator(), false).filter(node -> {
            final String xpackInstalledAttr = node.getAttributes().getOrDefault(XPACK_INSTALLED_NODE_ATTR, "false");
            return Booleans.parseBoolean(xpackInstalledAttr) == false;
        }).collect(Collectors.toList());

        return notReadyNodes;
    }

    // True if any of the known x-pack custom metadata sections (license, ML, watcher, token,
    // transform) is already present in the cluster state.
    private static boolean alreadyContainsXPackCustomMetadata(ClusterState clusterState) {
        final Metadata metadata = clusterState.metadata();
        return metadata.custom(LicensesMetadata.TYPE) != null ||
            metadata.custom(MlMetadata.TYPE) != null ||
            metadata.custom(WatcherMetadata.TYPE) != null ||
            clusterState.custom(TokenMetadata.TYPE) != null ||
            metadata.custom(TransformMetadata.TYPE) != null;
    }

    @Override
    public Map<String, MetadataFieldMapper.TypeParser> getMetadataMappers() {
        return Map.of(DataTierFieldMapper.NAME, DataTierFieldMapper.PARSER);
    }

    /**
     * Advertises {@code node.attr.xpack.installed=true} on this node, and rejects any attempt
     * to set that attribute directly in the node configuration.
     */
    @Override
    public Settings additionalSettings() {
        final String xpackInstalledNodeAttrSetting = "node.attr." + XPACK_INSTALLED_NODE_ATTR;

        if (settings.get(xpackInstalledNodeAttrSetting) != null) {
            throw new IllegalArgumentException("Directly setting [" + xpackInstalledNodeAttrSetting + "] is not permitted");
        }
        return Settings.builder().put(super.additionalSettings()).put(xpackInstalledNodeAttrSetting, "true").build();
    }

    /**
     * Builds the SSL service, license service and epoch-millis supplier, storing each in its
     * shared SetOnce holder, and returns them as injectable components.
     */
    @Override
    public Collection<Object> createComponents(Client client, ClusterService clusterService, ThreadPool threadPool,
                                               ResourceWatcherService resourceWatcherService, ScriptService scriptService,
                                               NamedXContentRegistry xContentRegistry, Environment environment,
                                               NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry,
                                               IndexNameExpressionResolver expressionResolver,
                                               Supplier<RepositoriesService> repositoriesServiceSupplier) {
        List<Object> components = new ArrayList<>();

        final SSLService sslService = createSSLService(environment, resourceWatcherService);
        setLicenseService(new LicenseService(settings, clusterService, getClock(),
            environment, resourceWatcherService, getLicenseState()));

        // Completes the lazy wiring started in the constructor: the license state's "now"
        // supplier becomes usable only from this point on.
        setEpochMillisSupplier(threadPool::absoluteTimeInMillis);

        // It is useful to override these as they are what guice is injecting into actions
        components.add(sslService);
        components.add(getLicenseService());
        components.add(getLicenseState());

        return components;
    }

    @Override
    public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() {
        List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> actions = new ArrayList<>();
        // Info/usage transports go through the overridable getters so tests can substitute them.
        actions.add(new ActionHandler<>(XPackInfoAction.INSTANCE, getInfoAction()));
        actions.add(new ActionHandler<>(XPackUsageAction.INSTANCE, getUsageAction()));
        actions.addAll(licensing.getActions());
        actions.add(new ActionHandler<>(ReloadAnalyzerAction.INSTANCE, TransportReloadAnalyzersAction.class));
        actions.add(new ActionHandler<>(TermsEnumAction.INSTANCE, TransportTermsEnumAction.class));
        actions.add(new ActionHandler<>(DeleteAsyncResultAction.INSTANCE, TransportDeleteAsyncResultAction.class));
        actions.add(new ActionHandler<>(XPackInfoFeatureAction.DATA_TIERS, DataTiersInfoTransportAction.class));
        actions.add(new ActionHandler<>(XPackUsageFeatureAction.DATA_TIERS, DataTiersUsageTransportAction.class));
        return actions;
    }

    // overridable for tests
    protected Class<? extends TransportAction<XPackUsageRequest, XPackUsageResponse>> getUsageAction() {
        return TransportXPackUsageAction.class;
    }

    // overridable for tests
    protected Class<? extends TransportAction<XPackInfoRequest, XPackInfoResponse>> getInfoAction() {
        return TransportXPackInfoAction.class;
    }

    @Override
    public List<ActionType<? extends ActionResponse>> getClientActions() {
        List<ActionType<? extends ActionResponse>> actions = new ArrayList<>();
        actions.addAll(licensing.getClientActions());
        actions.addAll(super.getClientActions());
        return actions;
    }

    @Override
    public List<ActionFilter> getActionFilters() {
        List<ActionFilter> filters = new ArrayList<>();
        filters.addAll(licensing.getActionFilters());
        return filters;
    }

    @Override
    public List<RestHandler> getRestHandlers(Settings settings, RestController restController, ClusterSettings clusterSettings,
            IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter, IndexNameExpressionResolver indexNameExpressionResolver,
            Supplier<DiscoveryNodes> nodesInCluster) {
        List<RestHandler> handlers = new ArrayList<>();
        handlers.add(new RestXPackInfoAction());
        handlers.add(new RestXPackUsageAction());
        handlers.add(new RestReloadAnalyzersAction());
        handlers.add(new RestTermsEnumAction());
        handlers.addAll(licensing.getRestHandlers(settings, restController, clusterSettings, indexScopedSettings, settingsFilter,
                indexNameExpressionResolver, nodesInCluster));
        return handlers;
    }

    // Guice helper: registers a feature set as an eager singleton and adds it to the
    // XPackFeatureSet multibinding.
    public static void bindFeatureSet(Binder binder, Class<? extends XPackFeatureSet> featureSet) {
        Multibinder<XPackFeatureSet> featureSetBinder = createFeatureSetMultiBinder(binder, featureSet);
        featureSetBinder.addBinding().to(featureSet);
    }

    public static Multibinder<XPackFeatureSet> createFeatureSetMultiBinder(Binder binder, Class<? extends XPackFeatureSet> featureSet) {
        binder.bind(featureSet).asEagerSingleton();
        return Multibinder.newSetBinder(binder, XPackFeatureSet.class);
    }

    /**
     * Resolves a config file by name, falling back (with a deprecation warning) to the legacy
     * {@code <config>/x-pack/<name>} location when the file is absent from the modern location.
     */
    public static Path resolveConfigFile(Environment env, String name) {
        Path config = env.configFile().resolve(name);
        if (Files.exists(config) == false) {
            Path legacyConfig = env.configFile().resolve("x-pack").resolve(name);
            if (Files.exists(legacyConfig)) {
                deprecationLogger.deprecate(DeprecationCategory.OTHER, "config_file_path",
                    "Config file [" + name + "] is in a deprecated location. Move from " +
                    legacyConfig.toString() + " to " + config.toString());
                return legacyConfig;
            }
        }
        return config;
    }

    @Override
    public Map<String, Repository.Factory> getRepositories(Environment env, NamedXContentRegistry namedXContentRegistry,
                                                           ClusterService clusterService, BigArrays bigArrays,
                                                           RecoverySettings recoverySettings) {
        // Registers the "source" (source-only snapshot) repository type.
        return Collections.singletonMap("source", SourceOnlySnapshotRepository.newRepositoryFactory());
    }

    @Override
    public Optional<EngineFactory> getEngineFactory(IndexSettings indexSettings) {
        // Source-only indices get a dedicated engine — except when they are backed by a
        // searchable-snapshot store.
        if (indexSettings.getValue(SourceOnlySnapshotRepository.SOURCE_ONLY) &&
            SearchableSnapshotsConstants.isSearchableSnapshotStore(indexSettings.getSettings()) == false) {
            return Optional.of(SourceOnlySnapshotRepository.getEngineFactory());
        }
        return Optional.empty();
    }

    @Override
    public List<Setting<?>> getSettings() {
        List<Setting<?>> settings = super.getSettings();
        settings.add(SourceOnlySnapshotRepository.SOURCE_ONLY);
        // Data-tier routing settings, cluster- and index-scoped.
        settings.add(DataTierAllocationDecider.CLUSTER_ROUTING_REQUIRE_SETTING);
        settings.add(DataTierAllocationDecider.CLUSTER_ROUTING_INCLUDE_SETTING);
        settings.add(DataTierAllocationDecider.CLUSTER_ROUTING_EXCLUDE_SETTING);
        settings.add(DataTierAllocationDecider.INDEX_ROUTING_REQUIRE_SETTING);
        settings.add(DataTierAllocationDecider.INDEX_ROUTING_INCLUDE_SETTING);
        settings.add(DataTierAllocationDecider.INDEX_ROUTING_EXCLUDE_SETTING);
        settings.add(DataTierAllocationDecider.INDEX_ROUTING_PREFER_SETTING);
        return settings;
    }

    @Override
    public Collection<AllocationDecider> createAllocationDeciders(Settings settings, ClusterSettings clusterSettings) {
        return Collections.singleton(new DataTierAllocationDecider(settings, clusterSettings));
    }

    @Override
    public Collection<IndexSettingProvider> getAdditionalIndexSettingProviders() {
        return Collections.singleton(new DataTier.DefaultHotAllocationSettingProvider());
    }

    /**
     * Handles the creation of the SSLService along with the necessary actions to enable reloading
     * of SSLContexts when configuration files change on disk.
     */
    private SSLService createSSLService(Environment environment, ResourceWatcherService resourceWatcherService) {
        final Map<String, SSLConfiguration> sslConfigurations = SSLService.getSSLConfigurations(environment.settings());
        // The reloader is constructed before the service and wired to it afterwards.
        final SSLConfigurationReloader reloader =
            new SSLConfigurationReloader(environment, resourceWatcherService, sslConfigurations.values());
        final SSLService sslService = new SSLService(environment, sslConfigurations);
        reloader.setSSLService(sslService);
        setSslService(sslService);
        return sslService;
    }
}
package io.nextop.view;

import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewParent;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import io.nextop.rx.RxDebugger;

import javax.annotation.Nullable;
import java.util.*;
import java.util.concurrent.TimeUnit;

// TODO visualization and interactions to support the debug fragment
/**
 * Overlay view that draws a red, fading highlight (plus an onNext count label) over each view
 * that has recent {@link RxDebugger.Stats} activity, recursing through a precomputed
 * {@link ViewSummary} tree. Highlights fade over 1s; count labels disappear after 2s.
 */
public class DebugOverlayView extends View {
    // Sentinel tag used to identify this overlay in the view hierarchy (set in init()).
    public static final Object TAG = new Object();

    @Nullable
    private ViewSummary rootViewSummary = null;

    // temp draw state
    // Shared scratch objects reused across draw calls to avoid per-frame allocation.
    final Paint paint = new Paint();
    final int[] wloc = new int[2];
    // Timestamp taken once per onDraw pass; drawViewSummary ages stats against it.
    long nanos;

    public DebugOverlayView(Context context) {
        super(context);
        init();
    }
    public DebugOverlayView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init();
    }
    public DebugOverlayView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init();
    }
    @SuppressLint("NewApi")
    public DebugOverlayView(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
        super(context, attrs, defStyleAttr, defStyleRes);
        init();
    }

    private void init() {
        setTag(TAG);
    }

    // FIXME
    // visualizations/interactions:
    // - press and hold over debug fragment and drag, to resize debug fragment
    // - listen to Debugger.getStats() and maintain a list of stats to display (keyed by subscriber)
    // -- draw stats either 1. over the associated view 2. in the bottom right as a circle coming out, representing no view
    // TODO is there ever a case of going from view to no view? seems rare (don't need an animation for this)
    // TODO flash the region when there is an update to a region

    /** Replaces the summary tree to render and schedules a redraw. Pass null to clear. */
    public void setRootViewSummary(@Nullable ViewSummary rootViewSummary) {
        this.rootViewSummary = rootViewSummary;
        invalidate();
        // TODO check update times, run a poller until final animation time
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        if (null != rootViewSummary) {
            // - draw the root in the bottom right
            // - draw the view summaries recursively
            // Note: the root itself (view == null) is not drawn; only its descendants are.
            nanos = System.nanoTime();
            for (ViewSummary vs : rootViewSummary.nearestDescendants) {
                drawViewSummary(canvas, vs);
            }
        }
    }

    // FIXME saw a SO here
    // NOTE(review): recursion depth equals the summary-tree depth; the author's FIXME above
    // records an observed StackOverflowError on this path.
    private void drawViewSummary(Canvas canvas, ViewSummary viewSummary) {
        // FIXME
        // NOTE(review): viewSummary.view is @Nullable but dereferenced unconditionally here;
        // callers appear to only pass summaries with non-null views — TODO confirm.
        View rview = viewSummary.view;
        float rw = rview.getWidth();
        float rh = rview.getHeight();
        // Convert the target view's window coordinates into this overlay's coordinate space.
        rview.getLocationInWindow(wloc);
        float rx = wloc[0];
        float ry = wloc[1];
        getLocationInWindow(wloc);
        rx -= wloc[0];
        ry -= wloc[1];

        // Inset (px) from the target view's bounds for the highlight rect.
        float p = 2.f;

        // FIXME lerpColor, maskColor utils (just port processing)
        // FIXME colors etc
        // Age of the most recent stats update, in ms; drives the fade-out below.
        int millis = (int) TimeUnit.NANOSECONDS.toMillis(nanos - viewSummary.nanos);
        int fc, sc;
        float sw;
        if (millis < 1000) {
            // Fresh update: translucent red fill fading to 0 alpha over 1s,
            // stroke width shrinking from 5 to 2 over the same window.
            fc = Color.argb(35 * (1000 - millis) / 1000, 255, 0, 0);
            sc = Color.argb(255, 255, 0, 0);
            sw = 2.f + 3 * (1000 - millis) / 1000.f;
        } else {
            // Stale: no fill, thin solid red outline.
            fc = 0;
            sc = Color.argb(255, 255, 0, 0);
            sw = 2.f;
        }

        // NOTE(review): right/bottom edges are inset by 2*p while left/top are inset by p
        // (rx + rw - 2 * p vs rx + p) — looks asymmetric; confirm whether this is intentional.
        if (0 < Color.alpha(fc)) {
            paint.setStyle(Paint.Style.FILL);
            paint.setColor(fc);
            canvas.drawRect(rx + p, ry + p, rx + rw - 2 * p, ry + rh - 2 * p, paint);
        }
        if (0 < Color.alpha(sc)) {
            paint.setStyle(Paint.Style.STROKE);
            paint.setStrokeWidth(sw);
            paint.setColor(sc);
            canvas.drawRect(rx + p, ry + p, rx + rw - 2 * p, ry + rh - 2 * p, paint);
        }

        // FIXME
        // summary text
        if (millis < 2000) {
            // count + type
            paint.setStyle(Paint.Style.FILL);
            // paint.setStrokeWidth(1.f);
            paint.setTextAlign(Paint.Align.LEFT);
            paint.setTextSize(32.f);
            paint.setColor(Color.argb(220, 255, 0, 0));
            canvas.drawText(String.format("%d", viewSummary.netOnNextCount), rx + 16, ry + rh - 16, paint);
        }

        // Recurse into the nearest descendants (see FIXME about stack depth above).
        for (ViewSummary vs : viewSummary.nearestDescendants) {
            drawViewSummary(canvas, vs);
        }
    }

    /**
     * Immutable roll-up of {@link RxDebugger.Stats} for one view, plus the sub-summaries of the
     * nearest stats-bearing descendant views. The root summary has a null view and aggregates
     * the stats that could not be attached to any visible view.
     */
    public static class ViewSummary {
        // take the flags from the most recent update
        public final int flags;
        // Timestamp of the most recent contributing Stats (-1 when no stats contributed).
        public final long nanos;
        // Null for the root summary and for stats whose view was absent or not visible.
        @Nullable
        public final View view;
        // Sums over all Stats attached to this view.
        public final int netOnNextCount;
        public final int netOnCompletedCount;
        public final int netOnErrorCount;
        public final int netFailedNotificationCount;
        /** this reflects the descendants in the view hierarchy
         * that have no closer common descendant/ancestor */
        public final List<ViewSummary> nearestDescendants;

        ViewSummary(final int flags, @Nullable View view,
                    int netOnNextCount, int netOnCompletedCount, int netOnErrorCount, int netFailedNotificationCount,
                    final long nanos, List<ViewSummary> nearestDescendants) {
            this.nanos = nanos;
            this.view = view;
            this.netOnNextCount = netOnNextCount;
            this.netOnCompletedCount = netOnCompletedCount;
            this.netOnErrorCount = netOnErrorCount;
            this.netFailedNotificationCount = netFailedNotificationCount;
            this.flags = flags;
            this.nearestDescendants = nearestDescendants;
        }

        /**
         * Builds the summary tree for a stats snapshot. Stats with no view, or whose view is
         * not currently visible, are bucketed under the null (root) key. For each remaining
         * view, its nearest stats-bearing ancestor in the view hierarchy is found and the
         * parent/child edges of the summary tree are derived from those relationships.
         */
        public static ViewSummary create(Collection<RxDebugger.Stats> allStats) {
            Multimap<View, RxDebugger.Stats> statsMap = ArrayListMultimap.create();
            for (RxDebugger.Stats stats : allStats) {
                @Nullable View v;
                if (null == stats.view || View.VISIBLE != stats.view.getWindowVisibility() || View.VISIBLE != stats.view.getVisibility()) {
                    v = null;
                } else {
                    v = stats.view;
                }
                statsMap.put(v, stats);
            }

            // create the graph
            // nearestAncestors memoizes, for each view AND each intermediate ViewParent walked
            // over, the nearest ancestor view that carries stats (null = attach to root).
            Map<Object, View> nearestAncestors = new HashMap<Object, View>(statsMap.size());
            Multimap<View, View> nearestDescendants = ArrayListMultimap.create();

            Set<View> views = statsMap.keySet();
            for (@Nullable View view : views) {
                if (null != view) {
                    @Nullable View nearestAncestor;
                    if (!nearestAncestors.containsKey(view)) {
                        ViewParent p;
                        View a = null;
                        // Walk up the parent chain until hitting either a memoized answer or
                        // another stats-bearing view; a stays null if neither is found.
                        for (p = view.getParent(); null != p; p = p.getParent()) {
                            if (nearestAncestors.containsKey(p)) {
                                a = nearestAncestors.get(p);
                                break;
                            } else if (views.contains(p)) {
                                a = (View) p;
                                break;
                            }
                        }
                        nearestAncestor = a;
                        // propagate down
                        // Memoize the answer for this view and every parent walked over.
                        // The q != p bound relies on p holding the node the walk stopped at
                        // (or null when the walk exhausted the chain).
                        nearestAncestors.put(view, nearestAncestor);
                        for (ViewParent q = view.getParent(); q != p; q = q.getParent()) {
                            nearestAncestors.put(q, nearestAncestor);
                        }
                    } else {
                        nearestAncestor = nearestAncestors.get(view);
                    }
                    nearestDescendants.put(nearestAncestor, view);
                }
            }

            // Recursively materialize the tree starting from the null (root) key.
            return create(statsMap, nearestDescendants, null);
        }

        // Rolls up the stats bucketed under `view` and recurses into its nearest descendants.
        private static ViewSummary create(Multimap<View, RxDebugger.Stats> statsMap, Multimap<View, View> nearestDescendants, @Nullable View view) {
            // roll up the stats
            long mostRecentNanos = -1L;
            int mostRecentFlags = 0;
            int netOnNextCount = 0;
            int netOnCompletedCount = 0;
            int netOnErrorCount = 0;
            int netFailedNotificationCount = 0;
            for (RxDebugger.Stats stats : statsMap.get(view)) {
                // flags are taken from whichever Stats has the latest timestamp
                if (mostRecentNanos < stats.nanos) {
                    mostRecentNanos = stats.nanos;
                    mostRecentFlags = stats.flags;
                }
                netOnNextCount += stats.onNextCount;
                netOnCompletedCount += stats.onCompletedCount;
                netOnErrorCount += stats.onErrorCount;
                netFailedNotificationCount += stats.failedNotificationCount;
            }

            Collection<View> ndVs = nearestDescendants.get(view);
            List<ViewSummary> ndVss = new ArrayList<ViewSummary>(ndVs.size());
            for (View nearestDescendant : ndVs) {
                ndVss.add(create(statsMap, nearestDescendants, nearestDescendant));
            }

            return new ViewSummary(mostRecentFlags, view, netOnNextCount, netOnCompletedCount, netOnErrorCount, netFailedNotificationCount, mostRecentNanos, ndVss);
        }
    }
}
/* * Copyright 2012 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import static com.google.javascript.jscomp.ClosureRewriteClass.GOOG_CLASS_CONSTRUCTOR_MISSING; import static com.google.javascript.jscomp.ClosureRewriteClass.GOOG_CLASS_CONSTRUCTOR_NOT_VALID; import static com.google.javascript.jscomp.ClosureRewriteClass.GOOG_CLASS_CONSTRUCTOR_ON_INTERFACE; import static com.google.javascript.jscomp.ClosureRewriteClass.GOOG_CLASS_DESCRIPTOR_NOT_VALID; import static com.google.javascript.jscomp.ClosureRewriteClass.GOOG_CLASS_ES6_ARROW_FUNCTION_NOT_SUPPORTED; import static com.google.javascript.jscomp.ClosureRewriteClass.GOOG_CLASS_ES6_COMPUTED_PROP_NAMES_NOT_SUPPORTED; import static com.google.javascript.jscomp.ClosureRewriteClass.GOOG_CLASS_NG_INJECT_ON_CLASS; import static com.google.javascript.jscomp.ClosureRewriteClass.GOOG_CLASS_STATICS_NOT_VALID; import static com.google.javascript.jscomp.ClosureRewriteClass.GOOG_CLASS_SUPER_CLASS_NOT_VALID; import static com.google.javascript.jscomp.ClosureRewriteClass.GOOG_CLASS_TARGET_INVALID; import static com.google.javascript.jscomp.ClosureRewriteClass.GOOG_CLASS_UNEXPECTED_PARAMS; import com.google.javascript.jscomp.CompilerOptions.LanguageMode; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** * Unit tests for ClosureRewriteGoogClass * * @author johnlenz@google.com (John Lenz) */ 
@RunWith(JUnit4.class) public final class ClosureRewriteClassTest extends CompilerTestCase { private static final String EXTERNS = lines( MINIMAL_EXTERNS, "/** @const */ var goog = {};", "goog.inherits = function(a,b) {};", "goog.defineClass = function(a,b) {};", "var use;"); private static final Diagnostic INSTANTIATE_ABSTRACT_CLASS = warning(TypeCheck.INSTANTIATE_ABSTRACT_CLASS); private static final Diagnostic NOT_A_CONSTRUCTOR = warning(TypeCheck.NOT_A_CONSTRUCTOR); private static final Diagnostic INEXISTENT_PROPERTY = warning(TypeCheck.INEXISTENT_PROPERTY); public ClosureRewriteClassTest() { super(EXTERNS); } @Override protected CompilerPass getProcessor(Compiler compiler) { return new ClosureRewriteClass(compiler); } @Override @Before public void setUp() throws Exception { super.setUp(); disableTypeCheck(); enableRunTypeCheckAfterProcessing(); } @Override protected int getNumRepetitions() { return 1; } private void testRewrite(String code, String expected, LanguageMode lang) { setAcceptedLanguage(lang); test(code, expected); } private void testRewrite(String code, String expected){ testRewrite(code, expected, LanguageMode.ECMASCRIPT3); testRewrite(code, expected, LanguageMode.ECMASCRIPT_2015); } private void testRewriteError(String js, DiagnosticType error, LanguageMode lang) { setAcceptedLanguage(lang); testError(js, error); } private void testRewriteError(String js, DiagnosticType error){ testRewriteError(js, error, LanguageMode.ECMASCRIPT3); testRewriteError(js, error, LanguageMode.ECMASCRIPT_2015); } private void testRewriteWarning(String code, String expected, Diagnostic warning, LanguageMode lang) { setAcceptedLanguage(lang); test(code, expected, warning); } private void testRewriteWarning(String code, String expected, Diagnostic warning) { testRewriteWarning(code, expected, warning, LanguageMode.ECMASCRIPT3); testRewriteWarning(code, expected, warning, LanguageMode.ECMASCRIPT_2015); } @Test public void testBasic1() { testRewrite( "var x = 
goog.defineClass(null, {\n" + " constructor: function(){}\n" + "});", "/** @constructor @struct */" + "var x = function() {};"); } @Test public void testBasic2() { testRewrite( "var x = {};\n" + "x.y = goog.defineClass(null, {\n" + " constructor: function(){}\n" + "});", "var x = {};" + "/** @constructor @struct */" + "x.y = function() {};"); } @Test public void testBasic3() { // verify we don't add a goog.inherits for Object testRewrite( "var x = goog.defineClass(Object, {\n" + " constructor: function(){}\n" + "});", "/** @constructor @struct */" + "var x = function() {};"); } @Test public void testLet() { testRewrite( "let x = goog.defineClass(null, {\n" + " constructor: function(){}\n" + "});", "/** @constructor @struct */" + "let x = function() {};", LanguageMode.ECMASCRIPT_2015); } @Test public void testConst() { testRewrite( "const x = goog.defineClass(null, {\n" + " constructor: function(){}\n" + "});", "/** @constructor @struct */" + "const x = function() {};", LanguageMode.ECMASCRIPT_2015); } @Test public void testAnnotations1() { // verify goog.defineClass values are constructible, by default enableTypeCheck(); testRewrite( "var x = goog.defineClass(Object, {\n" + " constructor: function(){}\n" + "});" + "new x();", "/** @constructor @struct */" + "var x = function() {};" + "new x();"); } @Test public void testAnnotations2a() { // @interface is preserved enableTypeCheck(); testRewriteWarning( lines( "var x = goog.defineClass(null, {", " /** @interface */", " constructor: function(){}", "});", "new x();"), lines( "/** @struct @interface */", "var x = function() {};", "new x();"), NOT_A_CONSTRUCTOR); } @Test public void testAnnotations2b() { // @interface is preserved, at the class level too enableTypeCheck(); testRewriteWarning( lines( "/** @interface */", "var x = goog.defineClass(null, {});", "new x();"), lines( "/** @struct @interface */", "var x = function() {};", "new x();"), NOT_A_CONSTRUCTOR); } @Test public void testAnnotations3a() { // verify 
goog.defineClass is a @struct by default enableTypeCheck(); testRewriteWarning( lines( "var y = goog.defineClass(null, {", " constructor: function(){}", "});", "var x = goog.defineClass(y, {", " constructor: function(){this.a = 1}", "});", "use(new y().a);"), lines( "/** @constructor @struct */", "var y = function () {};", "/** @constructor @struct @extends {y} */", "var x = function() {this.a = 1};", "goog.inherits(x,y);", "use(new y().a);"), INEXISTENT_PROPERTY); } @Test public void testAnnotations3b() { // verify goog.defineClass is a @struct by default, but can be overridden enableTypeCheck(); testRewrite( lines( "/** @unrestricted */", "var y = goog.defineClass(null, {", " constructor: function(){}", "});", "var x = goog.defineClass(y, {", " constructor: function(){this.a = 1}", "});", "use(new y().a);"), lines( "/** @constructor @unrestricted */", "var y = function () {};", "/** @constructor @struct @extends {y} */", "var x = function() {this.a = 1};", "goog.inherits(x,y);", "use(new y().a);")); } @Test public void testRecordAnnotations() { // @record is preserved testRewrite( "/** @record */\n" + "var Rec = goog.defineClass(null, {f : function() {}});", "/** @struct @record */\n" + "var Rec = function() {};\n" + "Rec.prototype.f = function() {};"); } @Test public void testRecordAnnotations2() { enableTypeCheck(); testRewrite( "/** @record */\n" + "var Rec = goog.defineClass(null, {f : function() {}});\n" + "var /** !Rec */ r = { f : function() {} };", "/** @struct @record */\n" + "var Rec = function() {};\n" + "Rec.prototype.f = function() {};\n" + "var /** !Rec */ r = { f : function() {} };"); } @Test public void testAbstract1() { // @abstract is preserved enableTypeCheck(); testRewriteWarning( lines( "var x = goog.defineClass(null, {", " /** @abstract */", " constructor: function() {}", "});", "new x();"), lines( "/** @abstract @struct @constructor */", "var x = function() {};", "new x();"), INSTANTIATE_ABSTRACT_CLASS); } @Test public void testAbstract2() 
{ // @abstract is preserved, at the class level too enableTypeCheck(); testRewriteWarning( lines( "/** @abstract */", "var x = goog.defineClass(null, {", " constructor: function() {}", "});", "new x();"), lines( "/** @abstract @struct @constructor */", "var x = function() {};", "new x();"), INSTANTIATE_ABSTRACT_CLASS); } @Test public void testInnerClass1() { testRewrite( "var x = goog.defineClass(some.Super, {\n" + " constructor: function(){\n" + " this.foo = 1;\n" + " },\n" + " statics: {\n" + " inner: goog.defineClass(x,{\n" + " constructor: function(){\n" + " this.bar = 1;\n" + " }\n" + " })\n" + " }\n" + "});", "/** @constructor @struct @extends {some.Super} */\n" + "var x = function() { this.foo = 1; };\n" + "goog.inherits(x, some.Super);\n" + "/** @constructor @struct @extends {x} */\n" + "x.inner = function() { this.bar = 1; };\n" + "goog.inherits(x.inner, x);"); } @Test public void testComplete1() { testRewrite( "var x = goog.defineClass(some.Super, {\n" + " constructor: function(){\n" + " this.foo = 1;\n" + " },\n" + " statics: {\n" + " prop1: 1,\n" + " /** @const */\n" + " PROP2: 2\n" + " },\n" + " anotherProp: 1,\n" + " aMethod: function() {}\n" + "});", "/** @constructor @struct @extends {some.Super} */\n" + "var x=function(){this.foo=1};\n" + "goog.inherits(x, some.Super);\n" + "x.prop1=1;\n" + "/** @const */\n" + "x.PROP2=2;\n" + "x.prototype.anotherProp = 1;\n" + "x.prototype.aMethod = function(){};"); } @Test public void testComplete2() { testRewrite( "x.y = goog.defineClass(some.Super, {\n" + " constructor: function(){\n" + " this.foo = 1;\n" + " },\n" + " statics: {\n" + " prop1: 1,\n" + " /** @const */\n" + " PROP2: 2\n" + " },\n" + " anotherProp: 1,\n" + " aMethod: function() {}\n" + "});", "/** @constructor @struct @extends {some.Super} */\n" + "x.y=function(){this.foo=1};\n" + "goog.inherits(x.y,some.Super);\n" + "x.y.prop1 = 1;\n" + "/** @const */\n" + "x.y.PROP2 = 2;\n" + "x.y.prototype.anotherProp = 1;\n" + 
"x.y.prototype.aMethod=function(){};"); } @Test public void testClassWithStaticInitFn() { testRewrite( "x.y = goog.defineClass(some.Super, {\n" + " constructor: function(){\n" + " this.foo = 1;\n" + " },\n" + " statics: function(cls) {\n" + " cls.prop1 = 1;\n" + " /** @const */\n" + " cls.PROP2 = 2;\n" + " },\n" + " anotherProp: 1,\n" + " aMethod: function() {}\n" + "});", lines( "/** @constructor @struct @extends {some.Super} */", "x.y = function() { this.foo = 1; };", "goog.inherits(x.y, some.Super);", "x.y.prototype.anotherProp = 1;", "x.y.prototype.aMethod = function() {};", "(function(cls) {", " x.y.prop1 = 1;", " /** @const */", " x.y.PROP2 = 2;", "})(x.y);")); } @Test public void testPrivate1() { testRewrite( lines( "/** @private */", "x.y_ = goog.defineClass(null, {", " constructor: function() {}", "});"), "/** @private @constructor @struct */ x.y_ = function() {};"); } @Test public void testPrivate2() { testRewrite( lines( "/** @private */", "x.y_ = goog.defineClass(null, {", " /** @param {string} s */", " constructor: function(s) {}", "});"), lines( "/**", " * @private", " * @constructor", " * @struct", " * @param {string} s", " */", "x.y_ = function(s) {};")); } @Test public void testInvalid1() { testRewriteError("var x = goog.defineClass();", GOOG_CLASS_SUPER_CLASS_NOT_VALID); testRewriteError("var x = goog.defineClass('foo');", GOOG_CLASS_SUPER_CLASS_NOT_VALID); testRewriteError("var x = goog.defineClass(foo());", GOOG_CLASS_SUPER_CLASS_NOT_VALID); testRewriteError("var x = goog.defineClass({'foo':1});", GOOG_CLASS_SUPER_CLASS_NOT_VALID); testRewriteError("var x = goog.defineClass({1:1});", GOOG_CLASS_SUPER_CLASS_NOT_VALID); testRewriteError( "var x = goog.defineClass({get foo() {return 1}});", GOOG_CLASS_SUPER_CLASS_NOT_VALID, LanguageMode.ECMASCRIPT5); testRewriteError( "var x = goog.defineClass({set foo(a) {}});", GOOG_CLASS_SUPER_CLASS_NOT_VALID, LanguageMode.ECMASCRIPT5); } @Test public void testInvalid2() { testRewriteError("var x = 
goog.defineClass(null);", GOOG_CLASS_DESCRIPTOR_NOT_VALID); testRewriteError("var x = goog.defineClass(null, null);", GOOG_CLASS_DESCRIPTOR_NOT_VALID); testRewriteError("var x = goog.defineClass(null, foo());", GOOG_CLASS_DESCRIPTOR_NOT_VALID); } @Test public void testInvalid3() { testRewriteError("var x = goog.defineClass(null, {});", GOOG_CLASS_CONSTRUCTOR_MISSING); testRewriteError( "/** @interface */\n" + "var x = goog.defineClass(null, { constructor: function() {} });", GOOG_CLASS_CONSTRUCTOR_ON_INTERFACE); } @Test public void testInvalid4() { testRewriteError( "var x = goog.defineClass(null, {" + " constructor: function(){}," + " statics: null" + "});", GOOG_CLASS_STATICS_NOT_VALID); testRewriteError( "var x = goog.defineClass(null, {" + " constructor: function(){}," + " statics: foo" + "});", GOOG_CLASS_STATICS_NOT_VALID); testRewriteError( "var x = goog.defineClass(null, {" + " constructor: function(){}," + " statics: {'foo': 1}" + "});", GOOG_CLASS_STATICS_NOT_VALID); testRewriteError( "var x = goog.defineClass(null, {" + " constructor: function(){}," + " statics: {1: 1}" + "});", GOOG_CLASS_STATICS_NOT_VALID); } @Test public void testInvalid5() { testRewriteError( "var x = goog.defineClass(null, {" + " constructor: function(){}" + "}, null);", GOOG_CLASS_UNEXPECTED_PARAMS); } @Test public void testInvalid6() { testRewriteError("goog.defineClass();", GOOG_CLASS_TARGET_INVALID); testRewriteError("var x = goog.defineClass() || null;", GOOG_CLASS_TARGET_INVALID); testRewriteError("({foo: goog.defineClass()});", GOOG_CLASS_TARGET_INVALID); } @Test public void testInvalid7() { testRewriteError(lines( "var x = goog.defineClass(null, {", " constructor: foo", "});"), GOOG_CLASS_CONSTRUCTOR_NOT_VALID); } @Test public void testGoogModuleGet() { // This pattern can be produced by goog.scope processing from code that originally looks like: // goog.scope(function() { // var super = goog.module.get('ns.Foo'); // var y = goog.defineClass(super, { // // ... 
// }); // }; testRewrite( lines( "var y = goog.defineClass(goog.module.get('ns.Foo'), {", " constructor: function(){}", "});"), lines( "/** @struct @constructor @extends {ns.Foo} */", "var y = function(){};", "goog.inherits(y, goog.module.get('ns.Foo'));")); } @Test public void testNgInject() { testRewrite( "var x = goog.defineClass(Object, {\n" + " /** @ngInject */ constructor: function(x, y) {}\n" + "});", "/** @ngInject @constructor @struct */\n" + "var x = function(x, y) {};"); } @Test public void testNgInject_onClass() { testRewriteWarning( "/** @ngInject */\n" + "var x = goog.defineClass(Object, {\n" + " constructor: function(x, y) {}\n" + "});", "/** @ngInject @constructor @struct */\n" + "var x = function(x, y) {};", warning(GOOG_CLASS_NG_INJECT_ON_CLASS)); } // The two following tests are just to make sure that these functionalities in // Es6 does not break the compiler during this pass @Test public void testDestructParamOnFunction() { testRewrite( lines( "var FancyClass = goog.defineClass(null, {", " constructor: function({a, b, c}) {}", "});"), lines( "/** @constructor @struct */", "var FancyClass = function({a, b, c}) {};"), LanguageMode.ECMASCRIPT_2015); } @Test public void testDefaultParamOnFunction() { testRewrite( lines( "var FancyClass = goog.defineClass(null, {", " constructor: function(a = 1) {}", "});"), lines("/** @constructor @struct */", "var FancyClass = function(a = 1) {};"), LanguageMode.ECMASCRIPT_2015); } @Test public void testExtendedObjLitMethodDefinition1() { testRewrite( lines("var FancyClass = goog.defineClass(null, {", " constructor() {}", "});"), lines("/** @constructor @struct */", "var FancyClass = function() {};"), LanguageMode.ECMASCRIPT_2015); } @Test public void testExtendedObjLitMethodDefinition2() { testRewrite( lines( "var FancyClass = goog.defineClass(null, {", " constructor: function() {},", " someMethod1() {},", " someMethod2() {}", "});"), lines( "/** @constructor @struct */", "var FancyClass = function() {};", 
"FancyClass.prototype.someMethod1 = function() {};", "FancyClass.prototype.someMethod2 = function() {};"), LanguageMode.ECMASCRIPT_2015); } @Test public void testExtendedObjLitMethodDefinition3() { testRewrite( lines( "var FancyClass = goog.defineClass(null, {", " constructor() {},", " someMethod1() {},", " someMethod2() {}", "});"), lines( "/** @constructor @struct */", "var FancyClass = function() {};", "FancyClass.prototype.someMethod1 = function() {};", "FancyClass.prototype.someMethod2 = function() {};"), LanguageMode.ECMASCRIPT_2015); } @Test public void testExtendedObjLitMethodDefinition4() { testRewrite( lines( "var FancyClass = goog.defineClass(null, {", " constructor() {},", " statics:{", " someMethod1() {}", " },", " someMethod2() {}", "});"), lines( "/** @constructor @struct */", "var FancyClass = function() {};", "FancyClass.someMethod1 = function() {};", "FancyClass.prototype.someMethod2 = function() {};"), LanguageMode.ECMASCRIPT_2015); } @Test public void testExtendedObjLitArrowFunction1() { testRewriteError( lines( "var FancyClass = goog.defineClass(null, {", " constructor: function() {},", " someArrowFunc: value => value", "});"), GOOG_CLASS_ES6_ARROW_FUNCTION_NOT_SUPPORTED, LanguageMode.ECMASCRIPT_2015); } @Test public void testExtendedObjLitArrowFunction2() { testRewriteError( lines( "var FancyClass = goog.defineClass(null, {", " constructor: function() {},", " statics:{", " someArrowFunc: value => value", " }", "});"), GOOG_CLASS_ES6_ARROW_FUNCTION_NOT_SUPPORTED, LanguageMode.ECMASCRIPT_2015); } @Test public void testExtendedObjLitArrowFunction3() { testRewrite( lines( "var FancyClass = goog.defineClass(null, {", " constructor: function() {},", " statics:{", " someFunction() {", " return () => 42", " }", " }", "});"), lines( "/** @constructor @struct */", " var FancyClass = function() {};", " FancyClass.someFunction = function() {", " return () => 42", " };"), LanguageMode.ECMASCRIPT_2015); } @Test public void testExtendedObjLitArrowFunction4() 
{ testRewrite( lines( "var FancyClass = goog.defineClass(null, {", " constructor: function() {},", " someFunction: function() {", " return () => 42", " }", "});"), lines( "/** @constructor @struct */", " var FancyClass = function() {};", " FancyClass.prototype.someFunction = function(){", " return () => 42", " };"), LanguageMode.ECMASCRIPT_2015); } @Test public void testExtendedObjLitComputedPropName1() { testRewriteError( lines( "var FancyClass = goog.defineClass(null, {", " ['someCompProp_' + 42]: 47,", " someMember: 49,", " constructor: function() {},", "});"), GOOG_CLASS_ES6_COMPUTED_PROP_NAMES_NOT_SUPPORTED, LanguageMode.ECMASCRIPT_2015); } @Test public void testExtendedObjLitComputedPropName2() { testRewriteError( lines( "var FancyClass = goog.defineClass(null, {", " constructor: function() {},", " statics:{", " ['someCompProp_' + 1999]: 47", " }", "});"), GOOG_CLASS_ES6_COMPUTED_PROP_NAMES_NOT_SUPPORTED, LanguageMode.ECMASCRIPT_2015); } @Test public void testExtendedObjLitSuperCall1() { testRewrite( lines( "var FancyClass = goog.defineClass(null, {", " constructor: function() {},", " someMethod: function() {", " super.someMethod();", " }", "});"), lines( "/** @constructor @struct */", " var FancyClass = function() {};", " FancyClass.prototype.someMethod = function() {", " super.someMethod();", " };"), LanguageMode.ECMASCRIPT_2015); } @Test public void testExtendedObjLitSuperCall2() { testRewrite( lines( "var FancyClass = goog.defineClass(null, {", " constructor: function() {super();},", " someMethod: function() {}", "});"), lines( "/** @constructor @struct */", " var FancyClass = function() {super();};", " FancyClass.prototype.someMethod = function() {};"), LanguageMode.ECMASCRIPT_2015); } @Test public void testExtendedObjLitSuperCall3() { testRewrite( lines( "var FancyClass = goog.defineClass(null, {", " constructor: function() {},", " someMethod: function() {super();}", "});"), lines( "/** @constructor @struct */", "var FancyClass = function() {};", 
"FancyClass.prototype.someMethod = function() {super();};"), LanguageMode.ECMASCRIPT_2015); } //public void testNestedObjectLiteral(){ //testRewriteError( // lines( // "var FancyClass = goog.defineClass(null, {", // " constructor: function() {},", // " someNestedObjLit:{}", // "});"), // GOOG_CLASS_NESTED_OBJECT_LITERAL_FOUND, LanguageMode.ECMASCRIPT_2015); //testRewriteError( // lines( // "var FancyClass = goog.defineClass(null, {", // " constructor() {},", // " statics:{", // " someNestedObjLit:{}", // " }", // "});"), // GOOG_CLASS_NESTED_OBJECT_LITERAL_FOUND, LanguageMode.ECMASCRIPT_2015); //} }
package ticketline.gui; import java.awt.Component; import java.awt.Dimension; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.Insets; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.text.MessageFormat; import java.util.Date; import java.util.List; import java.util.ResourceBundle; import java.util.Vector; import javax.swing.BorderFactory; import javax.swing.DefaultListModel; import javax.swing.JButton; import javax.swing.JLabel; import javax.swing.JLayeredPane; import javax.swing.JList; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JSpinner; import javax.swing.ListSelectionModel; import javax.swing.SpinnerNumberModel; import javax.swing.event.ListSelectionEvent; import javax.swing.event.ListSelectionListener; import org.apache.log4j.Logger; import ticketline.bl.GuiMemory; import ticketline.bl.ITrail; import ticketline.bl.Trail; import ticketline.cfg.ConfigFactory; import ticketline.dao.DAOFactory; import ticketline.dao.interfaces.ArtikelDAO; import ticketline.dao.interfaces.BestellungDAO; import ticketline.db.Artikel; import ticketline.db.Bestellung; import ticketline.db.BestellungKey; import ticketline.db.Kunde; import ticketline.gui.components.DefaultSearchTable; import ticketline.gui.components.NoEditTableModel; /** * This Class provides the User Interface for * selling Promotional Materials Step 2 * * @author PatrickM, ReneN, AndiS * @version 0.1 * */ public class WerbematerialWaehlen extends JPanel implements ActionListener,ListSelectionListener,ITrail { private static final long serialVersionUID = 1L; private static ResourceBundle lang; private static Integer panel_width = ConfigFactory.getConfig().getDefaultPanelWidth(); private static Integer panel_heigth = ConfigFactory.getConfig().getDefaultPanelHeight(); private static Logger logger = Logger.getLogger(ReservierungTicketsSuchenBearbeitenGui.class); private Trail trail; 
private JLayeredPane pn_article; private JLayeredPane pn_list; // Table private String[] colnames; private Object[][] data; private DefaultSearchTable tb_articles; private NoEditTableModel tModel; private JScrollPane scrl_articles; // List private JList lst_chosen; private DefaultListModel lModel; private JScrollPane scrl_chosen; private JButton btn_add; private JButton btn_remove; private JButton btn_reset; private JButton btn_next; private JLabel lbl_sum; private SpinnerNumberModel nm; private JSpinner sp_num; private Vector<Artikel> selectedArtikel = new Vector<Artikel>(); private Vector<Artikel> list = new Vector<Artikel>(); private int[] listToSelected; private int[] count; public WerbematerialWaehlen() { logger.debug("WerbematerialienVerkaufenSchritt2Gui started"); lang = ConfigFactory.getConfig().getLanguageBundle(); pn_article = new JLayeredPane(); pn_list = new JLayeredPane(); btn_add = new JButton(lang.getString("BTN_SELECTARTICLE_NEXT")); btn_remove = new JButton(lang.getString("BTN_SELECTARTICLE_REMOVE")); btn_reset = new JButton(lang.getString("BTN_SELECTARTICLE_RESET")); btn_next = new JButton(lang.getString("BTN_SELECTARTICLE_FINISH")); lbl_sum = new JLabel(lang.getString("LBL_SELECTARTICLE_SUM") + ": 0"); // Initialize Table Colnames colnames = new String[5]; colnames[0] = lang.getString("LBL_SELECTARTICLE_NUMBER"); colnames[1] = lang.getString("LBL_SELECTARTICLE_SHORTDESC"); colnames[2] = lang.getString("LBL_SELECTARTICLE_CATEGORY"); colnames[3] = lang.getString("LBL_SELECTARTICLE_DESC"); colnames[4] = lang.getString("LBL_SELECTARTICLE_PRICE"); // Initialize Table tb_articles = new DefaultSearchTable(); tb_articles.setAutoCreateRowSorter(true); tb_articles.setSelectionMode(ListSelectionModel.SINGLE_SELECTION); tModel = new NoEditTableModel(data,colnames); tb_articles.setModel(tModel); tb_articles.getSelectionModel().addListSelectionListener(this); scrl_articles = new JScrollPane(tb_articles); scrl_articles.setMinimumSize(new Dimension(600,600)); 
scrl_articles.setPreferredSize(new Dimension(ConfigFactory.getConfig().getDefaultPanelWidth()-50, ConfigFactory.getConfig().getDefaultPanelHeight()-50)); // Initialize List lst_chosen = new JList(); lst_chosen.setSelectionMode(ListSelectionModel.SINGLE_SELECTION); lModel = new DefaultListModel(); lst_chosen.setModel(lModel); lst_chosen.getSelectionModel().addListSelectionListener(this); scrl_chosen = new JScrollPane(lst_chosen); scrl_chosen.setMinimumSize(new Dimension(600,600)); scrl_chosen.setPreferredSize(new Dimension(ConfigFactory.getConfig().getDefaultPanelWidth()-50, ConfigFactory.getConfig().getDefaultPanelHeight()-50)); // Num Spinner nm = new SpinnerNumberModel(1,0,999,1); sp_num = new JSpinner(nm); // Layout GridBagLayout gb_main = new GridBagLayout(); setLayout(gb_main); setSize(panel_width, panel_heigth); GridBagConstraints c = new GridBagConstraints(); c.anchor = GridBagConstraints.LINE_START; c.insets = new Insets(5,5,5,5); /** * Article Panel */ // Set Layout GridBagLayout gb_article = new GridBagLayout(); pn_article.setLayout(gb_article); pn_article.setBorder(BorderFactory.createTitledBorder(lang.getString("PNL_SELECTARTICLE"))); // Add Table c.gridx = 0; c.gridy = 0; c.anchor = GridBagConstraints.CENTER; scrl_articles.setMinimumSize(new Dimension(570,600)); scrl_articles.setPreferredSize(new Dimension(ConfigFactory.getConfig().getDefaultPanelWidth(), (ConfigFactory.getConfig().getDefaultPanelHeight()/2)-50)); gb_article.setConstraints(scrl_articles, c); pn_article.add(scrl_articles); // Add Add Button c.gridx = 0; c.gridy = 1; c.anchor = GridBagConstraints.LINE_END; btn_add.addActionListener(this); btn_add.setActionCommand("add"); gb_article.setConstraints(btn_add, c); btn_add.setEnabled(false); pn_article.add(btn_add); // Add Spinner c.gridx = 0; c.gridy = 1; c.anchor = GridBagConstraints.LINE_START; gb_article.setConstraints(sp_num, c); pn_article.add(sp_num); // Add Article Panel c.gridx = 0; c.gridy = 0; gb_main.setConstraints(pn_article, c); 
add(pn_article); /** * List Panel */ // Set Layout GridBagLayout gb_list = new GridBagLayout(); pn_list.setLayout(gb_list); pn_list.setBorder(BorderFactory.createTitledBorder(lang.getString("PNL_SELECTARTICLE_BILL"))); // Add Table c.gridx = 0; c.gridy = 0; c.anchor = GridBagConstraints.CENTER; scrl_chosen.setMinimumSize(new Dimension(570,600)); scrl_chosen.setPreferredSize(new Dimension(ConfigFactory.getConfig().getDefaultPanelWidth(), (ConfigFactory.getConfig().getDefaultPanelHeight()/2)-50)); gb_list.setConstraints(scrl_chosen, c); pn_list.add(scrl_chosen); // Add Reset Button c.gridx = 0; c.gridy = 1; c.anchor = GridBagConstraints.LINE_START; btn_reset.addActionListener(this); btn_reset.setActionCommand("reset"); gb_list.setConstraints(btn_reset, c); btn_reset.setEnabled(true); pn_list.add(btn_reset); // Add Remove Button c.gridx = 0; c.gridy = 1; c.anchor = GridBagConstraints.LINE_END; btn_remove.addActionListener(this); btn_remove.setActionCommand("remove"); gb_list.setConstraints(btn_remove, c); btn_remove.setEnabled(false); pn_list.add(btn_remove); // Add List Panel c.gridx = 0; c.gridy = 1; gb_main.setConstraints(pn_list, c); add(pn_list); /** * Trail Buttons */ // Add Next Button c.gridx = 0; c.gridy = 2; c.anchor = GridBagConstraints.LINE_END; btn_next.addActionListener(this); btn_next.setActionCommand("next"); gb_main.setConstraints(btn_next, c); btn_next.setEnabled(false); add(btn_next); // Add Next Button c.gridx = 0; c.gridy = 2; c.anchor = GridBagConstraints.LINE_START; gb_main.setConstraints(lbl_sum, c); add(lbl_sum); // Set Visible setVisible(true); loadArtikelList(); logger.debug("WerbematerialienVerkaufenSchritt2Gui Created Successfully"); } public void actionPerformed(ActionEvent evt) { // Add if(evt.getActionCommand().equals("add")){ if(tb_articles.getSelectedRow() != -1 && tb_articles.getValueAt(tb_articles.getSelectedRow(),0) !=null && !tb_articles.getValueAt(tb_articles.getSelectedRow(),0).equals("")) { ArtikelDAO adao = 
DAOFactory.getArtikelDAO(); for(int i = 0; i<nm.getNumber().intValue();i++) selectedArtikel.add(adao.get((Integer)tb_articles.getValueAt(tb_articles.getSelectedRow(),0))); loadSelectedList(); } btn_next.setEnabled(true); } // Remove if(evt.getActionCommand().equals("remove")){ if(lst_chosen.getSelectedIndex()!=-1) { int artnum = listToSelected[lst_chosen.getSelectedIndex()]; for(int i = 0; i<selectedArtikel.size(); i++) { if(selectedArtikel.get(i).getArtikelnr()==artnum) { selectedArtikel.remove(selectedArtikel.get(i)); i--; } } loadSelectedList(); } if(lModel.getSize() > 0 ) { btn_next.setEnabled(true); } else{ btn_next.setEnabled(false); } } // Reset if(evt.getActionCommand().equals("reset")){ selectedArtikel.clear(); loadSelectedList(); btn_next.setEnabled(false); } // Next if(evt.getActionCommand().equals("next")){ Integer sum = 0; //Calculate the sum of the bill sum = calcSum(); //Show Question int n = JOptionPane.showConfirmDialog( this, new MessageFormat(lang.getString("OPT_SELECTARTICLE_QUESTION")).format(new String[] { sum.toString() }).toString(), lang.getString("OPH_SELECTARTICLE_QUESTION"), JOptionPane.YES_NO_OPTION); //Show Information if(n==0) { BestellungDAO bdao = DAOFactory.getBestellungDAO(); for(Artikel a : list) { Kunde k = GuiMemory.getKunde(); Bestellung b = new Bestellung(); BestellungKey bkey = new BestellungKey(); bkey.setArtikelnr(a.getArtikelnr()); bkey.setDatumuhrzeit(new Date()); bkey.setKartennr(k.getKartennr()); b.setComp_id(bkey); b.setArtikel(a); b.setKunde(k); b.setMenge(count[a.getArtikelnr()]); b.setZahlart("bar"); bdao.save(b); } JOptionPane.showMessageDialog(this,lang.getString("OPT_SELECTARTICLE_SOLD"),lang.getString("OPH_SELECTARTICLE_SOLD"),JOptionPane.INFORMATION_MESSAGE); nm.setValue(1); tb_articles.clearSelection(); btn_next.setEnabled(false); selectedArtikel.clear(); loadSelectedList(); } } } @Override public void dummySearch() { // TODO Auto-generated method stub } @Override public void reloadChanges() { } private int 
calcSum() { int sum = 0; for(Artikel a : selectedArtikel) { sum += a.getPreis().intValue(); } return sum; } private void loadSelectedList() { lModel.clear(); list = new Vector<Artikel>(); count = new int[tModel.getRowCount()+1]; for(Artikel a : selectedArtikel) { count[a.getArtikelnr()]++; if(!list.contains(a)) { listToSelected[list.size()] = a.getArtikelnr(); list.add(a); } } for(Artikel a : list) { lModel.addElement("Anzahl: " + count[a.getArtikelnr()] + " ArtikelNr: " + a.getArtikelnr() + " Bezeichnung: " + a.getKurzbezeichnung() ); } lbl_sum.setText(lang.getString("LBL_SELECTARTICLE_SUM") + ": " + calcSum()); } private void loadArtikelList() { ArtikelDAO adao = DAOFactory.getArtikelDAO(); try { List<Artikel> result = adao.getAll(); if(result.size() == 0){ System.out.println("Nothing Found!"); } else { data = new Object[result.size()][5]; int i = 0; for(Artikel art:result){ data[i][0] = art.getArtikelnr(); data[i][1] = art.getKurzbezeichnung(); switch(art.getKategorie().charAt(0)) { case('0'): data[i][2]="T_Shirt"; break; case('1'): data[i][2]="Poster"; break; case('2'): data[i][2]="CD/LP"; break; case('3'): data[i][2]="Video/DVD"; break; case('4'): data[i][2]="Sonstiges"; break; } data[i][3] = art.getBeschreibung(); data[i][4] = art.getPreis(); i++; } tModel = new NoEditTableModel(data,colnames); tb_articles.setModel(tModel); listToSelected = new int[result.size()+1]; } } catch(Exception e) { System.out.println(e.getStackTrace()); } } @Override public void valueChanged(ListSelectionEvent arg0) { if(lst_chosen.getSelectedIndex() != -1 ) { btn_remove.setEnabled(true); } else{ btn_remove.setEnabled(false); } if(tb_articles.getSelectedRow() != -1 && tb_articles.getValueAt(tb_articles.getSelectedRow(),0) !=null && !tb_articles.getValueAt(tb_articles.getSelectedRow(),0).equals("")) { ArtikelDAO adao = DAOFactory.getArtikelDAO(); Artikel art = adao.get((Integer)tb_articles.getValueAt(tb_articles.getSelectedRow(),0)); btn_add.setEnabled(true); btn_add.setText(new 
MessageFormat(lang.getString("BTN_SELECTARTICLE_NEXTSELECTARTICLE")).format(new String[] { tb_articles.getValueAt(tb_articles.getSelectedRow(),0).toString() })); if(ConfigFactory.getConfig().getDefaultPanelWidth() < 700){ btn_add.setPreferredSize(new Dimension(ConfigFactory.getConfig().getDefaultPanelWidth()/2,22)); } } else { btn_add.setText(lang.getString("BTN_SELECTARTICLE_NEXT")); btn_add.setEnabled(false); } } @Override public void setTrail(Trail trail) { this.trail = trail; //getData(); } }
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.example.android.sunshine.app; import android.annotation.SuppressLint; import android.content.Context; import android.content.SharedPreferences; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.net.ConnectivityManager; import android.net.NetworkInfo; import android.preference.PreferenceManager; import android.text.format.Time; import com.example.android.sunshine.app.sync.SunshineSyncAdapter; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.Date; public class Utility { public static final String DATE_FORMAT = "yyyyMMdd"; public static String getPreferredLocation(Context context) { SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context); return prefs.getString(context.getString(R.string.pref_location_key), context.getString(R.string.pref_location_default)); } public static boolean isMetric(Context context) { SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context); return prefs.getString(context.getString(R.string.pref_units_key), context.getString(R.string.pref_units_metric)) .equals(context.getString(R.string.pref_units_metric)); } public static String formatTemperature(Context context,double temperature, boolean isMetric) { double temp; if (!isMetric){ temp = 9*temperature/5+32; }else{ temp = temperature; } return 
context.getString(R.string.format_temperature,temp); } static String formatDate(long dateInMillis) { Date date = new Date(dateInMillis); return DateFormat.getDateInstance().format(date); } /** * Helper method to convert the database representation of the date into something to display * to users. As classy and polished a user experience as "20140102" is, we can do better. * * @param context Context to use for resource localization * @param dateInMillis The date in milliseconds * @return a user-friendly representation of the date. */ @SuppressLint("StringFormatMatches") public static String getFriendlyDayString(Context context, long dateInMillis) { // The day string for forecast uses the following logic: // For today: "Today, June 8" // For tomorrow: "Tomorrow" // For the next 5 days: "Wednesday" (just the day name) // For all days after that: "Mon Jun 8" Calendar calendar = Calendar.getInstance(); int currentJulianDay = calendar.get(Calendar.DAY_OF_YEAR); calendar.setTimeInMillis(dateInMillis); int julianDay = calendar.get(Calendar.DAY_OF_YEAR); // If the date we're building the String for is today's date, the format // is "Today, June 24" if (julianDay == currentJulianDay) { String today = context.getString(R.string.today); int formatId = R.string.format_full_friendly_date; return String.format(context.getString( formatId, today, getFormattedMonthDay(context, dateInMillis))); } else if ( julianDay < currentJulianDay + 7 ) { // If the input date is less than a week in the future, just return the day name. return getDayName(context, dateInMillis); } else { // Otherwise, use the form "Mon Jun 3" SimpleDateFormat shortenedDateFormat = new SimpleDateFormat("EEE MMM dd"); return shortenedDateFormat.format(dateInMillis); } } /** * Given a day, returns just the name to use for that day. * E.g "today", "tomorrow", "wednesday". 
* * @param context Context to use for resource localization * @param dateInMillis The date in milliseconds * @return */ public static String getDayName(Context context, long dateInMillis) { // If the date is today, return the localized version of "Today" instead of the actual // day name. Time t = new Time(); t.setToNow(); int julianDay = Time.getJulianDay(dateInMillis, t.gmtoff); int currentJulianDay = Time.getJulianDay(System.currentTimeMillis(), t.gmtoff); if (julianDay == currentJulianDay) { return context.getString(R.string.today); } else if ( julianDay == currentJulianDay +1 ) { return context.getString(R.string.tomorrow); } else { Time time = new Time(); time.setToNow(); // Otherwise, the format is just the day of the week (e.g "Wednesday". SimpleDateFormat dayFormat = new SimpleDateFormat("EEEE"); return dayFormat.format(dateInMillis); } } public static String getFormattedHumidity (Context context, double humidity){ return context.getString(R.string.format_humidity,humidity); } public static String getFormattedPressure (Context context, double pressure){ return context.getString(R.string.format_pressure,pressure); } /** * Converts db date format to the format "Month day", e.g "June 24". 
* @param context Context to use for resource localization * @param dateInMillis The db formatted date string, expected to be of the form specified * in Utility.DATE_FORMAT * @return The day in the form of a string formatted "December 6" */ public static String getFormattedMonthDay(Context context, long dateInMillis ) { Time time = new Time(); time.setToNow(); SimpleDateFormat dbDateFormat = new SimpleDateFormat(Utility.DATE_FORMAT); SimpleDateFormat monthDayFormat = new SimpleDateFormat("MMMM dd"); String monthDayString = monthDayFormat.format(dateInMillis); return monthDayString; } public static String getFormattedWind(Context context, float windSpeed, float degrees) { int windFormat; if (Utility.isMetric(context)) { windFormat = R.string.format_wind_kmh; } else { windFormat = R.string.format_wind_mph; windSpeed = .621371192237334f * windSpeed; } // From wind direction in degrees, determine compass direction as a string (e.g NW) // You know what's fun, writing really long if/else statements with tons of possible // conditions. Seriously, try it! String direction = "Unknown"; if (degrees >= 337.5 || degrees < 22.5) { direction = "N"; } else if (degrees >= 22.5 && degrees < 67.5) { direction = "NE"; } else if (degrees >= 67.5 && degrees < 112.5) { direction = "E"; } else if (degrees >= 112.5 && degrees < 157.5) { direction = "SE"; } else if (degrees >= 157.5 && degrees < 202.5) { direction = "S"; } else if (degrees >= 202.5 && degrees < 247.5) { direction = "SW"; } else if (degrees >= 247.5 && degrees < 292.5) { direction = "W"; } else if (degrees >= 292.5 && degrees < 337.5) { direction = "NW"; } return String.format(context.getString(windFormat), windSpeed, direction); } /** * Helper method to provide the icon resource id according to the weather condition id returned * by the OpenWeatherMap call. * @param weatherId from OpenWeatherMap API response * @return resource id for the corresponding icon. -1 if no relation is found. 
*/ public static int getSmallIconResourceForWeatherCondition(int weatherId) { // Based on weather code data found at: // http://bugs.openweathermap.org/projects/api/wiki/Weather_Condition_Codes if (weatherId >= 200 && weatherId <= 232) { return R.drawable.ic_storm; } else if (weatherId >= 300 && weatherId <= 321) { return R.drawable.ic_light_rain; } else if (weatherId >= 500 && weatherId <= 504) { return R.drawable.ic_rain; } else if (weatherId == 511) { return R.drawable.ic_snow; } else if (weatherId >= 520 && weatherId <= 531) { return R.drawable.ic_rain; } else if (weatherId >= 600 && weatherId <= 622) { return R.drawable.ic_snow; } else if (weatherId >= 701 && weatherId <= 761) { return R.drawable.ic_fog; } else if (weatherId == 761 || weatherId == 781) { return R.drawable.ic_storm; } else if (weatherId == 800) { return R.drawable.ic_clear; } else if (weatherId == 801) { return R.drawable.ic_light_clouds; } else if (weatherId >= 802 && weatherId <= 804) { return R.drawable.ic_cloudy; } return -1; } public static Bitmap getLargeIconResourceForWeatherCondition(Context context,int weatherId) { // Based on weather code data found at: // http://bugs.openweathermap.org/projects/api/wiki/Weather_Condition_Codes if (weatherId >= 200 && weatherId <= 232) { return BitmapFactory.decodeResource(context.getResources(),R.drawable.art_storm); } else if (weatherId >= 300 && weatherId <= 321) { return BitmapFactory.decodeResource(context.getResources(),R.drawable.art_light_rain); } else if (weatherId >= 500 && weatherId <= 504) { return BitmapFactory.decodeResource(context.getResources(),R.drawable.art_rain); } else if (weatherId == 511) { return BitmapFactory.decodeResource(context.getResources(),R.drawable.art_snow); } else if (weatherId >= 520 && weatherId <= 531) { return BitmapFactory.decodeResource(context.getResources(),R.drawable.art_rain); } else if (weatherId >= 600 && weatherId <= 622) { return BitmapFactory.decodeResource(context.getResources(),R.drawable.art_snow); 
} else if (weatherId >= 701 && weatherId <= 761) { return BitmapFactory.decodeResource(context.getResources(),R.drawable.art_fog); } else if (weatherId == 761 || weatherId == 781) { return BitmapFactory.decodeResource(context.getResources(),R.drawable.art_storm); } else if (weatherId == 800) { return BitmapFactory.decodeResource(context.getResources(),R.drawable.art_clear); } else if (weatherId == 801) { return BitmapFactory.decodeResource(context.getResources(),R.drawable.art_light_clouds); } else if (weatherId >= 802 && weatherId <= 804) { return BitmapFactory.decodeResource(context.getResources(),R.drawable.art_clouds); } return null; } /** * Helper method to provide the art resource id according to the weather condition id returned * by the OpenWeatherMap call. * @param weatherId from OpenWeatherMap API response * @return resource id for the corresponding image. -1 if no relation is found. */ public static int getArtResourceForWeatherCondition(int weatherId) { // Based on weather code data found at: // http://bugs.openweathermap.org/projects/api/wiki/Weather_Condition_Codes if (weatherId >= 200 && weatherId <= 232) { return R.drawable.art_storm; } else if (weatherId >= 300 && weatherId <= 321) { return R.drawable.art_light_rain; } else if (weatherId >= 500 && weatherId <= 504) { return R.drawable.art_rain; } else if (weatherId == 511) { return R.drawable.art_snow; } else if (weatherId >= 520 && weatherId <= 531) { return R.drawable.art_rain; } else if (weatherId >= 600 && weatherId <= 622) { return R.drawable.art_rain; } else if (weatherId >= 701 && weatherId <= 761) { return R.drawable.art_fog; } else if (weatherId == 761 || weatherId == 781) { return R.drawable.art_storm; } else if (weatherId == 800) { return R.drawable.art_clear; } else if (weatherId == 801) { return R.drawable.art_light_clouds; } else if (weatherId >= 802 && weatherId <= 804) { return R.drawable.art_clouds; } return -1; } // @return true if the network is available public static boolean 
isNetworkAvailable(Context context){ ConnectivityManager connectivityManager = (ConnectivityManager) context.getSystemService(context.CONNECTIVITY_SERVICE); NetworkInfo networkInfo = connectivityManager.getActiveNetworkInfo(); return (networkInfo != null && networkInfo.isConnected()); } // @param c Context used to get the SharedPreferences // @return the location status integer type @SuppressWarnings("ResourceType") public static @SunshineSyncAdapter.LocationStatus int getLocationStatus (Context context){ SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context); return sharedPreferences.getInt(context.getString(R.string.pref_location_status_key),SunshineSyncAdapter.LOCATION_STATUS_UNKNOWN); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerUpdateType; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.api.records.UpdateContainerRequest; import org.apache.hadoop.yarn.api.records.UpdatedContainer; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Dispatcher; import org.apache.hadoop.yarn.event.DrainDispatcher; import org.apache.hadoop.yarn.server.resourcemanager.MockAM; import org.apache.hadoop.yarn.server.resourcemanager.MockNM; import 
org.apache.hadoop.yarn.server.resourcemanager.MockRM; import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.NullRMNodeLabelsManager; import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerEvent; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerEventType; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerState; import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode; import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeImpl; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerNode; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerApp; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica .FiCaSchedulerNode; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptRemovedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.placement.PlacementSet; import org.apache.hadoop.yarn.util.resource.Resources; import org.junit.Assert; import org.junit.Before; import org.junit.Test; public class TestContainerResizing { private static final Log LOG = LogFactory.getLog(TestContainerResizing.class); private final int GB = 1024; private YarnConfiguration conf; RMNodeLabelsManager mgr; class MyScheduler extends CapacityScheduler { /* * A Mock Scheduler to simulate the potential effect of deadlock between: * 1. 
The AbstractYarnScheduler.decreaseContainers() call (from * ApplicationMasterService thread) * 2. The CapacityScheduler.allocateContainersToNode() call (from the * scheduler thread) */ MyScheduler() { super(); } @Override public CSAssignment allocateContainersToNode( PlacementSet<FiCaSchedulerNode> ps, boolean withNodeHeartbeat) { try { Thread.sleep(1000); } catch(InterruptedException e) { LOG.debug("Thread interrupted."); } return super.allocateContainersToNode(ps, withNodeHeartbeat); } } @Before public void setUp() throws Exception { conf = new YarnConfiguration(); conf.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class, ResourceScheduler.class); mgr = new NullRMNodeLabelsManager(); mgr.init(conf); } @Test public void testSimpleIncreaseContainer() throws Exception { /** * Application has a container running, and the node has enough available * resource. Add a increase request to see if container will be increased */ MockRM rm1 = new MockRM() { @Override public RMNodeLabelsManager createNodeLabelManager() { return mgr; } }; rm1.start(); MockNM nm1 = rm1.registerNode("h1:1234", 20 * GB); // app1 -> a1 RMApp app1 = rm1.submitApp(1 * GB, "app", "user", null, "default"); MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1); ContainerId containerId1 = ContainerId.newContainerId(am1.getApplicationAttemptId(), 1); sentRMContainerLaunched(rm1, containerId1); // am1 asks to change its AM container from 1GB to 3GB am1.sendContainerResizingRequest(Arrays.asList( UpdateContainerRequest .newInstance(0, containerId1, ContainerUpdateType.INCREASE_RESOURCE, Resources.createResource(3 * GB), null))); FiCaSchedulerApp app = TestUtils.getFiCaSchedulerApp( rm1, app1.getApplicationId()); checkPendingResource(rm1, "default", 2 * GB, null); Assert.assertEquals(2 * GB, app.getAppAttemptResourceUsage().getPending().getMemorySize()); // NM1 do 1 heartbeats CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler(); RMNode rmNode1 = 
rm1.getRMContext().getRMNodes().get(nm1.getNodeId()); cs.handle(new NodeUpdateSchedulerEvent(rmNode1)); // Pending resource should be deducted checkPendingResource(rm1, "default", 0 * GB, null); Assert.assertEquals(0 * GB, app.getAppAttemptResourceUsage().getPending().getMemorySize()); verifyContainerIncreased(am1.allocate(null, null), containerId1, 3 * GB); verifyAvailableResourceOfSchedulerNode(rm1, nm1.getNodeId(), 17 * GB); rm1.close(); } @Test public void testSimpleDecreaseContainer() throws Exception { /** * Application has a container running, try to decrease the container and * check queue's usage and container resource will be updated. */ final DrainDispatcher dispatcher = new DrainDispatcher(); MockRM rm1 = new MockRM() { @Override public RMNodeLabelsManager createNodeLabelManager() { return mgr; } @Override protected Dispatcher createDispatcher() { return dispatcher; } }; rm1.start(); MockNM nm1 = rm1.registerNode("h1:1234", 20 * GB); // app1 -> a1 RMApp app1 = rm1.submitApp(3 * GB, "app", "user", null, "default"); MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1); FiCaSchedulerApp app = TestUtils.getFiCaSchedulerApp( rm1, app1.getApplicationId()); checkUsedResource(rm1, "default", 3 * GB, null); Assert.assertEquals(3 * GB, app.getAppAttemptResourceUsage().getUsed().getMemorySize()); ContainerId containerId1 = ContainerId.newContainerId(am1.getApplicationAttemptId(), 1); sentRMContainerLaunched(rm1, containerId1); // am1 asks to change its AM container from 1GB to 3GB AllocateResponse response = am1.sendContainerResizingRequest(Arrays .asList(UpdateContainerRequest .newInstance(0, containerId1, ContainerUpdateType.DECREASE_RESOURCE, Resources.createResource(1 * GB), null))); verifyContainerDecreased(response, containerId1, 1 * GB); // Wait for scheduler to finish processing kill events.. 
dispatcher.waitForEventThreadToWait(); checkUsedResource(rm1, "default", 1 * GB, null); Assert.assertEquals(1 * GB, app.getAppAttemptResourceUsage().getUsed().getMemorySize()); // Check if decreased containers added to RMNode RMNodeImpl rmNode = (RMNodeImpl) rm1.getRMContext().getRMNodes().get(nm1.getNodeId()); Collection<Container> decreasedContainers = rmNode.getToBeUpdatedContainers(); boolean rmNodeReceivedDecreaseContainer = false; for (Container c : decreasedContainers) { if (c.getId().equals(containerId1) && c.getResource().equals(Resources.createResource(1 * GB))) { rmNodeReceivedDecreaseContainer = true; } } Assert.assertTrue(rmNodeReceivedDecreaseContainer); rm1.close(); } @Test public void testSimpleIncreaseRequestReservation() throws Exception { /** * Application has two containers running, try to increase one of then, node * doesn't have enough resource, so the increase request will be reserved. * Check resource usage after container reserved, finish a container, the * reserved container should be allocated. 
*/ MockRM rm1 = new MockRM() { @Override public RMNodeLabelsManager createNodeLabelManager() { return mgr; } }; rm1.start(); MockNM nm1 = rm1.registerNode("h1:1234", 8 * GB); MockNM nm2 = rm1.registerNode("h2:1234", 8 * GB); // app1 -> a1 RMApp app1 = rm1.submitApp(1 * GB, "app", "user", null, "default"); MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1); FiCaSchedulerApp app = TestUtils.getFiCaSchedulerApp( rm1, app1.getApplicationId()); // Allocate two more containers am1.allocate( Arrays.asList(ResourceRequest.newInstance(Priority.newInstance(1), "*", Resources.createResource(2 * GB), 1)), null); ContainerId containerId2 = ContainerId.newContainerId(am1.getApplicationAttemptId(), 2); Assert.assertTrue(rm1.waitForState(nm1, containerId2, RMContainerState.ALLOCATED)); // Acquire them, and NM report RUNNING am1.allocate(null, null); sentRMContainerLaunched(rm1, containerId2); ContainerId containerId1 = ContainerId.newContainerId(am1.getApplicationAttemptId(), 1); sentRMContainerLaunched(rm1, containerId1); // am1 asks to change its AM container from 1GB to 3GB am1.sendContainerResizingRequest(Arrays.asList( UpdateContainerRequest .newInstance(0, containerId1, ContainerUpdateType.INCREASE_RESOURCE, Resources.createResource(7 * GB), null))); checkPendingResource(rm1, "default", 6 * GB, null); Assert.assertEquals(6 * GB, app.getAppAttemptResourceUsage().getPending().getMemorySize()); // NM1 do 1 heartbeats CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler(); RMNode rmNode1 = rm1.getRMContext().getRMNodes().get(nm1.getNodeId()); cs.handle(new NodeUpdateSchedulerEvent(rmNode1)); /* Check reservation statuses */ // Increase request should be reserved Assert.assertFalse(app.getReservedContainers().isEmpty()); Assert.assertNotNull(cs.getNode(nm1.getNodeId()).getReservedContainer()); // Pending resource will not be changed since it's not satisfied checkPendingResource(rm1, "default", 6 * GB, null); Assert.assertEquals(6 * GB, 
app.getAppAttemptResourceUsage().getPending().getMemorySize()); // Queue/user/application's usage will be updated checkUsedResource(rm1, "default", 9 * GB, null); Assert.assertEquals(9 * GB, ((LeafQueue) cs.getQueue("default")) .getUser("user").getUsed().getMemorySize()); Assert.assertEquals(3 * GB, app.getAppAttemptResourceUsage().getUsed().getMemorySize()); Assert.assertEquals(6 * GB, app.getAppAttemptResourceUsage().getReserved().getMemorySize()); // Complete one container and do another allocation am1.allocate(null, Arrays.asList(containerId2)); cs.handle(new NodeUpdateSchedulerEvent(rmNode1)); // Now container should be increased verifyContainerIncreased(am1.allocate(null, null), containerId1, 7 * GB); /* Check statuses after reservation satisfied */ // Increase request should be unreserved Assert.assertTrue(app.getReservedContainers().isEmpty()); Assert.assertNull(cs.getNode(nm1.getNodeId()).getReservedContainer()); // Pending resource will be changed since it's satisfied checkPendingResource(rm1, "default", 0 * GB, null); Assert.assertEquals(0 * GB, app.getAppAttemptResourceUsage().getPending().getMemorySize()); // Queue/user/application's usage will be updated checkUsedResource(rm1, "default", 7 * GB, null); Assert.assertEquals(7 * GB, ((LeafQueue) cs.getQueue("default")) .getUser("user").getUsed().getMemorySize()); Assert.assertEquals(0 * GB, app.getAppAttemptResourceUsage().getReserved().getMemorySize()); Assert.assertEquals(7 * GB, app.getAppAttemptResourceUsage().getUsed().getMemorySize()); verifyAvailableResourceOfSchedulerNode(rm1, nm1.getNodeId(), 1 * GB); rm1.close(); } @Test public void testIncreaseRequestWithNoHeadroomLeft() throws Exception { /** * Application has two containers running, try to increase one of them, the * requested amount exceeds user's headroom for the queue. 
*/ MockRM rm1 = new MockRM() { @Override public RMNodeLabelsManager createNodeLabelManager() { return mgr; } }; rm1.start(); MockNM nm1 = rm1.registerNode("h1:1234", 8 * GB); // app1 -> a1 RMApp app1 = rm1.submitApp(1 * GB, "app", "user", null, "default"); MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1); FiCaSchedulerApp app = TestUtils.getFiCaSchedulerApp( rm1, app1.getApplicationId()); // Allocate 1 container am1.allocate( Arrays.asList(ResourceRequest.newInstance(Priority.newInstance(1), "*", Resources.createResource(2 * GB), 1)), null); ContainerId containerId2 = ContainerId.newContainerId(am1.getApplicationAttemptId(), 2); Assert.assertTrue(rm1.waitForState(nm1, containerId2, RMContainerState.ALLOCATED)); // Acquire them, and NM report RUNNING am1.allocate(null, null); sentRMContainerLaunched(rm1, containerId2); // am1 asks to change container2 from 2GB to 8GB, which will exceed user // limit am1.sendContainerResizingRequest(Arrays.asList( UpdateContainerRequest .newInstance(0, containerId2, ContainerUpdateType.INCREASE_RESOURCE, Resources.createResource(8 * GB), null))); checkPendingResource(rm1, "default", 6 * GB, null); Assert.assertEquals(6 * GB, app.getAppAttemptResourceUsage().getPending().getMemorySize()); // NM1 do 1 heartbeats CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler(); RMNode rmNode1 = rm1.getRMContext().getRMNodes().get(nm1.getNodeId()); cs.handle(new NodeUpdateSchedulerEvent(rmNode1)); /* Check reservation statuses */ // Increase request should *NOT* be reserved as it exceeds user limit Assert.assertTrue(app.getReservedContainers().isEmpty()); Assert.assertNull(cs.getNode(nm1.getNodeId()).getReservedContainer()); // Pending resource will not be changed since it's not satisfied checkPendingResource(rm1, "default", 6 * GB, null); Assert.assertEquals(6 * GB, app.getAppAttemptResourceUsage().getPending().getMemorySize()); // Queue/user/application's usage will *NOT* be updated checkUsedResource(rm1, "default", 3 * GB, 
null); Assert.assertEquals(3 * GB, ((LeafQueue) cs.getQueue("default")) .getUser("user").getUsed().getMemorySize()); Assert.assertEquals(3 * GB, app.getAppAttemptResourceUsage().getUsed().getMemorySize()); Assert.assertEquals(0 * GB, app.getAppAttemptResourceUsage().getReserved().getMemorySize()); rm1.close(); } @Test public void testExcessiveReservationWhenCancelIncreaseRequest() throws Exception { /** * Application has two containers running, try to increase one of then, node * doesn't have enough resource, so the increase request will be reserved. * Check resource usage after container reserved, finish a container & * cancel the increase request, reservation should be cancelled */ MockRM rm1 = new MockRM() { @Override public RMNodeLabelsManager createNodeLabelManager() { return mgr; } }; rm1.start(); MockNM nm1 = rm1.registerNode("h1:1234", 8 * GB); MockNM nm2 = rm1.registerNode("h2:1234", 8 * GB); // app1 -> a1 RMApp app1 = rm1.submitApp(1 * GB, "app", "user", null, "default"); MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1); FiCaSchedulerApp app = TestUtils.getFiCaSchedulerApp( rm1, app1.getApplicationId()); // Allocate two more containers am1.allocate( Arrays.asList(ResourceRequest.newInstance(Priority.newInstance(1), "*", Resources.createResource(2 * GB), 1)), null); ContainerId containerId2 = ContainerId.newContainerId(am1.getApplicationAttemptId(), 2); Assert.assertTrue(rm1.waitForState(nm1, containerId2, RMContainerState.ALLOCATED)); // Acquire them, and NM report RUNNING am1.allocate(null, null); sentRMContainerLaunched(rm1, containerId2); ContainerId containerId1 = ContainerId.newContainerId(am1.getApplicationAttemptId(), 1); sentRMContainerLaunched(rm1, containerId1); // am1 asks to change its AM container from 1GB to 7GB am1.sendContainerResizingRequest(Arrays.asList( UpdateContainerRequest .newInstance(0, containerId1, ContainerUpdateType.INCREASE_RESOURCE, Resources.createResource(7 * GB), null))); checkPendingResource(rm1, "default", 6 * 
GB, null); Assert.assertEquals(6 * GB, app.getAppAttemptResourceUsage().getPending().getMemorySize()); // NM1 do 1 heartbeats CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler(); RMNode rmNode1 = rm1.getRMContext().getRMNodes().get(nm1.getNodeId()); cs.handle(new NodeUpdateSchedulerEvent(rmNode1)); /* Check reservation statuses */ // Increase request should be reserved Assert.assertFalse(app.getReservedContainers().isEmpty()); Assert.assertNotNull(cs.getNode(nm1.getNodeId()).getReservedContainer()); // Pending resource will not be changed since it's not satisfied checkPendingResource(rm1, "default", 6 * GB, null); Assert.assertEquals(6 * GB, app.getAppAttemptResourceUsage().getPending().getMemorySize()); // Queue/user/application's usage will be updated checkUsedResource(rm1, "default", 9 * GB, null); Assert.assertEquals(9 * GB, ((LeafQueue) cs.getQueue("default")) .getUser("user").getUsed().getMemorySize()); Assert.assertEquals(3 * GB, app.getAppAttemptResourceUsage().getUsed().getMemorySize()); Assert.assertEquals(6 * GB, app.getAppAttemptResourceUsage().getReserved().getMemorySize()); // Complete one container and cancel increase request (via send a increase // request, make target_capacity=existing_capacity) am1.allocate(null, Arrays.asList(containerId2)); // am1 asks to change its AM container from 1G to 1G (cancel the increase // request actually) am1.sendContainerResizingRequest(Arrays.asList( UpdateContainerRequest .newInstance(0, containerId1, ContainerUpdateType.INCREASE_RESOURCE, Resources.createResource(1 * GB), null))); // Trigger a node heartbeat.. 
cs.handle(new NodeUpdateSchedulerEvent(rmNode1)); /* Check statuses after reservation satisfied */ // Increase request should be unreserved Assert.assertTrue(app.getReservedContainers().isEmpty()); Assert.assertNull(cs.getNode(nm1.getNodeId()).getReservedContainer()); // Pending resource will be changed since it's satisfied checkPendingResource(rm1, "default", 0 * GB, null); Assert.assertEquals(0 * GB, app.getAppAttemptResourceUsage().getPending().getMemorySize()); // Queue/user/application's usage will be updated checkUsedResource(rm1, "default", 1 * GB, null); Assert.assertEquals(1 * GB, ((LeafQueue) cs.getQueue("default")) .getUser("user").getUsed().getMemorySize()); Assert.assertEquals(0 * GB, app.getAppAttemptResourceUsage().getReserved().getMemorySize()); Assert.assertEquals(1 * GB, app.getAppAttemptResourceUsage().getUsed().getMemorySize()); rm1.close(); } @Test public void testExcessiveReservationWhenDecreaseSameContainer() throws Exception { /** * Very similar to testExcessiveReservationWhenCancelIncreaseRequest, after * the increase request reserved, it decreases the reserved container, * container should be decreased and reservation will be cancelled */ final DrainDispatcher dispatcher = new DrainDispatcher(); MockRM rm1 = new MockRM() { @Override public RMNodeLabelsManager createNodeLabelManager() { return mgr; } @Override protected Dispatcher createDispatcher() { return dispatcher; } }; rm1.start(); MockNM nm1 = rm1.registerNode("h1:1234", 8 * GB); MockNM nm2 = rm1.registerNode("h2:1234", 8 * GB); // app1 -> a1 RMApp app1 = rm1.submitApp(2 * GB, "app", "user", null, "default"); MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1); FiCaSchedulerApp app = TestUtils.getFiCaSchedulerApp( rm1, app1.getApplicationId()); // Allocate two more containers am1.allocate( Arrays.asList(ResourceRequest.newInstance(Priority.newInstance(1), "*", Resources.createResource(2 * GB), 1)), null); ContainerId containerId2 = 
ContainerId.newContainerId(am1.getApplicationAttemptId(), 2); Assert.assertTrue(rm1.waitForState(nm1, containerId2, RMContainerState.ALLOCATED)); // Acquire them, and NM report RUNNING am1.allocate(null, null); sentRMContainerLaunched(rm1, containerId2); ContainerId containerId1 = ContainerId.newContainerId(am1.getApplicationAttemptId(), 1); sentRMContainerLaunched(rm1, containerId1); // am1 asks to change its AM container from 2GB to 8GB am1.sendContainerResizingRequest(Arrays.asList( UpdateContainerRequest .newInstance(0, containerId1, ContainerUpdateType.INCREASE_RESOURCE, Resources.createResource(8 * GB), null))); checkPendingResource(rm1, "default", 6 * GB, null); Assert.assertEquals(6 * GB, app.getAppAttemptResourceUsage().getPending().getMemorySize()); // NM1 do 1 heartbeats CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler(); RMNode rmNode1 = rm1.getRMContext().getRMNodes().get(nm1.getNodeId()); cs.handle(new NodeUpdateSchedulerEvent(rmNode1)); /* Check reservation statuses */ // Increase request should be reserved Assert.assertFalse(app.getReservedContainers().isEmpty()); Assert.assertNotNull(cs.getNode(nm1.getNodeId()).getReservedContainer()); // Pending resource will not be changed since it's not satisfied checkPendingResource(rm1, "default", 6 * GB, null); Assert.assertEquals(6 * GB, app.getAppAttemptResourceUsage().getPending().getMemorySize()); // Queue/user/application's usage will be updated checkUsedResource(rm1, "default", 10 * GB, null); Assert.assertEquals(10 * GB, ((LeafQueue) cs.getQueue("default")) .getUser("user").getUsed().getMemorySize()); Assert.assertEquals(4 * GB, app.getAppAttemptResourceUsage().getUsed().getMemorySize()); Assert.assertEquals(6 * GB, app.getAppAttemptResourceUsage().getReserved().getMemorySize()); // Complete one container and cancel increase request (via send a increase // request, make target_capacity=existing_capacity) am1.allocate(null, Arrays.asList(containerId2)); // am1 asks to change its AM 
container from 2G to 1G (decrease) am1.sendContainerResizingRequest(Arrays.asList( UpdateContainerRequest .newInstance(0, containerId1, ContainerUpdateType.DECREASE_RESOURCE, Resources.createResource(1 * GB), null))); // Trigger a node heartbeat.. cs.handle(new NodeUpdateSchedulerEvent(rmNode1)); dispatcher.waitForEventThreadToWait(); /* Check statuses after reservation satisfied */ // Increase request should be unreserved Assert.assertTrue(app.getReservedContainers().isEmpty()); Assert.assertNull(cs.getNode(nm1.getNodeId()).getReservedContainer()); // Pending resource will be changed since it's satisfied checkPendingResource(rm1, "default", 0 * GB, null); Assert.assertEquals(0 * GB, app.getAppAttemptResourceUsage().getPending().getMemorySize()); // Queue/user/application's usage will be updated checkUsedResource(rm1, "default", 1 * GB, null); Assert.assertEquals(1 * GB, ((LeafQueue) cs.getQueue("default")) .getUser("user").getUsed().getMemorySize()); Assert.assertEquals(0 * GB, app.getAppAttemptResourceUsage().getReserved().getMemorySize()); Assert.assertEquals(1 * GB, app.getAppAttemptResourceUsage().getUsed().getMemorySize()); rm1.close(); } @Test public void testIncreaseContainerUnreservedWhenContainerCompleted() throws Exception { /** * App has two containers on the same node (node.resource = 8G), container1 * = 2G, container2 = 2G. App asks to increase container2 to 8G. * * So increase container request will be reserved. When app releases * container2, reserved part should be released as well. 
*/ final DrainDispatcher dispatcher = new DrainDispatcher(); MockRM rm1 = new MockRM() { @Override public RMNodeLabelsManager createNodeLabelManager() { return mgr; } @Override protected Dispatcher createDispatcher() { return dispatcher; } }; rm1.start(); MockNM nm1 = rm1.registerNode("h1:1234", 8 * GB); MockNM nm2 = rm1.registerNode("h2:1234", 8 * GB); // app1 -> a1 RMApp app1 = rm1.submitApp(1 * GB, "app", "user", null, "default"); MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1); FiCaSchedulerApp app = TestUtils.getFiCaSchedulerApp( rm1, app1.getApplicationId()); // Allocate two more containers am1.allocate( Arrays.asList(ResourceRequest.newInstance(Priority.newInstance(1), "*", Resources.createResource(2 * GB), 1)), null); ContainerId containerId2 = ContainerId.newContainerId(am1.getApplicationAttemptId(), 2); Assert.assertTrue(rm1.waitForState(nm1, containerId2, RMContainerState.ALLOCATED)); // Acquire them, and NM report RUNNING am1.allocate(null, null); sentRMContainerLaunched(rm1, containerId2); rm1.waitForState(Arrays.asList(nm1, nm2), containerId2, RMContainerState.RUNNING); // am1 asks to change its AM container from 2GB to 8GB am1.sendContainerResizingRequest(Arrays.asList( UpdateContainerRequest .newInstance(0, containerId2, ContainerUpdateType.INCREASE_RESOURCE, Resources.createResource(8 * GB), null))); checkPendingResource(rm1, "default", 6 * GB, null); Assert.assertEquals(6 * GB, app.getAppAttemptResourceUsage().getPending().getMemorySize()); // NM1 do 1 heartbeats CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler(); RMNode rmNode1 = rm1.getRMContext().getRMNodes().get(nm1.getNodeId()); cs.handle(new NodeUpdateSchedulerEvent(rmNode1)); /* Check reservation statuses */ // Increase request should be reserved Assert.assertFalse(app.getReservedContainers().isEmpty()); Assert.assertNotNull(cs.getNode(nm1.getNodeId()).getReservedContainer()); // Pending resource will not be changed since it's not satisfied 
checkPendingResource(rm1, "default", 6 * GB, null); Assert.assertEquals(6 * GB, app.getAppAttemptResourceUsage().getPending().getMemorySize()); // Queue/user/application's usage will be updated checkUsedResource(rm1, "default", 9 * GB, null); Assert.assertEquals(9 * GB, ((LeafQueue) cs.getQueue("default")) .getUser("user").getUsed().getMemorySize()); Assert.assertEquals(3 * GB, app.getAppAttemptResourceUsage().getUsed().getMemorySize()); Assert.assertEquals(6 * GB, app.getAppAttemptResourceUsage().getReserved().getMemorySize()); // Complete container2, container will be unreserved and completed am1.allocate(null, Arrays.asList(containerId2)); cs.handle(new NodeUpdateSchedulerEvent(rmNode1)); am1.allocate(null, null); // Wait for scheduler to process all events. dispatcher.waitForEventThreadToWait(); /* Check statuses after reservation satisfied */ // Increase request should be unreserved Assert.assertTrue(app.getReservedContainers().isEmpty()); Assert.assertNull(cs.getNode(nm1.getNodeId()).getReservedContainer()); // Pending resource will be changed since it's satisfied checkPendingResource(rm1, "default", 0 * GB, null); Assert.assertEquals(0 * GB, app.getAppAttemptResourceUsage().getPending().getMemorySize()); // Queue/user/application's usage will be updated checkUsedResource(rm1, "default", 1 * GB, null); Assert.assertEquals(1 * GB, ((LeafQueue) cs.getQueue("default")) .getUser("user").getUsed().getMemorySize()); Assert.assertEquals(0 * GB, app.getAppAttemptResourceUsage().getReserved().getMemorySize()); Assert.assertEquals(1 * GB, app.getAppAttemptResourceUsage().getUsed().getMemorySize()); rm1.close(); } @Test public void testIncreaseContainerUnreservedWhenApplicationCompleted() throws Exception { /** * Similar to testIncreaseContainerUnreservedWhenContainerCompleted, when * application finishes, reserved increase container should be cancelled */ MockRM rm1 = new MockRM() { @Override public RMNodeLabelsManager createNodeLabelManager() { return mgr; } }; 
rm1.start(); MockNM nm1 = rm1.registerNode("h1:1234", 8 * GB); MockNM nm2 = rm1.registerNode("h2:1234", 8 * GB); // app1 -> a1 RMApp app1 = rm1.submitApp(1 * GB, "app", "user", null, "default"); MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1); FiCaSchedulerApp app = TestUtils.getFiCaSchedulerApp( rm1, app1.getApplicationId()); // Allocate two more containers am1.allocate( Arrays.asList(ResourceRequest.newInstance(Priority.newInstance(1), "*", Resources.createResource(2 * GB), 1)), null); ContainerId containerId2 = ContainerId.newContainerId(am1.getApplicationAttemptId(), 2); Assert.assertTrue( rm1.waitForState(nm1, containerId2, RMContainerState.ALLOCATED)); // Acquire them, and NM report RUNNING am1.allocate(null, null); sentRMContainerLaunched(rm1, containerId2); // am1 asks to change its AM container from 2GB to 8GB am1.sendContainerResizingRequest(Arrays.asList( UpdateContainerRequest .newInstance(0, containerId2, ContainerUpdateType.INCREASE_RESOURCE, Resources.createResource(8 * GB), null))); checkPendingResource(rm1, "default", 6 * GB, null); Assert.assertEquals(6 * GB, app.getAppAttemptResourceUsage().getPending().getMemorySize()); // NM1 do 1 heartbeats CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler(); RMNode rmNode1 = rm1.getRMContext().getRMNodes().get(nm1.getNodeId()); cs.handle(new NodeUpdateSchedulerEvent(rmNode1)); /* Check reservation statuses */ // Increase request should be reserved Assert.assertFalse(app.getReservedContainers().isEmpty()); Assert.assertNotNull(cs.getNode(nm1.getNodeId()).getReservedContainer()); // Pending resource will not be changed since it's not satisfied checkPendingResource(rm1, "default", 6 * GB, null); Assert.assertEquals(6 * GB, app.getAppAttemptResourceUsage().getPending().getMemorySize()); // Queue/user/application's usage will be updated checkUsedResource(rm1, "default", 9 * GB, null); Assert.assertEquals(9 * GB, ((LeafQueue) cs.getQueue("default")) 
.getUser("user").getUsed().getMemorySize()); Assert.assertEquals(3 * GB, app.getAppAttemptResourceUsage().getUsed().getMemorySize()); Assert.assertEquals(6 * GB, app.getAppAttemptResourceUsage().getReserved().getMemorySize()); // Kill the application cs.handle(new AppAttemptRemovedSchedulerEvent(am1.getApplicationAttemptId(), RMAppAttemptState.KILLED, false)); /* Check statuses after reservation satisfied */ // Increase request should be unreserved Assert.assertTrue(app.getReservedContainers().isEmpty()); Assert.assertNull(cs.getNode(nm1.getNodeId()).getReservedContainer()); // Pending resource will be changed since it's satisfied cs.handle(new NodeUpdateSchedulerEvent(rmNode1)); checkPendingResource(rm1, "default", 0 * GB, null); // Queue/user/application's usage will be updated checkUsedResource(rm1, "default", 0 * GB, null); // User will be removed Assert.assertNull(((LeafQueue) cs.getQueue("default")).getUser("user")); Assert.assertEquals(0 * GB, app.getAppAttemptResourceUsage().getReserved().getMemorySize()); Assert.assertEquals(0 * GB, app.getAppAttemptResourceUsage().getUsed().getMemorySize()); rm1.close(); } private void allocateAndLaunchContainers(MockAM am, MockNM nm, MockRM rm, int nContainer, int mem, int priority, int startContainerId) throws Exception { am.allocate(Arrays .asList(ResourceRequest.newInstance(Priority.newInstance(priority), "*", Resources.createResource(mem), nContainer)), null); ContainerId lastContainerId = ContainerId.newContainerId( am.getApplicationAttemptId(), startContainerId + nContainer - 1); Assert.assertTrue(rm.waitForState(nm, lastContainerId, RMContainerState.ALLOCATED)); // Acquire them, and NM report RUNNING am.allocate(null, null); for (int cId = startContainerId; cId < startContainerId + nContainer; cId++) { sentRMContainerLaunched(rm, ContainerId.newContainerId(am.getApplicationAttemptId(), cId)); rm.waitForState(nm, ContainerId.newContainerId(am.getApplicationAttemptId(), cId), RMContainerState.RUNNING); } } @Test 
public void testOrderOfIncreaseContainerRequestAllocation() throws Exception { /** * There're multiple containers need to be increased, check container will * be increase sorted by priority, if priority is same, smaller containerId * container will get preferred */ MockRM rm1 = new MockRM() { @Override public RMNodeLabelsManager createNodeLabelManager() { return mgr; } }; rm1.start(); MockNM nm1 = rm1.registerNode("h1:1234", 10 * GB); // app1 -> a1 RMApp app1 = rm1.submitApp(1 * GB, "app", "user", null, "default"); MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1); FiCaSchedulerApp app = TestUtils.getFiCaSchedulerApp( rm1, app1.getApplicationId()); ApplicationAttemptId attemptId = am1.getApplicationAttemptId(); // Container 2, 3 (priority=3) allocateAndLaunchContainers(am1, nm1, rm1, 2, 1 * GB, 3, 2); // Container 4, 5 (priority=2) allocateAndLaunchContainers(am1, nm1, rm1, 2, 1 * GB, 2, 4); // Container 6, 7 (priority=4) allocateAndLaunchContainers(am1, nm1, rm1, 2, 1 * GB, 4, 6); // am1 asks to change its container[2-7] from 1G to 2G List<UpdateContainerRequest> increaseRequests = new ArrayList<>(); for (int cId = 2; cId <= 7; cId++) { ContainerId containerId = ContainerId.newContainerId(am1.getApplicationAttemptId(), cId); increaseRequests.add(UpdateContainerRequest .newInstance(0, containerId, ContainerUpdateType.INCREASE_RESOURCE, Resources.createResource(2 * GB), null)); } am1.sendContainerResizingRequest(increaseRequests); checkPendingResource(rm1, "default", 6 * GB, null); Assert.assertEquals(6 * GB, app.getAppAttemptResourceUsage().getPending().getMemorySize()); // Get rmNode1 CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler(); RMNode rmNode1 = rm1.getRMContext().getRMNodes().get(nm1.getNodeId()); // assignContainer, container-4/5/2 increased (which has highest priority OR // earlier allocated) cs.handle(new NodeUpdateSchedulerEvent(rmNode1)); AllocateResponse allocateResponse = am1.allocate(null, null); Assert.assertEquals(3, 
allocateResponse.getUpdatedContainers().size()); verifyContainerIncreased(allocateResponse, ContainerId.newContainerId(attemptId, 4), 2 * GB); verifyContainerIncreased(allocateResponse, ContainerId.newContainerId(attemptId, 5), 2 * GB); verifyContainerIncreased(allocateResponse, ContainerId.newContainerId(attemptId, 2), 2 * GB); /* Check statuses after allocation */ // There're still 3 pending increase requests checkPendingResource(rm1, "default", 3 * GB, null); Assert.assertEquals(3 * GB, app.getAppAttemptResourceUsage().getPending().getMemorySize()); // Queue/user/application's usage will be updated checkUsedResource(rm1, "default", 10 * GB, null); Assert.assertEquals(10 * GB, ((LeafQueue) cs.getQueue("default")) .getUser("user").getUsed().getMemorySize()); Assert.assertEquals(0 * GB, app.getAppAttemptResourceUsage().getReserved().getMemorySize()); Assert.assertEquals(10 * GB, app.getAppAttemptResourceUsage().getUsed().getMemorySize()); rm1.close(); } @Test (timeout = 60000) public void testDecreaseContainerWillNotDeadlockContainerAllocation() throws Exception { // create and start MockRM with our MyScheduler MockRM rm = new MockRM() { @Override public ResourceScheduler createScheduler() { CapacityScheduler cs = new MyScheduler(); cs.setConf(conf); return cs; } }; rm.start(); // register a node MockNM nm = rm.registerNode("h1:1234", 20 * GB); // submit an application -> app1 RMApp app1 = rm.submitApp(3 * GB, "app", "user", null, "default"); MockAM am1 = MockRM.launchAndRegisterAM(app1, rm, nm); // making sure resource is allocated checkUsedResource(rm, "default", 3 * GB, null); FiCaSchedulerApp app = TestUtils.getFiCaSchedulerApp( rm, app1.getApplicationId()); Assert.assertEquals(3 * GB, app.getAppAttemptResourceUsage().getUsed().getMemorySize()); // making sure container is launched ContainerId containerId1 = ContainerId.newContainerId(am1.getApplicationAttemptId(), 1); sentRMContainerLaunched(rm, containerId1); // submit allocation request for a new container 
am1.allocate(Collections.singletonList(ResourceRequest.newInstance( Priority.newInstance(1), "*", Resources.createResource(2 * GB), 1)), null); // nm reports status update and triggers container allocation nm.nodeHeartbeat(true); // *In the mean time*, am1 asks to decrease its AM container resource from // 3GB to 1GB AllocateResponse response = am1.sendContainerResizingRequest( Collections.singletonList(UpdateContainerRequest .newInstance(0, containerId1, ContainerUpdateType.DECREASE_RESOURCE, Resources.createResource(GB), null))); // verify that the containe resource is decreased verifyContainerDecreased(response, containerId1, GB); rm.close(); } private void checkPendingResource(MockRM rm, String queueName, int memory, String label) { CapacityScheduler cs = (CapacityScheduler) rm.getResourceScheduler(); CSQueue queue = cs.getQueue(queueName); Assert.assertEquals(memory, queue.getQueueResourceUsage() .getPending(label == null ? RMNodeLabelsManager.NO_LABEL : label) .getMemorySize()); } private void checkUsedResource(MockRM rm, String queueName, int memory, String label) { CapacityScheduler cs = (CapacityScheduler) rm.getResourceScheduler(); CSQueue queue = cs.getQueue(queueName); Assert.assertEquals(memory, queue.getQueueResourceUsage() .getUsed(label == null ? 
RMNodeLabelsManager.NO_LABEL : label) .getMemorySize()); } private void verifyContainerIncreased(AllocateResponse response, ContainerId containerId, int mem) { List<UpdatedContainer> increasedContainers = response.getUpdatedContainers(); boolean found = false; for (UpdatedContainer c : increasedContainers) { if (c.getContainer().getId().equals(containerId)) { found = true; Assert.assertEquals(ContainerUpdateType.INCREASE_RESOURCE, c.getUpdateType()); Assert.assertEquals(mem, c.getContainer().getResource().getMemorySize()); } } if (!found) { Assert.fail("Container not increased: containerId=" + containerId); } } private void verifyContainerDecreased(AllocateResponse response, ContainerId containerId, int mem) { List<UpdatedContainer> decreasedContainers = response.getUpdatedContainers(); boolean found = false; for (UpdatedContainer c : decreasedContainers) { if (c.getContainer().getId().equals(containerId)) { found = true; Assert.assertEquals(ContainerUpdateType.DECREASE_RESOURCE, c.getUpdateType()); Assert.assertEquals(mem, c.getContainer().getResource().getMemorySize()); } } if (!found) { Assert.fail("Container not decreased: containerId=" + containerId); } } private void sentRMContainerLaunched(MockRM rm, ContainerId containerId) { CapacityScheduler cs = (CapacityScheduler) rm.getResourceScheduler(); RMContainer rmContainer = cs.getRMContainer(containerId); if (rmContainer != null) { rmContainer.handle( new RMContainerEvent(containerId, RMContainerEventType.LAUNCHED)); } else { Assert.fail("Cannot find RMContainer"); } } private void verifyAvailableResourceOfSchedulerNode(MockRM rm, NodeId nodeId, int expectedMemory) { CapacityScheduler cs = (CapacityScheduler) rm.getResourceScheduler(); SchedulerNode node = cs.getNode(nodeId); Assert .assertEquals(expectedMemory, node.getUnallocatedResource().getMemorySize()); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.connect.data; import org.apache.kafka.connect.errors.DataException; import java.math.BigDecimal; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Collections; import java.util.EnumMap; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; public class ConnectSchema implements Schema { /** * Maps Schema.Types to a list of Java classes that can be used to represent them. */ private static final Map<Type, List<Class>> SCHEMA_TYPE_CLASSES = new EnumMap<>(Type.class); /** * Maps known logical types to a list of Java classes that can be used to represent them. */ private static final Map<String, List<Class>> LOGICAL_TYPE_CLASSES = new HashMap<>(); /** * Maps the Java classes to the corresponding Schema.Type. 
*/ private static final Map<Class<?>, Type> JAVA_CLASS_SCHEMA_TYPES = new HashMap<>(); static { SCHEMA_TYPE_CLASSES.put(Type.INT8, Collections.singletonList((Class) Byte.class)); SCHEMA_TYPE_CLASSES.put(Type.INT16, Collections.singletonList((Class) Short.class)); SCHEMA_TYPE_CLASSES.put(Type.INT32, Collections.singletonList((Class) Integer.class)); SCHEMA_TYPE_CLASSES.put(Type.INT64, Collections.singletonList((Class) Long.class)); SCHEMA_TYPE_CLASSES.put(Type.FLOAT32, Collections.singletonList((Class) Float.class)); SCHEMA_TYPE_CLASSES.put(Type.FLOAT64, Collections.singletonList((Class) Double.class)); SCHEMA_TYPE_CLASSES.put(Type.BOOLEAN, Collections.singletonList((Class) Boolean.class)); SCHEMA_TYPE_CLASSES.put(Type.STRING, Collections.singletonList((Class) String.class)); // Bytes are special and have 2 representations. byte[] causes problems because it doesn't handle equals() and // hashCode() like we want objects to, so we support both byte[] and ByteBuffer. Using plain byte[] can cause // those methods to fail, so ByteBuffers are recommended SCHEMA_TYPE_CLASSES.put(Type.BYTES, Arrays.asList((Class) byte[].class, (Class) ByteBuffer.class)); SCHEMA_TYPE_CLASSES.put(Type.ARRAY, Collections.singletonList((Class) List.class)); SCHEMA_TYPE_CLASSES.put(Type.MAP, Collections.singletonList((Class) Map.class)); SCHEMA_TYPE_CLASSES.put(Type.STRUCT, Collections.singletonList((Class) Struct.class)); for (Map.Entry<Type, List<Class>> schemaClasses : SCHEMA_TYPE_CLASSES.entrySet()) { for (Class<?> schemaClass : schemaClasses.getValue()) JAVA_CLASS_SCHEMA_TYPES.put(schemaClass, schemaClasses.getKey()); } LOGICAL_TYPE_CLASSES.put(Decimal.LOGICAL_NAME, Collections.singletonList((Class) BigDecimal.class)); LOGICAL_TYPE_CLASSES.put(Date.LOGICAL_NAME, Collections.singletonList((Class) java.util.Date.class)); LOGICAL_TYPE_CLASSES.put(Time.LOGICAL_NAME, Collections.singletonList((Class) java.util.Date.class)); LOGICAL_TYPE_CLASSES.put(Timestamp.LOGICAL_NAME, 
Collections.singletonList((Class) java.util.Date.class)); // We don't need to put these into JAVA_CLASS_SCHEMA_TYPES since that's only used to determine schemas for // schemaless data and logical types will have ambiguous schemas (e.g. many of them use the same Java class) so // they should not be used without schemas. } // The type of the field private final Type type; private final boolean optional; private final Object defaultValue; private final List<Field> fields; private final Map<String, Field> fieldsByName; private final Schema keySchema; private final Schema valueSchema; // Optional name and version provide a built-in way to indicate what type of data is included. Most // useful for structs to indicate the semantics of the struct and map it to some existing underlying // serializer-specific schema. However, can also be useful in specifying other logical types (e.g. a set is an array // with additional constraints). private final String name; private final Integer version; // Optional human readable documentation describing this schema. private final String doc; private final Map<String, String> parameters; // precomputed hash code. There is no need to re-compute every time hashCode() is called. private Integer hash = null; /** * Construct a Schema. Most users should not construct schemas manually, preferring {@link SchemaBuilder} instead. */ public ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema) { this.type = type; this.optional = optional; this.defaultValue = defaultValue; this.name = name; this.version = version; this.doc = doc; this.parameters = parameters; if (this.type == Type.STRUCT) { this.fields = fields == null ? 
Collections.<Field>emptyList() : fields; this.fieldsByName = new HashMap<>(this.fields.size()); for (Field field : this.fields) fieldsByName.put(field.name(), field); } else { this.fields = null; this.fieldsByName = null; } this.keySchema = keySchema; this.valueSchema = valueSchema; } /** * Construct a Schema for a primitive type, setting schema parameters, struct fields, and key and value schemas to null. */ public ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc) { this(type, optional, defaultValue, name, version, doc, null, null, null, null); } /** * Construct a default schema for a primitive type. The schema is required, has no default value, name, version, * or documentation. */ public ConnectSchema(Type type) { this(type, false, null, null, null, null); } @Override public Type type() { return type; } @Override public boolean isOptional() { return optional; } @Override public Object defaultValue() { return defaultValue; } @Override public String name() { return name; } @Override public Integer version() { return version; } @Override public String doc() { return doc; } @Override public Map<String, String> parameters() { return parameters; } @Override public List<Field> fields() { if (type != Type.STRUCT) throw new DataException("Cannot list fields on non-struct type"); return fields; } @Override public Field field(String fieldName) { if (type != Type.STRUCT) throw new DataException("Cannot look up fields on non-struct type"); return fieldsByName.get(fieldName); } @Override public Schema keySchema() { if (type != Type.MAP) throw new DataException("Cannot look up key schema on non-map type"); return keySchema; } @Override public Schema valueSchema() { if (type != Type.MAP && type != Type.ARRAY) throw new DataException("Cannot look up value schema on non-array and non-map type"); return valueSchema; } /** * Validate that the value can be used with the schema, i.e. 
that its type matches the schema type and nullability * requirements. Throws a DataException if the value is invalid. * @param schema Schema to test * @param value value to test */ public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } public static void validateValue(String name, Schema schema, Object value) { if (value == null) { if (!schema.isOptional()) throw new DataException("Invalid value: null used for required field: \"" + name + "\", schema type: " + schema.type()); else return; } List<Class> expectedClasses = LOGICAL_TYPE_CLASSES.get(schema.name()); if (expectedClasses == null) expectedClasses = SCHEMA_TYPE_CLASSES.get(schema.type()); if (expectedClasses == null) throw new DataException("Invalid Java object for schema type " + schema.type() + ": " + value.getClass() + " for field: \"" + name + "\""); boolean foundMatch = false; for (Class<?> expectedClass : expectedClasses) { if (expectedClass.isInstance(value)) { foundMatch = true; break; } } if (!foundMatch) throw new DataException("Invalid Java object for schema type " + schema.type() + ": " + value.getClass() + " for field: \"" + name + "\""); switch (schema.type()) { case STRUCT: Struct struct = (Struct) value; if (!struct.schema().equals(schema)) throw new DataException("Struct schemas do not match."); struct.validate(); break; case ARRAY: List<?> array = (List<?>) value; for (Object entry : array) validateValue(schema.valueSchema(), entry); break; case MAP: Map<?, ?> map = (Map<?, ?>) value; for (Map.Entry<?, ?> entry : map.entrySet()) { validateValue(schema.keySchema(), entry.getKey()); validateValue(schema.valueSchema(), entry.getValue()); } break; } } /** * Validate that the value can be used for this schema, i.e. that its type matches the schema type and optional * requirements. Throws a DataException if the value is invalid. 
* @param value the value to validate */ public void validateValue(Object value) { validateValue(this, value); } @Override public ConnectSchema schema() { return this; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ConnectSchema schema = (ConnectSchema) o; return Objects.equals(optional, schema.optional) && Objects.equals(version, schema.version) && Objects.equals(name, schema.name) && Objects.equals(doc, schema.doc) && Objects.equals(type, schema.type) && Objects.equals(defaultValue, schema.defaultValue) && Objects.equals(fields, schema.fields) && Objects.equals(keySchema, schema.keySchema) && Objects.equals(valueSchema, schema.valueSchema) && Objects.equals(parameters, schema.parameters); } @Override public int hashCode() { if (this.hash == null) { this.hash = Objects.hash(type, optional, defaultValue, fields, keySchema, valueSchema, name, version, doc, parameters); } return this.hash; } @Override public String toString() { if (name != null) return "Schema{" + name + ":" + type + "}"; else return "Schema{" + type + "}"; } /** * Get the {@link Schema.Type} associated with the given class. * * @param klass the Class to * @return the corresponding type, or null if there is no matching type */ public static Type schemaType(Class<?> klass) { synchronized (JAVA_CLASS_SCHEMA_TYPES) { Type schemaType = JAVA_CLASS_SCHEMA_TYPES.get(klass); if (schemaType != null) return schemaType; // Since the lookup only checks the class, we need to also try for (Map.Entry<Class<?>, Type> entry : JAVA_CLASS_SCHEMA_TYPES.entrySet()) { try { klass.asSubclass(entry.getKey()); // Cache this for subsequent lookups JAVA_CLASS_SCHEMA_TYPES.put(klass, entry.getValue()); return entry.getValue(); } catch (ClassCastException e) { // Expected, ignore } } } return null; } }
package com.perforce.p4java.impl.mapbased.server.cmd;

import static com.google.common.collect.Lists.newArrayList;
import static com.perforce.p4java.impl.mapbased.rpc.func.RpcFunctionMapKey.CODE0;
import static com.perforce.p4java.impl.mapbased.rpc.func.RpcFunctionMapKey.FMT0;
import static com.perforce.p4java.server.CmdSpec.STREAM;
import static org.apache.commons.lang3.StringUtils.EMPTY;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.mockito.ArgumentMatchers.anyMap;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.List;
import java.util.Map;

import com.perforce.p4java.server.IOptionsServer;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;

import com.nitorcreations.junit.runners.NestedRunner;
import com.perforce.p4java.core.IStream;
import com.perforce.p4java.impl.mapbased.MapKeys;
import com.perforce.p4java.impl.mapbased.server.Server;
import com.perforce.p4java.option.server.GetStreamOptions;
import com.perforce.p4java.option.server.StreamOptions;
import org.apache.commons.lang3.ArrayUtils;

/**
 * Unit tests for StreamDelegator (create/get/update/delete of streams),
 * driven against a mocked server via NestedRunner nested test classes.
 *
 * @author Sean Shou
 * @since 6/10/2016
 */
@RunWith(NestedRunner.class)
public class StreamDelegatorTest {
    // Message code inside the "info" severity range of the p4 protocol.
    private static final String MESSAGE_CODE_IN_INFO_RANGE = "268435456";
    private String streamName = "my Stream";
    private StreamDelegator streamDelegator;
    // Single mocked result map returned by the mocked server command.
    private Map<String, Object> resultMap;
    private List<Map<String, Object>> resultMaps;
    private StreamOptions streamOptions;
    private IStream stream;
    private IOptionsServer server;

    /**
     * Runs before every test: wires the delegator to a mocked server and
     * prepares a one-entry mocked result list.
     */
    @SuppressWarnings("unchecked")
    @Before
    public void beforeEach() {
        server = mock(Server.class);
        streamDelegator = new StreamDelegator(server);
        resultMap = mock(Map.class);
        resultMaps = newArrayList(resultMap);

        stream = mock(IStream.class);
        streamOptions = new StreamOptions();
    }

    /**
     * Stubs the result map to look like an info-level server message whose
     * format string ("%mapKey%") resolves to {@code streamName}.
     */
    private void givenInfoMessageCode(String mapKey) {
        when(resultMap.get(FMT0)).thenReturn("%" + mapKey + "%");
        when(resultMap.get(CODE0)).thenReturn(MESSAGE_CODE_IN_INFO_RANGE);
        when(resultMap.get(mapKey)).thenReturn(streamName);
    }

    /**
     * Test createStream()
     */
    public class TestCreateStream {
        private final String[] createCmdArguments = {"-i"};

        /**
         * Rule for expected exception verification
         */
        @Rule
        public ExpectedException thrown = ExpectedException.none();

        /**
         * Runs before every test: stubs 'p4 stream -i' to return the mocked results.
         */
        @SuppressWarnings("unchecked")
        @Before
        public void beforeEach() throws Exception {
            when(server.execMapCmdList(eq(STREAM.toString()), eq(createCmdArguments), anyMap()))
                    .thenReturn(resultMaps);
        }

        /**
         * Expected to throw <code>NullPointerException</code> when input 'stream' is null
         *
         * @throws Exception
         */
        @Test
        public void shouldThrowsNullPointerExceptionWhenStreamIsNull() throws Exception {
            thrown.expect(NullPointerException.class);

            // given
            stream = null;
            // then
            streamDelegator.createStream(stream);
        }

        /**
         * Expected return non blank created stream name
         *
         * @throws Exception
         */
        @Test
        public void shouldReturnNonBlankCreatedStreamName() throws Exception {
            // given
            givenInfoMessageCode("createStreamName");
            // when
            String createStreamName = streamDelegator.createStream(stream);
            // then
            assertThat(createStreamName, is(streamName));
        }
    }

    /**
     * Test getStream()
     */
    public class TestGetStream {
        private final String streamPath = "//depot/stream/dev";
        private final String[] getCmdOptions = {"-o"};
        // 'p4 stream -o <path>' argument vector.
        private final String[] getCmdArguments = ArrayUtils.add(getCmdOptions, streamPath);

        /**
         * Test getStream(streamPath)
         */
        public class WhenStreamPathGiven {
            /**
             * Runs before every test: stubs 'p4 stream -o path' to return a map
             * carrying the expected stream name.
             */
            @SuppressWarnings("unchecked")
            @Before
            public void beforeEach() throws Exception {
                when(resultMap.get(MapKeys.NAME_KEY)).thenReturn(streamName);
                when(server.execMapCmdList(eq(STREAM.toString()), eq(getCmdArguments), eq(null)))
                        .thenReturn(resultMaps);
            }

            /**
             * Expected return non null <code>IStream</code>
             *
             * @throws Exception
             */
            @Test
            public void shouldReturnNonNullStream() throws Exception {
                // when
                IStream actualStream = streamDelegator.getStream(streamPath);
                // then
                assertThat(actualStream, notNullValue());
                assertThat(actualStream.getName(), is(streamName));
            }
        }

        /**
         * Test getStream(streamPath, getStreamOptions)
         */
        public class WhenStreamPathGetStreamOptionsGiven {
            /**
             * Rule for expected exception verification
             */
            @Rule
            public ExpectedException thrown = ExpectedException.none();
            private GetStreamOptions opts = new GetStreamOptions();

            // Same stubbing as WhenStreamPathGiven; default GetStreamOptions
            // produce the same '-o path' argument vector.
            @SuppressWarnings("unchecked")
            @Before
            public void beforeEach() throws Exception {
                when(resultMap.get(MapKeys.NAME_KEY)).thenReturn(streamName);
                when(server.execMapCmdList(eq(STREAM.toString()), eq(getCmdArguments), eq(null)))
                        .thenReturn(resultMaps);
            }

            /**
             * Expected to throw <code>IllegalArgumentException</code> when 'streamPath' is blank.
             *
             * @throws Exception
             */
            @Test
            public void shouldThrownIllegalArgumentExceptionWhenStreamPathIsBlank() throws Exception {
                thrown.expect(IllegalArgumentException.class);
                streamDelegator.getStream(EMPTY, opts);
            }

            /**
             * Expected return non null <code>IStream</code> instance
             *
             * @throws Exception
             */
            @Test
            public void shouldReturnNonNullStream() throws Exception {
                // when
                IStream actualStream = streamDelegator.getStream(streamPath, opts);
                // then
                assertThat(actualStream.getName(), is(streamName));
            }
        }
    }

    /**
     * Test updateStream()
     */
    public class TestUpdateStream {
        private final String[] updateCmdArguments = {"-i"};

        /**
         * Rule for expected exception verification
         */
        @Rule
        public ExpectedException thrown = ExpectedException.none();
        private GetStreamOptions opts = new GetStreamOptions();

        /**
         * Runs before every test: stubs 'p4 stream -i' to return the mocked results.
         */
        @SuppressWarnings("unchecked")
        @Before
        public void beforeEach() throws Exception {
            when(server.execMapCmdList(eq(STREAM.toString()), eq(updateCmdArguments), anyMap()))
                    .thenReturn(resultMaps);
        }

        /**
         * Expected to throw <code>NullPointerException</code> when input stream is null
         *
         * @throws Exception
         */
        @Test
        public void shouldThrowNullPointerExceptionWhenStreamIsNull() throws Exception {
            thrown.expect(NullPointerException.class);

            // given
            stream = null;
            // then
            streamDelegator.updateStream(stream, streamOptions);
        }

        /**
         * Expected return non blank updated stream name
         *
         * @throws Exception
         */
        @Test
        public void shouldReturnUpdatedStreamName() throws Exception {
            givenInfoMessageCode("updateStreamName");
            // when
            String updateStream = streamDelegator.updateStream(stream, streamOptions);
            // then
            assertThat(updateStream, is(streamName));
        }
    }

    /**
     * Test deleteStream()
     */
    public class TestDeleteStream {
        /**
         * Rule for expected exception verification
         */
        @Rule
        public ExpectedException thrown = ExpectedException.none();
        private String streamPath;
        private String[] deleteCmdArguments;

        /**
         * Runs before every test: stubs 'p4 stream -d path' to return the mocked results.
         */
        @SuppressWarnings("unchecked")
        @Before
        public void beforeEach() throws Exception {
            streamPath = "//depot/stream/dev";
            deleteCmdArguments = new String[]{"-d", streamPath};
            when(server.execMapCmdList(eq(STREAM.toString()), eq(deleteCmdArguments), eq(null)))
                    .thenReturn(resultMaps);
        }

        /**
         * Expected to throw <code>IllegalArgumentException</code> when 'streamPath' is blank
         *
         * @throws Exception
         */
        @Test
        public void shouldThrownIllegalArgumentExceptionWhenStreamPathIsBlank() throws Exception {
            // then
            thrown.expect(IllegalArgumentException.class);
            // given
            streamPath = EMPTY;
            //when
            streamDelegator.deleteStream(EMPTY, streamOptions);
        }

        /**
         * Expected return non blank deleted stream name
         *
         * @throws Exception
         */
        @Test
        public void shouldReturnNonBlankDeletedStreamName() throws Exception {
            givenInfoMessageCode("deleteStreamName");
            // when
            String deleteStream = streamDelegator.deleteStream(streamPath, streamOptions);
            // then
            assertThat(deleteStream, is(streamName));
        }
    }
}
package mdes.slick.sui;

import org.newdawn.slick.AngelCodeFont;
import org.newdawn.slick.Color;
import org.newdawn.slick.Font;
import org.newdawn.slick.Image;

/**
 * Label is the base class for displaying
 * a String and/or Image on a component. Text is
 * always drawn over images, which are drawn over
 * the color background (if opaque).
 * <p>
 * Text and images can be aligned horizontally and/or
 * vertically through setHorizontalAlignment
 * and setVerticalAlignment.
 * <p>
 * All labels start with a padding of 0, initially
 * centered.
 *
 *
 * @author davedes
 * @since b.0.1
 */
public class Label extends Container {

    /** A constant for the horizontal alignment. */
    public static final int LEFT_ALIGNMENT = 0;

    /** A constant for the horizontal alignment. */
    public static final int RIGHT_ALIGNMENT = 1;

    /** A constant for the vertical/horizontal alignment. */
    public static final int CENTER_ALIGNMENT = 2;

    /** A constant for the vertical alignment. */
    public static final int TOP_ALIGNMENT = 3;

    /** A constant for the vertical alignment. */
    public static final int BOTTOM_ALIGNMENT = 4;

    /** The text to be displayed. */
    private String text = null;

    /** The image to be displayed. */
    private Image image = null;

    /** A cached yoff (AngelCodeFont y-offset of the current text; see getYOffset(String)). */
    protected float yoff;

    /** The current horizontal alignment. */
    protected int horizAlignment = CENTER_ALIGNMENT;

    /** The current vertical alignment. */
    protected int vertAlignment = CENTER_ALIGNMENT;

    /** The current disabled color, initially gray. */
    protected Color disabledColor = Sui.getTheme().getDisabledMask().darker(.5f);

    //TODO: fix disabled foreground

    // Color filter applied when rendering the image; defaults to opaque white (no tint).
    protected Color filter = new Color(1f, 1f, 1f, 1f);

    /**
     * Creates a new label with the specified text and image.
     * <p>
     * NOTE(review): this(true) triggers updateAppearance(), which computes the
     * preferred size via getPackedSize() BEFORE the image and text are assigned
     * below — so the initial preferred size ignores them. Confirm intended.
     *
     * @param image the image to be displayed (rendered below text)
     * @param text the text to be displayed
     */
    public Label(Image image, String text) {
        this(true);
        this.setImage(image);
        this.setText(text);
    }

    /** Creates a new label with the specified text.
     *
     * @param text the text to be displayed
     */
    public Label(String text) {
        this(null, text);
    }

    /**
     * Creates a new label with the specified image.
     *
     * @param image the image to be displayed
     */
    public Label(Image image) {
        this(image, null);
    }

    /** Creates a new empty label. */
    public Label() {
        this(null, null);
    }

    /**
     * Internal constructor allowing subclasses to defer appearance setup.
     * NOTE(review): updateAppearance() is an overridable method invoked from a
     * constructor; subclasses overriding it run before their own fields are set.
     *
     * @param updateAppearance whether to install the skin appearance now
     */
    protected Label(boolean updateAppearance) {
        super(false);
        if (updateAppearance)
            updateAppearance();
    }

    /** Installs the skin's label appearance and resets the preferred size to the packed size. */
    public void updateAppearance() {
        setAppearance(Sui.getSkin().getLabelAppearance(this));
        setPreferredSize(getPackedSize());
    }

    /**
     * Sets the color filter used when rendering the image.
     *
     * @param filter the new image filter color
     */
    public void setImageFilter(Color filter) {
        this.filter = filter;
    }

    /**
     * Gets the color filter used when rendering the image.
     *
     * @return the image filter color
     */
    public Color getImageFilter() {
        return filter;
    }

    /**
     * Packs this label based on current font & text,
     * leaving a space for padding.
     */
    protected Dimension getPackedSize() {
        Font font = getFont();

        float objWidth = 0;
        float objHeight = 0;
        Dimension d = new Dimension();

        // Content size is the max of image bounds and text bounds.
        if (getImage()!=null) {
            objWidth = getImage().getWidth();
            objHeight = getImage().getHeight();
        }
        if (getText()!=null && getText().length()!=0 && font!=null) {
            objWidth = Math.max(objWidth, getTextWidth());
            objHeight = Math.max(objHeight, getTextHeight());
        }

        Padding padding = getPadding();

        d.width = padding.left + objWidth + padding.right;
        d.height = padding.top + objHeight + padding.bottom;
        return d;
    }

    /** Resizes this label to its packed (content + padding) size. */
    public void pack() {
        setSize(getPackedSize());
    }

    /** Scales the image (if any) to fill the label's current size. */
    public void packImage() {
        if(this.image != null)
            this.image = getImage().getScaledCopy((int)getWidth(), (int)getHeight());
    }

    /**
     * Sets the font and refreshes the cached y-offset for the current text.
     *
     * @param f the new font
     */
    public void setFont(Font f) {
        super.setFont(f);
        this.yoff = getYOffset(getText());
    }

    /**
     * Sets the disabled foreground color to be
     * used.
     *
     * @param c the new foreground color
     */
    public void setDisabledForeground(Color c) {
        this.disabledColor = c;
    }

    /**
     * Gets the disabled foreground color.
     *
     * @return the disabled foreground color
     */
    public Color getDisabledForeground() {
        return disabledColor;
    }

    /**
     * Sets the Image to be displayed.
     *
     * @param i the Image to draw
     */
    public void setImage(Image i) {
        this.image = i;
    }

    /**
     * Gets the Image being displayed.
     *
     * @return the label's image
     */
    public Image getImage() {
        return image;
    }

    /**
     * Sets the text to be displayed and refreshes the cached y-offset.
     *
     * @param text the text to draw
     */
    public void setText(String text) {
        this.text = text;
        this.yoff = getYOffset(text);
    }

    /**
     * Gets the text being displayed.
     *
     * @return the text for this label
     */
    public String getText() {
        return text;
    }

    /**
     * Sets the horizontal alignment of the text/image.
     *
     * @param horizAlignment the alignment constant; either LEFT_ALIGNMENT,
     *                       RIGHT_ALIGNMENT, or CENTER_ALIGNMENT
     */
    public void setHorizontalAlignment(int horizAlignment) {
        this.horizAlignment = horizAlignment;
    }

    /**
     * Gets the horizontal alignment of the text/image.
     *
     * @return the horizontal alignment constant
     */
    public int getHorizontalAlignment() {
        return horizAlignment;
    }

    /**
     * Sets the vertical alignment of the text/image.
     *
     * @param vertAlignment the alignment constant; either TOP_ALIGNMENT,
     *                      BOTTOM_ALIGNMENT, or CENTER_ALIGNMENT
     */
    public void setVerticalAlignment(int vertAlignment) {
        this.vertAlignment = vertAlignment;
    }

    /**
     * Gets the vertical alignment of the text/image.
     *
     * @return the vertical alignment constant
     */
    public int getVerticalAlignment() {
        return vertAlignment;
    }

    //TODO: support for alignment X and Y
    //TODO: tweak getTextX/Y by reusing width/height

    /**
     * Gets the yoffset if the current font is an instanceof AngelCodeFont,
     * otherwise returns 0. This method on its own does not change the protected
     * variable <tt>yoffset</tt>. Whenever the text changes, this method is
     * used to store the new offset in the <tt>yoffset</tt> variable.
     *
     * @return the yoffset of the font if it is an instanceof AngelCodeFont,
     *         otherwise 0
     */
    protected float getYOffset(String s) {
        Font font = getFont();
        if (s==null||s.length()==0)
            return 0;
        else if (font instanceof AngelCodeFont)
            return ((AngelCodeFont)font).getYOffset(s);
        else
            return 0;
    }

    /** Returns the cached y-offset of the current text. */
    public float getYOffset() {
        return yoff;
    }

    /** Returns the rendered height of the current text (font height minus the cached y-offset). */
    public float getTextHeight() {
        String str = getText()!=null ? getText() : "";
        return getFont().getHeight(str)-yoff;
        //return getFont().getLineHeight();
    }

    /** Returns the rendered width of the current text (empty string if no text is set). */
    public float getTextWidth() {
        String str = getText()!=null ? getText() : "";
        return getFont().getWidth(str);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.tinkerpop.gremlin.jsr223; import org.apache.tinkerpop.gremlin.util.CoreImports; import javax.script.Bindings; import javax.script.ScriptContext; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Optional; import java.util.ServiceConfigurationError; import java.util.ServiceLoader; import java.util.stream.Collectors; import java.util.stream.Stream; /** * The {@code ScriptEngineManager} implements a discovery, instantiation and configuration mechanism for * {@link GremlinScriptEngine} classes and also maintains a collection of key/value pairs storing state shared by all * engines created by it. This class uses the {@code ServiceProvider} mechanism to enumerate all the * implementations of <code>GremlinScriptEngineFactory</code>. The <code>ScriptEngineManager</code> provides a method * to return a list of all these factories as well as utility methods which look up factories on the basis of language * name, file extension and mime type. 
* <p/> * The {@code Bindings} of key/value pairs, referred to as the "Global Scope" maintained by the manager is available * to all instances of @code ScriptEngine} created by the {@code GremlinScriptEngineManager}. The values * in the {@code Bindings} are generally exposed in all scripts. * <p/> * This class is based quite heavily on the workings of the {@code ScriptEngineManager} supplied in the * {@code javax.script} packages, but adds some additional features that are specific to Gremlin and TinkerPop. * Unfortunately, it's not easily possible to extend {@code ScriptEngineManager} directly as there certain behaviors * don't appear to be be straightforward to implement and member variables are all private. It is important to note * that this class is designed to provide support for "Gremlin-enabled" {@code ScriptEngine} instances (i.e. those * that extend from {@link GremlinScriptEngine}) and is not meant to manage just any {@code ScriptEngine} instance * that may be on the path. * <p/> * As this is a "Gremlin" {@code ScriptEngine}, certain common imports are automatically applied when a * {@link GremlinScriptEngine} is instantiated via the {@link GremlinScriptEngineFactory}.. Initial imports from * gremlin-core come from the {@link CoreImports}. * * @author Stephen Mallette (http://stephen.genoprime.com) */ public class DefaultGremlinScriptEngineManager implements GremlinScriptEngineManager { private static final boolean DEBUG = false; /** * Set of script engine factories discovered. */ private final HashSet<GremlinScriptEngineFactory> engineSpis = new HashSet<>(); /** * Map of engine name to script engine factory. */ private final HashMap<String, GremlinScriptEngineFactory> nameAssociations = new HashMap<>(); /** * Map of script file extension to script engine factory. */ private final HashMap<String, GremlinScriptEngineFactory> extensionAssociations = new HashMap<>(); /** * Map of script script MIME type to script engine factory. 
*/ private final HashMap<String, GremlinScriptEngineFactory> mimeTypeAssociations = new HashMap<>(); /** * Global bindings associated with script engines created by this manager. */ private Bindings globalScope = new ConcurrentBindings(); /** * List of extensions for the {@link GremlinScriptEngineManager} which will be used to supply * {@link Customizer} instances to {@link GremlinScriptEngineFactory} that are instantiated. */ private List<GremlinModule> modules = new ArrayList<>(); /** * The effect of calling this constructor is the same as calling * {@code DefaultGremlinScriptEngineManager(Thread.currentThread().getContextClassLoader())}. */ public DefaultGremlinScriptEngineManager() { final ClassLoader ctxtLoader = Thread.currentThread().getContextClassLoader(); initEngines(ctxtLoader); } /** * This constructor loads the implementations of {@link GremlinScriptEngineFactory} visible to the given * {@code ClassLoader} using the {@code ServiceLoader} mechanism. If loader is <code>null</code>, the script * engine factories that are bundled with the platform and that are in the usual extension directories * (installed extensions) are loaded. */ public DefaultGremlinScriptEngineManager(final ClassLoader loader) { initEngines(loader); } @Override public List<Customizer> getCustomizers(final String scriptEngineName) { return modules.stream().flatMap(module -> { final Optional<Customizer[]> moduleCustomizers = module.getCustomizers(scriptEngineName); return Stream.of(moduleCustomizers.orElse(new Customizer[0])); }).collect(Collectors.toList()); } @Override public void addModule(final GremlinModule module) { // TODO: should modules be a set based on "name" to ensure uniqueness? not sure what bad stuff can happen with dupes if (module != null) modules.add(module); } /** * Stores the specified {@code Bindings} as a global for all {@link GremlinScriptEngine} objects created by it. 
* If the bindings are to be updated by multiple threads it is recommended that a {@link ConcurrentBindings} * instance is supplied. * * @throws IllegalArgumentException if bindings is null. */ @Override public synchronized void setBindings(final Bindings bindings) { if (null == bindings) throw new IllegalArgumentException("Global scope cannot be null."); globalScope = bindings; } /** * Gets the bindings of the {@code Bindings} in global scope. */ @Override public Bindings getBindings() { return globalScope; } /** * Sets the specified key/value pair in the global scope. The key may not be null or empty. * * @throws IllegalArgumentException if key is null or empty. */ @Override public void put(final String key, final Object value) { if (null == key) throw new IllegalArgumentException("key may not be null"); if (key.isEmpty()) throw new IllegalArgumentException("key may not be empty"); globalScope.put(key, value); } /** * Gets the value for the specified key in the global scope. */ @Override public Object get(final String key) { return globalScope.get(key); } /** * Looks up and creates a {@link GremlinScriptEngine} for a given name. The algorithm first searches for a * {@link GremlinScriptEngineFactory} that has been registered as a handler for the specified name using the * {@link #registerEngineExtension(String, GremlinScriptEngineFactory)} method. If one is not found, it searches * the set of {@code GremlinScriptEngineFactory} instances stored by the constructor for one with the specified * name. If a {@code ScriptEngineFactory} is found by either method, it is used to create instance of * {@link GremlinScriptEngine}. * * @param shortName The short name of the {@link GremlinScriptEngine} implementation returned by the * {@link GremlinScriptEngineFactory#getNames} method. * @return A {@link GremlinScriptEngine} created by the factory located in the search. Returns {@code null} * if no such factory was found. 
The global scope of this manager is applied to the newly created * {@link GremlinScriptEngine} * @throws NullPointerException if shortName is {@code null}. */ @Override public GremlinScriptEngine getEngineByName(final String shortName) { if (null == shortName) throw new NullPointerException(); //look for registered name first Object obj; if (null != (obj = nameAssociations.get(shortName))) { final GremlinScriptEngineFactory spi = (GremlinScriptEngineFactory) obj; try { return createGremlinScriptEngine(spi); } catch (Exception exp) { if (DEBUG) exp.printStackTrace(); } } for (GremlinScriptEngineFactory spi : engineSpis) { List<String> names = null; try { names = spi.getNames(); } catch (Exception exp) { if (DEBUG) exp.printStackTrace(); } if (names != null) { for (String name : names) { if (shortName.equals(name)) { try { return createGremlinScriptEngine(spi); } catch (Exception exp) { if (DEBUG) exp.printStackTrace(); } } } } } return null; } /** * Look up and create a {@link GremlinScriptEngine} for a given extension. The algorithm * used by {@link #getEngineByName(String)} is used except that the search starts by looking for a * {@link GremlinScriptEngineFactory} registered to handle the given extension using * {@link #registerEngineExtension(String, GremlinScriptEngineFactory)}. * * @return The engine to handle scripts with this extension. Returns {@code null} if not found. * @throws NullPointerException if extension is {@code null}. 
*/ @Override public GremlinScriptEngine getEngineByExtension(final String extension) { if (null == extension) throw new NullPointerException(); //look for registered extension first Object obj; if (null != (obj = extensionAssociations.get(extension))) { final GremlinScriptEngineFactory spi = (GremlinScriptEngineFactory) obj; try { return createGremlinScriptEngine(spi); } catch (Exception exp) { if (DEBUG) exp.printStackTrace(); } } for (GremlinScriptEngineFactory spi : engineSpis) { List<String> exts = null; try { exts = spi.getExtensions(); } catch (Exception exp) { if (DEBUG) exp.printStackTrace(); } if (exts == null) continue; for (String ext : exts) { if (extension.equals(ext)) { try { return createGremlinScriptEngine(spi); } catch (Exception exp) { if (DEBUG) exp.printStackTrace(); } } } } return null; } /** * Look up and create a {@link GremlinScriptEngine} for a given mime type. The algorithm used by * {@link #getEngineByName(String)} is used except that the search starts by looking for a * {@link GremlinScriptEngineFactory} registered to handle the given mime type using * {@link #registerEngineMimeType(String, GremlinScriptEngineFactory)}. * * @param mimeType The given mime type * @return The engine to handle scripts with this mime type. Returns {@code null} if not found. * @throws NullPointerException if mime-type is {@code null}. 
*/ @Override public GremlinScriptEngine getEngineByMimeType(final String mimeType) { if (null == mimeType) throw new NullPointerException(); //look for registered types first Object obj; if (null != (obj = mimeTypeAssociations.get(mimeType))) { final GremlinScriptEngineFactory spi = (GremlinScriptEngineFactory) obj; try { return createGremlinScriptEngine(spi); } catch (Exception exp) { if (DEBUG) exp.printStackTrace(); } } for (GremlinScriptEngineFactory spi : engineSpis) { List<String> types = null; try { types = spi.getMimeTypes(); } catch (Exception exp) { if (DEBUG) exp.printStackTrace(); } if (types == null) continue; for (String type : types) { if (mimeType.equals(type)) { try { return createGremlinScriptEngine(spi); } catch (Exception exp) { if (DEBUG) exp.printStackTrace(); } } } } return null; } /** * Returns a list whose elements are instances of all the {@link GremlinScriptEngineFactory} classes * found by the discovery mechanism. * * @return List of all discovered {@link GremlinScriptEngineFactory} objects. */ @Override public List<GremlinScriptEngineFactory> getEngineFactories() { final List<GremlinScriptEngineFactory> res = new ArrayList<>(engineSpis.size()); res.addAll(engineSpis.stream().collect(Collectors.toList())); return Collections.unmodifiableList(res); } /** * Registers a {@link GremlinScriptEngineFactory} to handle a language name. Overrides any such association found * using the discovery mechanism. * * @param name The name to be associated with the {@link GremlinScriptEngineFactory} * @param factory The class to associate with the given name. * @throws NullPointerException if any of the parameters is null. */ @Override public void registerEngineName(final String name, final GremlinScriptEngineFactory factory) { if (null == name || null == factory) throw new NullPointerException(); nameAssociations.put(name, factory); } /** * Registers a {@link GremlinScriptEngineFactory} to handle a mime type. 
Overrides any such association found using * the discovery mechanism. * * @param type The mime type to be associated with the {@link GremlinScriptEngineFactory}. * @param factory The class to associate with the given mime type. * @throws NullPointerException if any of the parameters is null. */ @Override public void registerEngineMimeType(final String type, final GremlinScriptEngineFactory factory) { if (null == type || null == factory) throw new NullPointerException(); mimeTypeAssociations.put(type, factory); } /** * Registers a {@link GremlinScriptEngineFactory} to handle an extension. Overrides any such association found * using the discovery mechanism. * * @param extension The extension type to be associated with the {@link GremlinScriptEngineFactory} * @param factory The class to associate with the given extension. * @throws NullPointerException if any of the parameters is null. */ @Override public void registerEngineExtension(final String extension, final GremlinScriptEngineFactory factory) { if (null == extension || null == factory) throw new NullPointerException(); extensionAssociations.put(extension, factory); } private ServiceLoader<GremlinScriptEngineFactory> getServiceLoader(final ClassLoader loader) { if (loader != null) { return ServiceLoader.load(GremlinScriptEngineFactory.class, loader); } else { return ServiceLoader.loadInstalled(GremlinScriptEngineFactory.class); } } private void initEngines(final ClassLoader loader) { // always need this module for a scriptengine to be "Gremlin-enabled" modules.add(CoreGremlinModule.INSTANCE); Iterator<GremlinScriptEngineFactory> itty; try { final ServiceLoader<GremlinScriptEngineFactory> sl = AccessController.doPrivileged( (PrivilegedAction<ServiceLoader<GremlinScriptEngineFactory>>) () -> getServiceLoader(loader)); itty = sl.iterator(); } catch (ServiceConfigurationError err) { System.err.println("Can't find GremlinScriptEngineFactory providers: " + err.getMessage()); if (DEBUG) err.printStackTrace(); // do not 
throw any exception here. user may want to manager their own factories using this manager // by explicit registration (by registerXXX) methods. return; } try { while (itty.hasNext()) { try { final GremlinScriptEngineFactory factory = itty.next(); factory.setCustomizerManager(this); engineSpis.add(factory); } catch (ServiceConfigurationError err) { System.err.println("GremlinScriptEngineManager providers.next(): " + err.getMessage()); if (DEBUG) err.printStackTrace(); } } } catch (ServiceConfigurationError err) { System.err.println("GremlinScriptEngineManager providers.hasNext(): " + err.getMessage()); if (DEBUG) err.printStackTrace(); // do not throw any exception here. user may want to manage their own factories using this manager // by explicit registration (by registerXXX) methods. } } private GremlinScriptEngine createGremlinScriptEngine(final GremlinScriptEngineFactory spi) { final GremlinScriptEngine engine = spi.getScriptEngine(); engine.setBindings(getBindings(), ScriptContext.GLOBAL_SCOPE); return engine; } }
package com.akjava.gwt.threejsexamples.client.examples;

import com.akjava.gwt.lib.client.JavaScriptUtils;
import com.akjava.gwt.stats.client.Stats;
import com.akjava.gwt.three.client.examples.js.THREEExp;
import com.akjava.gwt.three.client.examples.js.controls.OrbitControls;
import com.akjava.gwt.three.client.gwt.GWTParamUtils;
import com.akjava.gwt.three.client.gwt.JSParameter;
import com.akjava.gwt.three.client.gwt.core.Intersect;
import com.akjava.gwt.three.client.java.ui.example.AbstractExample;
import com.akjava.gwt.three.client.java.utils.GWTThreeUtils;
import com.akjava.gwt.three.client.js.THREE;
import com.akjava.gwt.three.client.js.cameras.PerspectiveCamera;
import com.akjava.gwt.three.client.js.core.Clock;
import com.akjava.gwt.three.client.js.core.Geometry;
import com.akjava.gwt.three.client.js.core.Object3D;
import com.akjava.gwt.three.client.js.core.Raycaster;
import com.akjava.gwt.three.client.js.extras.ImageUtils;
import com.akjava.gwt.three.client.js.lights.DirectionalLight;
import com.akjava.gwt.three.client.js.loaders.JSONLoader;
import com.akjava.gwt.three.client.js.loaders.JSONLoader.JSONLoadHandler;
import com.akjava.gwt.three.client.js.materials.Material;
import com.akjava.gwt.three.client.js.materials.MeshPhongMaterial;
import com.akjava.gwt.three.client.js.math.Euler;
import com.akjava.gwt.three.client.js.math.Matrix4;
import com.akjava.gwt.three.client.js.math.Vector2;
import com.akjava.gwt.three.client.js.math.Vector3;
import com.akjava.gwt.three.client.js.objects.Line;
import com.akjava.gwt.three.client.js.objects.Mesh;
import com.akjava.gwt.three.client.js.renderers.WebGLRenderer;
import com.akjava.gwt.three.client.js.scenes.Scene;
import com.akjava.gwt.three.client.js.textures.Texture;
import com.akjava.gwt.threejsexamples.client.LabeledInputRangeWidget;
import com.google.gwt.core.client.JsArray;
import com.google.gwt.event.dom.client.ChangeEvent;
import com.google.gwt.event.dom.client.ChangeHandler;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.dom.client.MouseDownEvent;
import com.google.gwt.event.dom.client.MouseDownHandler;
import com.google.gwt.event.dom.client.MouseMoveEvent;
import com.google.gwt.event.dom.client.MouseMoveHandler;
import com.google.gwt.event.dom.client.MouseUpEvent;
import com.google.gwt.event.dom.client.MouseUpHandler;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.user.client.ui.Button;
import com.google.gwt.user.client.ui.CheckBox;
import com.google.gwt.user.client.ui.FocusPanel;
import com.google.gwt.user.client.ui.ListBox;
import com.google.gwt.user.client.ui.VerticalPanel;

/**
 * GWT port of the three.js "decal splatter" example. Loads the LeePerrySmith
 * head model and lets the user shoot decals onto it: mouse moves raycast
 * against the mesh to position a helper box and guide line, and a click (that
 * did not drag) adds a DecalGeometry mesh at the intersection point. The
 * trailing {@code //} comments throughout preserve the original JavaScript
 * source lines this class was ported from.
 */
public class DecalExample extends AbstractExample{

	@Override
	public String getName() {
		return "decal";
	}

	// per-frame callback: render the scene and refresh the FPS meter
	@Override
	public void animate(double timestamp) {
		render(timestamp);
		stats.update();//really deprecate?many still use this
	}

	private WebGLRenderer renderer;
	private Scene scene;
	private PerspectiveCamera camera;
	private Stats stats;
	double SCREEN_WIDTH;
	double SCREEN_HEIGHT;
	private int windowHalfX,windowHalfY;
	//private int mouseX,mouseY;
	Clock clock;
	double fov;
	Vector3 cameraTarget;
	private OrbitControls controls;
	// visual guide from the hit point along the (scaled) surface normal
	private Line line;
	private Raycaster raycaster;
	// small box that tracks the mouse hit point/orientation (kept invisible)
	private Mesh mouseHelper;
	// normalized device coordinates of the pointer, -1..1 on both axes
	private Vector2 mouse;
	// the loaded LeePerrySmith head; null until the JSON load completes
	private Mesh mesh;
	// JS-object record: {intersects: boolean, point: Vector3, normal: Vector3}
	private JSParameter intersection;
	// all decal meshes added so far, so removeDecals() can clear them
	private JsArray<Mesh> decals;

	@Override
	public void init() {
		decals=JavaScriptUtils.createJSArray();

		Texture decalDiffuse = ImageUtils.loadTexture( "textures/decal/decal-diffuse.png" );
		Texture decalNormal = ImageUtils.loadTexture( "textures/decal/decal-normal.jpg" );

		// shared template material; shoot() clones it and randomizes the color per decal
		decalMaterial = THREE.MeshPhongMaterial( GWTParamUtils.MeshPhongMaterial().specular(0x444444).map(decalDiffuse).normalMap(decalNormal).normalScale(THREE.Vector2(1,1)).shininess(30).transparent(true).depthTest(true).depthWrite(false).polygonOffset(true).polygonOffsetFactor(- 4).wireframe(false) );//var decalMaterial = new THREE.MeshPhongMaterial( {specular: 0x444444,map: decalDiffuse,normalMap: decalNormal,normalScale: new THREE.Vector2( 1, 1 ),shininess: 30,transparent: true,depthTest: true,depthWrite: false,polygonOffset: true,polygonOffsetFactor: - 4,wireframe: false} );

		// scratch position/rotation/scale reused by shoot() for each decal
		p = THREE.Vector3( 0, 0, 0 );
		r = THREE.Euler( 0, 0, 0 );
		s = THREE.Vector3( 10, 10, 10 );
		up = THREE.Vector3( 0, 1, 0 );
		check = THREE.Vector3( 1, 1, 1 );
		intersection=JSParameter.createParameter().set("intersects", false).set("point", THREE.Vector3()).set("normal", THREE.Vector3());
		fov=45;
		mouse=THREE.Vector2();
		clock=THREE.Clock();

		SCREEN_WIDTH = getWindowInnerWidth();
		SCREEN_HEIGHT = getWindowInnerHeight();
		windowHalfX= (int)(SCREEN_WIDTH/2);
		windowHalfY= (int)(SCREEN_HEIGHT/2);

		FocusPanel container = createContainerPanel();

		// renderer
		renderer = THREE.WebGLRenderer( GWTParamUtils.WebGLRenderer().antialias(true) );//THREE.WebGLRenderer( { antialias: true } );
		renderer.setPixelRatio( GWTThreeUtils.getWindowDevicePixelRatio() );
		renderer.setSize( SCREEN_WIDTH, SCREEN_HEIGHT );
		container.getElement().appendChild( renderer.getDomElement() );

		// scene
		scene = THREE.Scene();

		// camera
		// NOTE(review): if getWindowInnerWidth()/getWindowInnerHeight() return int,
		// this aspect ratio is computed with integer division — confirm return type
		camera = THREE.PerspectiveCamera(55, getWindowInnerWidth()/getWindowInnerHeight(), 1, 1000);
		camera.getPosition().set(0, 0, 100);
		cameraTarget=THREE.Vector3();

		controls = THREEExp.OrbitControls( camera, renderer.getDomElement());//controls = new THREE.OrbitControls( camera, renderer.domElement );
		controls.setMinDistance(50);//controls.minDistance = 50;
		controls.setMaxDistance(200);//controls.maxDistance = 200;

		scene.add( THREE.AmbientLight( 0x443333 ) );//scene.add( new THREE.AmbientLight( 0x443333 ) );

		DirectionalLight light = THREE.DirectionalLight( 0xffddcc, 1 );//var light = new THREE.DirectionalLight( 0xffddcc, 1 );
		light.getPosition().set( 1, 0.75, 0.5 );//light.position.set( 1, 0.75, 0.5 );
		scene.add( light );

		light = THREE.DirectionalLight( 0xccccff, 1 );//var light = new THREE.DirectionalLight( 0xccccff, 1 );
		light.getPosition().set( -1, 0.75, -0.5 );//light.position.set( -1, 0.75, -0.5 );
		scene.add( light );

		// two-vertex geometry for the guide line; vertices updated in checkIntersection()
		Geometry geometry = THREE.Geometry();//var geometry = new THREE.Geometry();
		geometry.getVertices().push( THREE.Vector3() );//geometry.vertices.push( new THREE.Vector3(), new THREE.Vector3() );
		geometry.getVertices().push( THREE.Vector3() );

		line = THREE.Line( geometry, THREE.LineBasicMaterial( GWTParamUtils.LineBasicMaterial().linewidth(4) ) );//line = new THREE.Line( geometry, new THREE.LineBasicMaterial( { linewidth: 4 } ) );
		scene.add( line );

		loadLeePerrySmith();

		raycaster = THREE.Raycaster();//raycaster = new THREE.Raycaster();

		mouseHelper = THREE.Mesh( THREE.BoxGeometry( 1, 1, 10 ), THREE.MeshNormalMaterial() );//mouseHelper = new THREE.Mesh( new THREE.BoxGeometry( 1, 1, 10 ), new THREE.MeshNormalMaterial() );
		mouseHelper.setVisible(false);//mouseHelper.visible = false;
		scene.add( mouseHelper );

		// a decal is shot on mouse-up only if the pointer did not move in between
		container.addMouseDownHandler(new MouseDownHandler() {
			@Override
			public void onMouseDown(MouseDownEvent event) {
				moved = false;
			}
		});
		container.addMouseUpHandler(new MouseUpHandler() {
			@Override
			public void onMouseUp(MouseUpEvent event) {
				checkIntersection();
				if ( ! moved ) shoot();
			}
		});
		container.addMouseMoveHandler(new MouseMoveHandler() {
			@Override
			public void onMouseMove(MouseMoveEvent event) {
				//clientX is not support frame
				int x=event.getX();
				int y=event.getY();
				// NOTE(review): x and y are int — if getWindowInnerWidth()/Height()
				// also return int, these divisions truncate before scaling and the
				// NDC range collapses; confirm the intended -1..1 mapping
				mouse.setX(( x / getWindowInnerWidth() ) * 2 - 1);//mouse.x = ( x / window.innerWidth ) * 2 - 1;
				mouse.setY(- ( y / getWindowInnerHeight() ) * 2 + 1);//mouse.y = - ( y / window.innerHeight ) * 2 + 1;
				checkIntersection();
			}
		});
		//TODO
		//container.addTouchMoveHandler(new )
		//todo change on UI

		//stats
		stats = Stats.create();
		stats.setPosition(0, 0);
		container.getElement().appendChild(stats.domElement());

		//add html info
		container.add(createAbsoluteHTML("<strong>Decal Splatter</strong><br />Click or tap to shoot.</p>" ,100,10));

		//handle resize & gui
		initResizeHandlerAndGUI();
		//setDebugAnimateOneTimeOnly(true);
	}

	// when true, decals are oriented from the camera toward the mesh instead of
	// along the surface normal (selected via the GUI list box)
	boolean projectionCamera=false;
	private boolean isProjectionCamera(){
		return projectionCamera;
	}
	private double getMaxScale(){
		return maxScale;
	}
	private double getMinScale(){
		return minScale;
	}
	private boolean isRotate(){
		return rotate;
	}

	/**
	 * Creates one decal mesh at the last intersection point and adds it to the
	 * scene. Orientation comes either from the camera direction ("From cam to
	 * mesh" mode) or from the mouse helper's rotation (surface-normal mode);
	 * scale and z-rotation are randomized from the GUI-configured ranges.
	 */
	protected void shoot() {
		if ( isProjectionCamera()){//if ( params.projection == 'camera' ) {
			Vector3 dir = cameraTarget.clone();//var dir = camera.target.clone();
			dir.sub( camera.getPosition());//dir.sub( camera.position );

			p = intersection.getObject("point").cast();

			// build a rotation that looks from the hit point back along the view direction
			Matrix4 m = THREE.Matrix4();//var m = new THREE.Matrix4();
			Vector3 c = dir.clone();
			c.negate();
			c.multiplyScalar( 10 );
			c.add( p );
			m.lookAt( p, c, up );
			m = m.extractRotation( m );

			Object3D dummy = THREE.Object3D();//dummy = new THREE.Object3D();
			dummy.getRotation().setFromRotationMatrix( m );//dummy.rotation.setFromRotationMatrix( m );
			r.set( dummy.getRotation().getX(), dummy.getRotation().getY(), dummy.getRotation().getZ());//r.set( dummy.rotation.x, dummy.rotation.y, dummy.rotation.z );
		} else {
			p = intersection.getObject("point").cast();
			r.copy( mouseHelper.getRotation());//r.copy( mouseHelper.rotation );
		}

		double scale = getMinScale() + Math.random() * (getMaxScale() -getMinScale());//var scale = params.minScale + Math.random() * ( params.maxScale - params.minScale );
		s.set( scale, scale, scale );

		if ( isRotate()) r.setZ(Math.random() * 2 * Math.PI);//if ( params.rotate ) r.z = Math.random() * 2 * Math.PI;

		MeshPhongMaterial material = decalMaterial.clone().cast();
		material.getColor().setHex( (int)(Math.random() * 0xffffff) );//material.color.setHex( Math.random() * 0xffffff );

		Mesh m = THREE.Mesh( THREEExp.DecalGeometry( mesh, p, r, s, check ), material );//var m = new THREE.Mesh( new THREE.DecalGeometry( mesh, p, r, s, check ), material );

		decals.push( m );
		scene.add( m );
	}

	/**
	 * Raycasts from the camera through the current mouse NDC position against
	 * the loaded mesh. On a hit it moves/orients the mouse helper, updates the
	 * guide line's two vertices, and records the hit point/normal in
	 * {@code intersection}; otherwise it just clears the "intersects" flag.
	 */
	protected void checkIntersection() {
		if (mesh==null ) return;//not loaded

		raycaster.setFromCamera( mouse, camera );

		JsArray<Mesh> array=JavaScriptUtils.createJSArray(mesh);
		JsArray<Intersect> intersects = raycaster.intersectObjects(array);

		if ( intersects.length() > 0 ) {

			Vector3 p = intersects.get(0).getPoint();
			mouseHelper.getPosition().copy( p );//mouseHelper.position.copy( p );

			Vector3 pt=intersection.getObject("point").cast();
			pt.copy( p );//intersection.point.copy( p );

			// second line endpoint: hit point pushed 10 units along the face normal
			Vector3 n = intersects.get(0).getFace().getNormal().clone();//var n = intersects[ 0 ].face.normal.clone();
			n.multiplyScalar( 10 );
			n.add( p);//n.add( intersects[ 0 ].point );

			Vector3 normal=intersection.getObject("normal").cast();
			normal.copy( intersects.get(0).getFace().getNormal());//intersection.normal.copy( intersects[ 0 ].face.normal );

			mouseHelper.lookAt( n );

			line.getGeometry().getVertices().get(0).copy( pt);//line.geometry.vertices[ 0 ].copy( intersection.point );
			line.getGeometry().getVertices().get(1).copy( n );//line.geometry.vertices[ 1 ].copy( n );
			line.getGeometry().setVerticesNeedUpdate(true);//line.geometry.verticesNeedUpdate = true;

			intersection.set("intersects",true);//intersection.intersects = true;

		} else {
			intersection.set("intersects",false);//intersection.intersects = false;
		}
	}

	// true once the pointer moved between mouse-down and mouse-up (suppresses shoot())
	private boolean moved;
	private Vector3 up;
	private Euler r;
	private Vector3 p;
	private Vector3 s;
	private MeshPhongMaterial decalMaterial;
	private Vector3 check;
	private int maxScale=20;
	private int minScale=10;
	private boolean rotate=true;

	// removes every decal mesh from the scene and resets the tracking array
	private void removeDecals(){
		for(int i=0;i<decals.length();i++){
			scene.remove(decals.get(i));
			decals.set(i, null);
		}
		decals=JavaScriptUtils.createJSArray();
	}

	// asynchronously loads the head model; sets the `mesh` field when done
	private void loadLeePerrySmith() {
		JSONLoader loader=THREE.JSONLoader();
		loader.load("obj/leeperrysmith/LeePerrySmith.js", new JSONLoadHandler() {
			@Override
			public void loaded(Geometry geometry, JsArray<Material> materials) {
				MeshPhongMaterial material = THREE.MeshPhongMaterial( GWTParamUtils.MeshPhongMaterial().specular(0x111111).map(ImageUtils.loadTexture( "obj/leeperrysmith/Map-COL.jpg" )).specularMap(ImageUtils.loadTexture( "obj/leeperrysmith/Map-SPEC.jpg" )).normalMap(ImageUtils.loadTexture( "obj/leeperrysmith/Infinite-Level_02_Tangent_SmoothUV.jpg" )).normalScale(THREE.Vector2( 0.75,0.75)).shininess(25) );//var material = new THREE.MeshPhongMaterial( {specular: 0x111111,map: THREE.ImageUtils.loadTexture( 'obj/leeperrysmith/Map-COL.jpg' ),specularMap: THREE.ImageUtils.loadTexture( 'obj/leeperrysmith/Map-SPEC.jpg' ),normalMap: THREE.ImageUtils.loadTexture( 'obj/leeperrysmith/Infinite-Level_02_Tangent_SmoothUV.jpg' ),normalScale: new THREE.Vector2( 0.75, 0.75 ),shininess: 25} );
				mesh = THREE.Mesh( geometry, material );//mesh = new THREE.Mesh( geometry, material );
				scene.add( mesh );
				mesh.getScale().set( 10, 10, 10 );//mesh.scale.set( 10, 10, 10 );
			}
		});
	}

	// builds the side panel: projection mode, min/max decal scale, rotate toggle, clear button
	private void initResizeHandlerAndGUI() {
		VerticalPanel gui=addResizeHandlerAndCreateGUIPanel();
		gui.setWidth("200px");//some widget broke,like checkbox without parent size
		gui.setSpacing(2);

		final ListBox listBox=new ListBox();
		gui.add(listBox);
		listBox.addItem("From cam to mesh");
		listBox.addItem("Normal to mesh");
		listBox.setSelectedIndex(1);
		listBox.addChangeHandler(new ChangeHandler() {
			@Override
			public void onChange(ChangeEvent event) {
				projectionCamera=listBox.getSelectedIndex()==0;
			}
		});

		LabeledInputRangeWidget minInput=new LabeledInputRangeWidget("minScale", 1, 30,1);
		gui.add(minInput);
		minInput.setValue(10);
		minInput.addtRangeListener(new ValueChangeHandler<Number>() {
			@Override
			public void onValueChange(ValueChangeEvent<Number> event) {
				minScale=event.getValue().intValue();
			}
		});
		// NOTE(review): minInput was already added to gui above — duplicate add
		gui.add(minInput);

		LabeledInputRangeWidget maxInput=new LabeledInputRangeWidget("maxScale", 1, 30,1);
		gui.add(maxInput);
		maxInput.setValue(20);
		maxInput.addtRangeListener(new ValueChangeHandler<Number>() {
			@Override
			public void onValueChange(ValueChangeEvent<Number> event) {
				maxScale=event.getValue().intValue();
			}
		});
		// NOTE(review): maxInput was already added to gui above — duplicate add
		gui.add(maxInput);

		CheckBox rotateCheck=new CheckBox("rotate");
		rotateCheck.addValueChangeHandler(new ValueChangeHandler<Boolean>() {
			@Override
			public void onValueChange(ValueChangeEvent<Boolean> event) {
				rotate=event.getValue();
			}
		});
		gui.add(rotateCheck);
		rotateCheck.setValue(true);

		Button clearBt=new Button("clear",new ClickHandler() {
			@Override
			public void onClick(ClickEvent event) {
				removeDecals();
			}
		});
		gui.add(clearBt);
	}

	// keeps the camera aspect and renderer size in sync with the window
	public void onWindowResize() {
		SCREEN_WIDTH = getWindowInnerWidth();
		SCREEN_HEIGHT = getWindowInnerHeight();
		//re read because of double
		camera.setAspect(SCREEN_WIDTH / SCREEN_HEIGHT);
		camera.updateProjectionMatrix();
		renderer.setSize( SCREEN_WIDTH , SCREEN_HEIGHT );
	}

	public void render(double now) {
		renderer.render(scene, camera);
	}

	@Override
	public String getTokenKey() {
		return "decal";
	}
}
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.jetbrains.python.fixtures; import com.google.common.base.Joiner; import com.google.common.collect.Lists; import com.intellij.application.options.CodeStyle; import com.intellij.codeInsight.lookup.LookupElement; import com.intellij.codeInsight.lookup.LookupEx; import com.intellij.find.findUsages.CustomUsageSearcher; import com.intellij.find.findUsages.FindUsagesOptions; import com.intellij.openapi.actionSystem.IdeActions; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.WriteAction; import com.intellij.openapi.command.CommandProcessor; import com.intellij.openapi.command.WriteCommandAction; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.ex.EditorEx; import com.intellij.openapi.module.Module; import com.intellij.openapi.projectRoots.Sdk; import com.intellij.openapi.projectRoots.SdkModificator; import com.intellij.openapi.roots.OrderRootType; import com.intellij.openapi.roots.impl.FilePropertyPusher; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.Ref; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.StandardFileSystems; import com.intellij.openapi.vfs.VfsUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.platform.DirectoryProjectConfigurator; import com.intellij.psi.*; import com.intellij.psi.codeStyle.CodeStyleManager; import com.intellij.psi.codeStyle.CodeStyleSettings; import com.intellij.psi.codeStyle.CommonCodeStyleSettings; import com.intellij.psi.search.searches.ReferencesSearch; import com.intellij.refactoring.RefactoringActionHandler; import com.intellij.testFramework.*; import com.intellij.testFramework.fixtures.*; import com.intellij.testFramework.fixtures.impl.LightTempDirTestFixtureImpl; import 
com.intellij.usageView.UsageInfo; import com.intellij.usages.Usage; import com.intellij.usages.rules.PsiElementUsage; import com.intellij.util.CommonProcessors.CollectProcessor; import com.intellij.util.IncorrectOperationException; import com.jetbrains.python.PythonHelpersLocator; import com.jetbrains.python.PythonLanguage; import com.jetbrains.python.PythonTestUtil; import com.jetbrains.python.codeInsight.completion.PyModuleNameCompletionContributor; import com.jetbrains.python.documentation.PyDocumentationSettings; import com.jetbrains.python.documentation.PythonDocumentationProvider; import com.jetbrains.python.documentation.docstrings.DocStringFormat; import com.jetbrains.python.namespacePackages.PyNamespacePackagesService; import com.jetbrains.python.psi.*; import com.jetbrains.python.psi.impl.PyFileImpl; import com.jetbrains.python.psi.impl.PythonLanguageLevelPusher; import com.jetbrains.python.psi.search.PySearchUtilBase; import com.jetbrains.python.psi.types.PyType; import com.jetbrains.python.psi.types.TypeEvalContext; import com.jetbrains.python.sdk.PythonSdkUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.junit.Assert; import javax.swing.*; import java.io.File; import java.util.*; import java.util.function.Consumer; /** * @author yole */ @TestDataPath("$CONTENT_ROOT/../testData/") public abstract class PyTestCase extends UsefulTestCase { protected static final PyLightProjectDescriptor ourPy2Descriptor = new PyLightProjectDescriptor(LanguageLevel.PYTHON27); protected static final PyLightProjectDescriptor ourPyLatestDescriptor = new PyLightProjectDescriptor(LanguageLevel.getLatest()); protected CodeInsightTestFixture myFixture; protected void assertProjectFilesNotParsed(@NotNull PsiFile currentFile) { assertRootNotParsed(currentFile, myFixture.getTempDirFixture().getFile("."), null); } protected void assertProjectFilesNotParsed(@NotNull TypeEvalContext context) { assertRootNotParsed(context.getOrigin(), 
myFixture.getTempDirFixture().getFile("."), context); } protected void assertSdkRootsNotParsed(@NotNull PsiFile currentFile) { final Sdk testSdk = PythonSdkUtil.findPythonSdk(currentFile); for (VirtualFile root : testSdk.getRootProvider().getFiles(OrderRootType.CLASSES)) { assertRootNotParsed(currentFile, root, null); } } private void assertRootNotParsed(@NotNull PsiFile currentFile, @NotNull VirtualFile root, @Nullable TypeEvalContext context) { for (VirtualFile file : VfsUtil.collectChildrenRecursively(root)) { final PyFile pyFile = PyUtil.as(myFixture.getPsiManager().findFile(file), PyFile.class); if (pyFile != null && !pyFile.equals(currentFile) && (context == null || !context.maySwitchToAST(pyFile))) { assertNotParsed(pyFile); } } } @Nullable protected static VirtualFile getVirtualFileByName(String fileName) { final VirtualFile path = LocalFileSystem.getInstance().findFileByPath(fileName.replace(File.separatorChar, '/')); if (path != null) { refreshRecursively(path); return path; } return null; } /** * Reformats currently configured file. 
*/ protected final void reformatFile() { WriteCommandAction.runWriteCommandAction(null, () -> doPerformFormatting()); } private void doPerformFormatting() throws IncorrectOperationException { final PsiFile file = myFixture.getFile(); final TextRange myTextRange = file.getTextRange(); CodeStyleManager.getInstance(myFixture.getProject()).reformatText(file, myTextRange.getStartOffset(), myTextRange.getEndOffset()); } @Override protected void setUp() throws Exception { initApplication(); super.setUp(); IdeaTestFixtureFactory factory = IdeaTestFixtureFactory.getFixtureFactory(); TestFixtureBuilder<IdeaProjectTestFixture> fixtureBuilder = factory.createLightFixtureBuilder(getProjectDescriptor()); final IdeaProjectTestFixture fixture = fixtureBuilder.getFixture(); myFixture = IdeaTestFixtureFactory.getFixtureFactory().createCodeInsightFixture(fixture, createTempDirFixture()); myFixture.setTestDataPath(getTestDataPath()); if (SwingUtilities.isEventDispatchThread()) { myFixture.setUp(); } else { ApplicationManager.getApplication().invokeAndWait(() -> { try { myFixture.setUp(); } catch (final Exception e) { throw new RuntimeException("Error running setup", e); } }); } } private static void initApplication() { TestApplicationManager.getInstance(); } /** * @return fixture to be used as temporary dir. 
*/ @NotNull protected TempDirTestFixture createTempDirFixture() { return new LightTempDirTestFixtureImpl(true); // "tmp://" dir by default } protected void runWithAdditionalFileInLibDir(@NotNull String relativePath, @NotNull String text, @NotNull Consumer<VirtualFile> fileConsumer) { final Sdk sdk = PythonSdkUtil.findPythonSdk(myFixture.getModule()); final VirtualFile libDir = PySearchUtilBase.findLibDir(sdk); if (libDir != null) { runWithAdditionalFileIn(relativePath, text, libDir, fileConsumer); } else { createAdditionalRootAndRunWithIt( sdk, "Lib", OrderRootType.CLASSES, root -> runWithAdditionalFileIn(relativePath, text, root, fileConsumer) ); } } protected void runWithAdditionalFileInSkeletonDir(@NotNull String relativePath, @NotNull String text, @NotNull Consumer<VirtualFile> fileConsumer) { final Sdk sdk = PythonSdkUtil.findPythonSdk(myFixture.getModule()); final VirtualFile skeletonsDir = PythonSdkUtil.findSkeletonsDir(sdk); if (skeletonsDir != null) { runWithAdditionalFileIn(relativePath, text, skeletonsDir, fileConsumer); } else { createAdditionalRootAndRunWithIt( sdk, PythonSdkUtil.SKELETON_DIR_NAME, PythonSdkUtil.BUILTIN_ROOT_TYPE, root -> runWithAdditionalFileIn(relativePath, text, root, fileConsumer) ); } } private static void runWithAdditionalFileIn(@NotNull String relativePath, @NotNull String text, @NotNull VirtualFile dir, @NotNull Consumer<VirtualFile> fileConsumer) { final VirtualFile file = VfsTestUtil.createFile(dir, relativePath, text); try { fileConsumer.accept(file); } finally { VfsTestUtil.deleteFile(file); } } protected void runWithAdditionalClassEntryInSdkRoots(@NotNull VirtualFile directory, @NotNull Runnable runnable) { final Sdk sdk = PythonSdkUtil.findPythonSdk(myFixture.getModule()); assertNotNull(sdk); runWithAdditionalRoot(sdk, directory, OrderRootType.CLASSES, (__) -> runnable.run()); } protected void runWithAdditionalClassEntryInSdkRoots(@NotNull String relativeTestDataPath, @NotNull Runnable runnable) { final String absPath = 
getTestDataPath() + "/" + relativeTestDataPath; final VirtualFile testDataDir = StandardFileSystems.local().findFileByPath(absPath); assertNotNull("Additional class entry directory '" + absPath + "' not found", testDataDir); runWithAdditionalClassEntryInSdkRoots(testDataDir, runnable); } private static void createAdditionalRootAndRunWithIt(@NotNull Sdk sdk, @NotNull String rootRelativePath, @NotNull OrderRootType rootType, @NotNull Consumer<VirtualFile> rootConsumer) { final VirtualFile tempRoot = VfsTestUtil.createDir(sdk.getHomeDirectory().getParent().getParent(), rootRelativePath); try { runWithAdditionalRoot(sdk, tempRoot, rootType, rootConsumer); } finally { VfsTestUtil.deleteFile(tempRoot); } } private static void runWithAdditionalRoot(@NotNull Sdk sdk, @NotNull VirtualFile root, @NotNull OrderRootType rootType, @NotNull Consumer<VirtualFile> rootConsumer) { WriteAction.run(() -> { final SdkModificator modificator = sdk.getSdkModificator(); assertNotNull(modificator); modificator.addRoot(root, rootType); modificator.commitChanges(); }); try { rootConsumer.accept(root); } finally { WriteAction.run(() -> { final SdkModificator modificator = sdk.getSdkModificator(); assertNotNull(modificator); modificator.removeRoot(root, rootType); modificator.commitChanges(); }); } } protected String getTestDataPath() { return PythonTestUtil.getTestDataPath(); } @Override protected void tearDown() throws Exception { try { PyNamespacePackagesService.getInstance(myFixture.getModule()).resetAllNamespacePackages(); PyModuleNameCompletionContributor.ENABLED = true; setLanguageLevel(null); myFixture.tearDown(); myFixture = null; FilePropertyPusher.EP_NAME.findExtensionOrFail(PythonLanguageLevelPusher.class).flushLanguageLevelCache(); } catch (Throwable e) { addSuppressedException(e); } finally { super.tearDown(); } } @Nullable protected LightProjectDescriptor getProjectDescriptor() { return ourPyLatestDescriptor; } @Nullable protected PsiReference findReferenceBySignature(final 
String signature) { int pos = findPosBySignature(signature); return findReferenceAt(pos); } @Nullable protected PsiReference findReferenceAt(int pos) { return myFixture.getFile().findReferenceAt(pos); } protected int findPosBySignature(String signature) { return PsiDocumentManager.getInstance(myFixture.getProject()).getDocument(myFixture.getFile()).getText().indexOf(signature); } private void setLanguageLevel(@Nullable LanguageLevel languageLevel) { PythonLanguageLevelPusher.setForcedLanguageLevel(myFixture.getProject(), languageLevel); } protected void runWithLanguageLevel(@NotNull LanguageLevel languageLevel, @NotNull Runnable runnable) { setLanguageLevel(languageLevel); try { runnable.run(); } finally { setLanguageLevel(null); } } protected void runWithDocStringFormat(@NotNull DocStringFormat format, @NotNull Runnable runnable) { final PyDocumentationSettings settings = PyDocumentationSettings.getInstance(myFixture.getModule()); final DocStringFormat oldFormat = settings.getFormat(); settings.setFormat(format); try { runnable.run(); } finally { settings.setFormat(oldFormat); } } protected void runWithSourceRoots(@NotNull List<VirtualFile> sourceRoots, @NotNull Runnable runnable) { final Module module = myFixture.getModule(); sourceRoots.forEach(root -> PsiTestUtil.addSourceRoot(module, root)); try { runnable.run(); } finally { sourceRoots.forEach(root -> PsiTestUtil.removeSourceRoot(module, root)); } } protected static void assertNotParsed(PsiFile file) { assertInstanceOf(file, PyFileImpl.class); assertNull("Operations should have been performed on stubs but caused file to be parsed: " + file.getVirtualFile().getPath(), ((PyFileImpl)file).getTreeElement()); } /** * @param name * @return class by its name from file */ @NotNull protected PyClass getClassByName(@NotNull final String name) { return myFixture.findElementByText("class " + name, PyClass.class); } /** * @see #moveByText(com.intellij.testFramework.fixtures.CodeInsightTestFixture, String) */ protected 
void moveByText(@NotNull final String testToFind) { moveByText(myFixture, testToFind); } /** * Finds some text and moves cursor to it (if found) * * @param fixture test fixture * @param testToFind text to find * @throws AssertionError if element not found */ public static void moveByText(@NotNull final CodeInsightTestFixture fixture, @NotNull final String testToFind) { final PsiElement element = fixture.findElementByText(testToFind, PsiElement.class); assert element != null : "No element found by text: " + testToFind; fixture.getEditor().getCaretModel().moveToOffset(element.getTextOffset()); } /** * Finds all usages of element. Works much like method in {@link com.intellij.testFramework.fixtures.CodeInsightTestFixture#findUsages(com.intellij.psi.PsiElement)}, * but supports {@link com.intellij.find.findUsages.CustomUsageSearcher} and {@link com.intellij.psi.search.searches.ReferencesSearch} as well * * @param element what to find * @return usages */ @NotNull protected Collection<PsiElement> findUsage(@NotNull final PsiElement element) { final Collection<PsiElement> result = new ArrayList<>(); final CollectProcessor<Usage> usageCollector = new CollectProcessor<>(); for (final CustomUsageSearcher searcher : CustomUsageSearcher.EP_NAME.getExtensions()) { searcher.processElementUsages(element, usageCollector, new FindUsagesOptions(myFixture.getProject())); } for (final Usage usage : usageCollector.getResults()) { if (usage instanceof PsiElementUsage) { result.add(((PsiElementUsage)usage).getElement()); } } for (final PsiReference reference : ReferencesSearch.search(element).findAll()) { result.add(reference.getElement()); } for (final UsageInfo info : myFixture.findUsages(element)) { result.add(info.getElement()); } return result; } /** * Returns elements certain element allows to navigate to (emulates CTRL+Click, actually). 
* You need to pass element as argument or * make sure your fixture is configured for some element (see {@link com.intellij.testFramework.fixtures.CodeInsightTestFixture#getElementAtCaret()}) * * @param element element to fetch navigate elements from (may be null: element under caret would be used in this case) * @return elements to navigate to */ @NotNull protected Set<PsiElement> getElementsToNavigate(@Nullable final PsiElement element) { final Set<PsiElement> result = new HashSet<>(); final PsiElement elementToProcess = ((element != null) ? element : myFixture.getElementAtCaret()); for (final PsiReference reference : elementToProcess.getReferences()) { final PsiElement directResolve = reference.resolve(); if (directResolve != null) { result.add(directResolve); } if (reference instanceof PsiPolyVariantReference) { for (final ResolveResult resolveResult : ((PsiPolyVariantReference)reference).multiResolve(true)) { result.add(resolveResult.getElement()); } } } return result; } /** * Clears provided file * * @param file file to clear */ protected void clearFile(@NotNull final PsiFile file) { CommandProcessor.getInstance().executeCommand(myFixture.getProject(), () -> ApplicationManager.getApplication().runWriteAction(() -> { for (final PsiElement element : file.getChildren()) { element.delete(); } }), null, null); } /** * Runs refactoring using special handler * * @param handler handler to be used */ protected void refactorUsingHandler(@NotNull final RefactoringActionHandler handler) { final Editor editor = myFixture.getEditor(); assertInstanceOf(editor, EditorEx.class); handler.invoke(myFixture.getProject(), editor, myFixture.getFile(), ((EditorEx)editor).getDataContext()); } /** * Configures project by some path. 
It is here to emulate {@link com.intellij.platform.PlatformProjectOpenProcessor} * * @param path path to open * @param configurator configurator to use */ protected void configureProjectByProjectConfigurators(@NotNull final String path, @NotNull final DirectoryProjectConfigurator configurator) { final VirtualFile newPath = myFixture.copyDirectoryToProject(path, String.format("%s%s%s", "temp_for_project_conf", File.pathSeparator, path)); final Ref<Module> moduleRef = new Ref<>(myFixture.getModule()); configurator.configureProject(myFixture.getProject(), newPath, moduleRef, false); } public static String getHelpersPath() { return new File(PythonHelpersLocator.getPythonCommunityPath(), "helpers").getPath(); } /** * Compares sets with string sorting them and displaying one-per-line to make comparision easier * * @param message message to display in case of error * @param actual actual set * @param expected expected set */ protected static void compareStringSets(@NotNull final String message, @NotNull final Set<String> actual, @NotNull final Set<String> expected) { final Joiner joiner = Joiner.on("\n"); Assert.assertEquals(message, joiner.join(new TreeSet<>(actual)), joiner.join(new TreeSet<>(expected))); } /** * Clicks certain button in document on caret position * * @param action what button to click (const from {@link IdeActions}) (btw, there should be some way to express it using annotations) * @see IdeActions */ protected final void pressButton(@NotNull final String action) { CommandProcessor.getInstance().executeCommand(myFixture.getProject(), () -> myFixture.performEditorAction(action), "", null); } @NotNull protected CommonCodeStyleSettings getCommonCodeStyleSettings() { return getCodeStyleSettings().getCommonSettings(PythonLanguage.getInstance()); } @NotNull protected CodeStyleSettings getCodeStyleSettings() { return CodeStyle.getSettings(myFixture.getProject()); } @NotNull protected CommonCodeStyleSettings.IndentOptions getIndentOptions() { return 
getCommonCodeStyleSettings().getIndentOptions(); } /** * When you have more than one completion variant, you may use this method providing variant to choose. * It only works for one caret (multiple carets not supported) and since it puts tab after completion, be sure to limit * line somehow (i.e. with comment). * <br/> * Example: "user.n[caret]." There are "name" and "nose" fields. * By calling this function with "nose" you will end with "user.nose ". */ protected final void completeCaretWithMultipleVariants(final String @NotNull ... desiredVariants) { final LookupElement[] lookupElements = myFixture.completeBasic(); final LookupEx lookup = myFixture.getLookup(); if (lookupElements != null && lookupElements.length > 1) { // More than one element returned, check directly because completion can't work in this case for (final LookupElement element : lookupElements) { final String suggestedString = element.getLookupString(); if (Arrays.asList(desiredVariants).contains(suggestedString)) { myFixture.getLookup().setCurrentItem(element); lookup.setCurrentItem(element); myFixture.completeBasicAllCarets('\t'); return; } } } } @NotNull protected PsiElement getElementAtCaret() { final PsiFile file = myFixture.getFile(); assertNotNull(file); return file.findElementAt(myFixture.getCaretOffset()); } public static void assertType(@NotNull String expectedType, @NotNull PyTypedElement element, @NotNull TypeEvalContext context) { assertType("Failed in " + context + " context", expectedType, element, context); } public static void assertType(@NotNull String message, @NotNull String expectedType, @NotNull PyTypedElement element, @NotNull TypeEvalContext context) { final PyType actual = context.getType(element); final String actualType = PythonDocumentationProvider.getTypeName(actual, context); assertEquals(message, expectedType, actualType); } public void addExcludedRoot(String rootPath) { final VirtualFile dir = myFixture.findFileInTempDir(rootPath); final Module module = 
myFixture.getModule(); assertNotNull(dir); PsiTestUtil.addExcludedRoot(module, dir); Disposer.register(myFixture.getProjectDisposable(), () -> PsiTestUtil.removeExcludedRoot(module, dir)); } public <T> void assertContainsInRelativeOrder(@NotNull final Iterable<? extends T> actual, final T @Nullable ... expected) { final List<T> actualList = Lists.newArrayList(actual); if (expected.length > 0) { T prev = expected[0]; int prevIndex = actualList.indexOf(prev); assertTrue(prev + " is not found in " + actualList, prevIndex >= 0); for (int i = 1; i < expected.length; i++) { final T next = expected[i]; final int nextIndex = actualList.indexOf(next); assertTrue(next + " is not found in " + actualList, nextIndex >= 0); assertTrue(prev + " should precede " + next + " in " + actualList, prevIndex < nextIndex); prev = next; prevIndex = nextIndex; } } } }
/* Copyright 2012 TripAdvisor, LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ /* ------------------------------------------------------------ * The HDFS-traversing code and file sorting code was written by * Rapleaf. Re-used and adapted with permission. Please see: * https://github.com/Rapleaf/HDFS-Backup * for the original code. * ------------------------------------------------------------ */ package com.tripadvisor.hadoop; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.*; import java.io.*; import java.text.DecimalFormat; import java.util.*; import java.util.regex.*; import java.sql.*; /** Traverse hdfs filesystem, ignoring the same files that are ignored * by the BackupHdfs class. But process all files, without any * concern for their age. Compare their checksum / date / size * against the files on the local backup filesystem. * * @author tpalka@tripadvisor.com * @date Wed Jan 4 08:41:23 2012 */ public class VerifyHdfsBackup { private TablesToIgnore m_ignoreTables; // assume the hdfs block size. Prod cluster uses 128M, as opposed // to the hadoop default 64M. 
final static long N_BLOCK_SIZE = 128 * 1024 * 1024; public VerifyHdfsBackup() { } /** * Prints out usage */ static void usage() { System.err.println("Usage: hadoop com.tripadvisor.hadoop.VerifyHdfsBackup args\n" + " --hdfs-path path/on/hdfs\n" + " --local-path path/on/local/fs: path to hdfs backup\n" + " [--max-date UNIX-time]: don't verify any newer files\n" + " [--ignore-tables FILE]: list of tables to ignore\n" + " --from-file FILE: list of filenames to verify\n"); System.exit(1); } public static void main(String[] args) throws IOException { Path baseDir = null; String sLocalPathRoot = null; String sIgnoreTablesFilename = null; String sMaxDateString = null; String sFromFilename = null; for (int i=0 ; i<args.length ; i++) { if (args[i].equals("--hdfs-path")) { baseDir = new Path(args[++i]); continue; } if (args[i].equals("--local-path")) { sLocalPathRoot = args[++i]; continue; } if (args[i].equals("--ignore-tables")) { sIgnoreTablesFilename = args[++i]; continue; } if (args[i].equals("--max-date")) { sMaxDateString = args[++i]; continue; } if (args[i].equals("--from-file")) { sFromFilename = args[++i]; continue; } System.err.println("ERROR: unknown arg " + args[i]); usage(); } if (baseDir == null || sLocalPathRoot == null) { usage(); } // UNIX date for right now long maxDate = new java.util.Date().getTime() / 1000; if (sMaxDateString != null) { // UNIX date since epoch of last backup maxDate = Long.parseLong(sMaxDateString); } VerifyHdfsBackup bak = new VerifyHdfsBackup(); // initialize the list of tables to ignore if (sIgnoreTablesFilename != null) { bak.initializeTablesToIgnore(sIgnoreTablesFilename); } Configuration conf = new Configuration(); FileSystem fs = FileSystem.get(conf); if (sFromFilename != null) { BufferedReader in = null; try { in = new BufferedReader(new FileReader(sFromFilename)); String sFile; while ((sFile = in.readLine()) != null) { bak.checkDir(fs, new Path(sFile), sLocalPathRoot, maxDate); } } catch (Exception e) { 
System.out.println("ERROR: Failed to read from-file " + sFromFilename + ": " + e); } finally { try { in.close(); } catch (Exception e2) {} } } else { // If the HDFS path is a dir continue if (fs.getFileStatus(baseDir).isDir()) { System.out.println("Searching filesystem: " + baseDir.toUri().getPath()); bak.checkDir(fs, baseDir, sLocalPathRoot, maxDate); } } System.exit(0); } /** * Method to go though the HDFS filesystem in a DFS to find all * files * * fs:FileSystem object from HDFS * maxDate:Newest date for files to be backed up * p:Path in HDFS to look for files **/ public void checkDir(FileSystem fs, Path p, String sLocalPathRoot, long maxDate) { FileStatus[] fStat; try { String sPath = p.toUri().getPath(); // If this is a directory if (fs.getFileStatus(p).isDir()) { // ignore certain directories if ("dfstmp".equals(p.getName()) || "tmp".equals(p.getName()) || "jobtracker".equals(p.getName()) || sPath.startsWith("/mapred") || "ops".equals(p.getName()) || p.getName().startsWith("_distcp_logs") ) { return; } fStat = fs.listStatus(p); // Do a recursive call to all elements for (int i = 0; i < fStat.length; i++) { checkDir(fs, fStat[i].getPath(), sLocalPathRoot, maxDate); } } else { // If not a directory then we've found a file // ignore crc files if (p.getName().endsWith(".crc")) { return; } // ignore other files if (sPath.startsWith("/user/oozie/etl/workflows/")) { return; } // try to get the table name from the path. There are // various types of tables, from those replicated from // tripmonster to regular hive tables to partitioned // hive tables. We use table names to both exclude // some from the backup, and for the rest to dump out // the schema and partition name. 
if (m_ignoreTables != null && m_ignoreTables.doIgnoreFile(sPath)) { return; } // check the file FileStatus stat = fs.getFileStatus(p); // ignore files that are too new if ((stat.getModificationTime() / 1000) > maxDate) { System.out.println("IGNORING: " + sPath + " too new"); return; } // warn about files that have a mis-matching block // size. The checksum check will fail for them // anyways, so just catch it here. if (stat.getBlockSize() != N_BLOCK_SIZE) { System.out.println("ERROR: non-default block size (" + (stat.getBlockSize() / (1024*1024)) + "M) would fail checksum: " + sPath); return; } // get HDFS checksum FileChecksum ck = fs.getFileChecksum(p); String sCk, sCkShort; if (ck == null) { sCk = sCkShort = "<null>"; } else { sCk = ck.toString(); sCkShort = sCk.replaceAll("^.*:", ""); } System.out.println(sPath + " len=" + stat.getLen() + " " + stat.getOwner() + "/" + stat.getGroup() + " checksum=" + sCk); // find the local file String sFsPath = sLocalPathRoot + p.toUri().getPath(); File fLocal = new File(sFsPath); if (! fLocal.exists()) { Calendar cal = Calendar.getInstance(); cal.setTimeInMillis(stat.getModificationTime()); System.out.println("ERROR: file does not exist: " + sFsPath + " hdfs-last-mtime=" + cal.getTime().toString()); return; } if (! 
fLocal.isFile()) { System.out.println("ERROR: path is not a file: " + sFsPath); return; } if (stat.getLen() != fLocal.length()) { System.out.println("ERROR: length mismatch: " + sFsPath + " hdfslen=" + stat.getLen() + " fslen=" + fLocal.length()); return; } // get local fs checksum FileChecksum ckLocal = getLocalFileChecksum(sFsPath); if (ckLocal == null) { System.out.println("ERROR Failed to get checksum for local file " + sFsPath); return; } // compare checksums as a string, to strip the // algorithm name from the beginning String sCkLocal = ckLocal.toString(); String sCkLocalShort = sCkLocal.replaceAll("^.*:", ""); if (false == sCkShort.equals(sCkLocalShort)) { System.out.println("ERROR: checksum mismatch: " + sFsPath + "\nhdfs = " + sCk + "\nlocal= " + sCkLocal); return; } } } catch (IOException e) { System.out.println("ERROR: could not open " + p + ": " + e); // System.exit(1) ; } } // ------------------------------------------------------------ /** get the list of tables that get synced from tripmaster -- * we'll want to ignore those. Stores the names in lowercase in * the provided hashset. 
* * @author tpalka@tripadvisor.com * @date Tue Nov 22 16:57:42 2011 */ void initializeTablesToIgnore(String sFilename) { m_ignoreTables = new TablesToIgnore(sFilename); } // ------------------------------------------------------------ static ExternalHDFSChecksumGenerator g_checksumGenerator; // ------------------------------------------------------------ /** * * @author tpalka@tripadvisor.com * @date Sat Jan 7 05:51:47 2012 */ MD5MD5CRC32FileChecksum getLocalFileChecksum(String sPath) { if (g_checksumGenerator == null) { g_checksumGenerator = new ExternalHDFSChecksumGenerator(); } // copied from checksum generator code long lBlockSize = N_BLOCK_SIZE; int bytesPerCRC = 512; try { return g_checksumGenerator.getLocalFilesystemHDFSStyleChecksum (sPath, bytesPerCRC, lBlockSize); } catch (Exception e) { System.out.println("ERROR getting local checksum: " + e.toString()); e.printStackTrace(); return null; } } }
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.wellarchitected.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * A workload return object. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/wellarchitected-2020-03-31/Workload" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class Workload implements Serializable, Cloneable, StructuredPojo { private String workloadId; private String workloadArn; private String workloadName; private String description; private String environment; private java.util.Date updatedAt; private java.util.List<String> accountIds; private java.util.List<String> awsRegions; private java.util.List<String> nonAwsRegions; private String architecturalDesign; private String reviewOwner; private java.util.Date reviewRestrictionDate; /** * <p> * Flag indicating whether the workload owner has acknowledged that the <i>Review owner</i> field is required. * </p> * <p> * If a <b>Review owner</b> is not added to the workload within 60 days of acknowledgement, access to the workload * is restricted until an owner is added. 
* </p> */ private Boolean isReviewOwnerUpdateAcknowledged; private String industryType; private String industry; private String notes; private String improvementStatus; private java.util.Map<String, Integer> riskCounts; private java.util.List<String> pillarPriorities; private java.util.List<String> lenses; private String owner; /** * <p> * The ID assigned to the share invitation. * </p> */ private String shareInvitationId; /** * <p> * The tags associated with the workload. * </p> */ private java.util.Map<String, String> tags; /** * @param workloadId */ public void setWorkloadId(String workloadId) { this.workloadId = workloadId; } /** * @return */ public String getWorkloadId() { return this.workloadId; } /** * @param workloadId * @return Returns a reference to this object so that method calls can be chained together. */ public Workload withWorkloadId(String workloadId) { setWorkloadId(workloadId); return this; } /** * @param workloadArn */ public void setWorkloadArn(String workloadArn) { this.workloadArn = workloadArn; } /** * @return */ public String getWorkloadArn() { return this.workloadArn; } /** * @param workloadArn * @return Returns a reference to this object so that method calls can be chained together. */ public Workload withWorkloadArn(String workloadArn) { setWorkloadArn(workloadArn); return this; } /** * @param workloadName */ public void setWorkloadName(String workloadName) { this.workloadName = workloadName; } /** * @return */ public String getWorkloadName() { return this.workloadName; } /** * @param workloadName * @return Returns a reference to this object so that method calls can be chained together. 
*/ public Workload withWorkloadName(String workloadName) { setWorkloadName(workloadName); return this; } /** * @param description */ public void setDescription(String description) { this.description = description; } /** * @return */ public String getDescription() { return this.description; } /** * @param description * @return Returns a reference to this object so that method calls can be chained together. */ public Workload withDescription(String description) { setDescription(description); return this; } /** * @param environment * @see WorkloadEnvironment */ public void setEnvironment(String environment) { this.environment = environment; } /** * @return * @see WorkloadEnvironment */ public String getEnvironment() { return this.environment; } /** * @param environment * @return Returns a reference to this object so that method calls can be chained together. * @see WorkloadEnvironment */ public Workload withEnvironment(String environment) { setEnvironment(environment); return this; } /** * @param environment * @return Returns a reference to this object so that method calls can be chained together. * @see WorkloadEnvironment */ public Workload withEnvironment(WorkloadEnvironment environment) { this.environment = environment.toString(); return this; } /** * @param updatedAt */ public void setUpdatedAt(java.util.Date updatedAt) { this.updatedAt = updatedAt; } /** * @return */ public java.util.Date getUpdatedAt() { return this.updatedAt; } /** * @param updatedAt * @return Returns a reference to this object so that method calls can be chained together. 
*/ public Workload withUpdatedAt(java.util.Date updatedAt) { setUpdatedAt(updatedAt); return this; } /** * @return */ public java.util.List<String> getAccountIds() { return accountIds; } /** * @param accountIds */ public void setAccountIds(java.util.Collection<String> accountIds) { if (accountIds == null) { this.accountIds = null; return; } this.accountIds = new java.util.ArrayList<String>(accountIds); } /** * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setAccountIds(java.util.Collection)} or {@link #withAccountIds(java.util.Collection)} if you want to * override the existing values. * </p> * * @param accountIds * @return Returns a reference to this object so that method calls can be chained together. */ public Workload withAccountIds(String... accountIds) { if (this.accountIds == null) { setAccountIds(new java.util.ArrayList<String>(accountIds.length)); } for (String ele : accountIds) { this.accountIds.add(ele); } return this; } /** * @param accountIds * @return Returns a reference to this object so that method calls can be chained together. */ public Workload withAccountIds(java.util.Collection<String> accountIds) { setAccountIds(accountIds); return this; } /** * @return */ public java.util.List<String> getAwsRegions() { return awsRegions; } /** * @param awsRegions */ public void setAwsRegions(java.util.Collection<String> awsRegions) { if (awsRegions == null) { this.awsRegions = null; return; } this.awsRegions = new java.util.ArrayList<String>(awsRegions); } /** * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setAwsRegions(java.util.Collection)} or {@link #withAwsRegions(java.util.Collection)} if you want to * override the existing values. * </p> * * @param awsRegions * @return Returns a reference to this object so that method calls can be chained together. */ public Workload withAwsRegions(String... 
awsRegions) { if (this.awsRegions == null) { setAwsRegions(new java.util.ArrayList<String>(awsRegions.length)); } for (String ele : awsRegions) { this.awsRegions.add(ele); } return this; } /** * @param awsRegions * @return Returns a reference to this object so that method calls can be chained together. */ public Workload withAwsRegions(java.util.Collection<String> awsRegions) { setAwsRegions(awsRegions); return this; } /** * @return */ public java.util.List<String> getNonAwsRegions() { return nonAwsRegions; } /** * @param nonAwsRegions */ public void setNonAwsRegions(java.util.Collection<String> nonAwsRegions) { if (nonAwsRegions == null) { this.nonAwsRegions = null; return; } this.nonAwsRegions = new java.util.ArrayList<String>(nonAwsRegions); } /** * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setNonAwsRegions(java.util.Collection)} or {@link #withNonAwsRegions(java.util.Collection)} if you want * to override the existing values. * </p> * * @param nonAwsRegions * @return Returns a reference to this object so that method calls can be chained together. */ public Workload withNonAwsRegions(String... nonAwsRegions) { if (this.nonAwsRegions == null) { setNonAwsRegions(new java.util.ArrayList<String>(nonAwsRegions.length)); } for (String ele : nonAwsRegions) { this.nonAwsRegions.add(ele); } return this; } /** * @param nonAwsRegions * @return Returns a reference to this object so that method calls can be chained together. */ public Workload withNonAwsRegions(java.util.Collection<String> nonAwsRegions) { setNonAwsRegions(nonAwsRegions); return this; } /** * @param architecturalDesign */ public void setArchitecturalDesign(String architecturalDesign) { this.architecturalDesign = architecturalDesign; } /** * @return */ public String getArchitecturalDesign() { return this.architecturalDesign; } /** * @param architecturalDesign * @return Returns a reference to this object so that method calls can be chained together. 
*/ public Workload withArchitecturalDesign(String architecturalDesign) { setArchitecturalDesign(architecturalDesign); return this; } /** * @param reviewOwner */ public void setReviewOwner(String reviewOwner) { this.reviewOwner = reviewOwner; } /** * @return */ public String getReviewOwner() { return this.reviewOwner; } /** * @param reviewOwner * @return Returns a reference to this object so that method calls can be chained together. */ public Workload withReviewOwner(String reviewOwner) { setReviewOwner(reviewOwner); return this; } /** * @param reviewRestrictionDate */ public void setReviewRestrictionDate(java.util.Date reviewRestrictionDate) { this.reviewRestrictionDate = reviewRestrictionDate; } /** * @return */ public java.util.Date getReviewRestrictionDate() { return this.reviewRestrictionDate; } /** * @param reviewRestrictionDate * @return Returns a reference to this object so that method calls can be chained together. */ public Workload withReviewRestrictionDate(java.util.Date reviewRestrictionDate) { setReviewRestrictionDate(reviewRestrictionDate); return this; } /** * <p> * Flag indicating whether the workload owner has acknowledged that the <i>Review owner</i> field is required. * </p> * <p> * If a <b>Review owner</b> is not added to the workload within 60 days of acknowledgement, access to the workload * is restricted until an owner is added. * </p> * * @param isReviewOwnerUpdateAcknowledged * Flag indicating whether the workload owner has acknowledged that the <i>Review owner</i> field is * required.</p> * <p> * If a <b>Review owner</b> is not added to the workload within 60 days of acknowledgement, access to the * workload is restricted until an owner is added. */ public void setIsReviewOwnerUpdateAcknowledged(Boolean isReviewOwnerUpdateAcknowledged) { this.isReviewOwnerUpdateAcknowledged = isReviewOwnerUpdateAcknowledged; } /** * <p> * Flag indicating whether the workload owner has acknowledged that the <i>Review owner</i> field is required. 
* </p> * <p> * If a <b>Review owner</b> is not added to the workload within 60 days of acknowledgement, access to the workload * is restricted until an owner is added. * </p> * * @return Flag indicating whether the workload owner has acknowledged that the <i>Review owner</i> field is * required.</p> * <p> * If a <b>Review owner</b> is not added to the workload within 60 days of acknowledgement, access to the * workload is restricted until an owner is added. */ public Boolean getIsReviewOwnerUpdateAcknowledged() { return this.isReviewOwnerUpdateAcknowledged; } /** * <p> * Flag indicating whether the workload owner has acknowledged that the <i>Review owner</i> field is required. * </p> * <p> * If a <b>Review owner</b> is not added to the workload within 60 days of acknowledgement, access to the workload * is restricted until an owner is added. * </p> * * @param isReviewOwnerUpdateAcknowledged * Flag indicating whether the workload owner has acknowledged that the <i>Review owner</i> field is * required.</p> * <p> * If a <b>Review owner</b> is not added to the workload within 60 days of acknowledgement, access to the * workload is restricted until an owner is added. * @return Returns a reference to this object so that method calls can be chained together. */ public Workload withIsReviewOwnerUpdateAcknowledged(Boolean isReviewOwnerUpdateAcknowledged) { setIsReviewOwnerUpdateAcknowledged(isReviewOwnerUpdateAcknowledged); return this; } /** * <p> * Flag indicating whether the workload owner has acknowledged that the <i>Review owner</i> field is required. * </p> * <p> * If a <b>Review owner</b> is not added to the workload within 60 days of acknowledgement, access to the workload * is restricted until an owner is added. 
* </p> * * @return Flag indicating whether the workload owner has acknowledged that the <i>Review owner</i> field is * required.</p> * <p> * If a <b>Review owner</b> is not added to the workload within 60 days of acknowledgement, access to the * workload is restricted until an owner is added. */ public Boolean isReviewOwnerUpdateAcknowledged() { return this.isReviewOwnerUpdateAcknowledged; } /** * @param industryType */ public void setIndustryType(String industryType) { this.industryType = industryType; } /** * @return */ public String getIndustryType() { return this.industryType; } /** * @param industryType * @return Returns a reference to this object so that method calls can be chained together. */ public Workload withIndustryType(String industryType) { setIndustryType(industryType); return this; } /** * @param industry */ public void setIndustry(String industry) { this.industry = industry; } /** * @return */ public String getIndustry() { return this.industry; } /** * @param industry * @return Returns a reference to this object so that method calls can be chained together. */ public Workload withIndustry(String industry) { setIndustry(industry); return this; } /** * @param notes */ public void setNotes(String notes) { this.notes = notes; } /** * @return */ public String getNotes() { return this.notes; } /** * @param notes * @return Returns a reference to this object so that method calls can be chained together. */ public Workload withNotes(String notes) { setNotes(notes); return this; } /** * @param improvementStatus * @see WorkloadImprovementStatus */ public void setImprovementStatus(String improvementStatus) { this.improvementStatus = improvementStatus; } /** * @return * @see WorkloadImprovementStatus */ public String getImprovementStatus() { return this.improvementStatus; } /** * @param improvementStatus * @return Returns a reference to this object so that method calls can be chained together. 
* @see WorkloadImprovementStatus */ public Workload withImprovementStatus(String improvementStatus) { setImprovementStatus(improvementStatus); return this; } /** * @param improvementStatus * @return Returns a reference to this object so that method calls can be chained together. * @see WorkloadImprovementStatus */ public Workload withImprovementStatus(WorkloadImprovementStatus improvementStatus) { this.improvementStatus = improvementStatus.toString(); return this; } /** * @return */ public java.util.Map<String, Integer> getRiskCounts() { return riskCounts; } /** * @param riskCounts */ public void setRiskCounts(java.util.Map<String, Integer> riskCounts) { this.riskCounts = riskCounts; } /** * @param riskCounts * @return Returns a reference to this object so that method calls can be chained together. */ public Workload withRiskCounts(java.util.Map<String, Integer> riskCounts) { setRiskCounts(riskCounts); return this; } /** * Add a single RiskCounts entry * * @see Workload#withRiskCounts * @returns a reference to this object so that method calls can be chained together. */ public Workload addRiskCountsEntry(String key, Integer value) { if (null == this.riskCounts) { this.riskCounts = new java.util.HashMap<String, Integer>(); } if (this.riskCounts.containsKey(key)) throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided."); this.riskCounts.put(key, value); return this; } /** * Removes all the entries added into RiskCounts. * * @return Returns a reference to this object so that method calls can be chained together. 
*/ public Workload clearRiskCountsEntries() { this.riskCounts = null; return this; } /** * @return */ public java.util.List<String> getPillarPriorities() { return pillarPriorities; } /** * @param pillarPriorities */ public void setPillarPriorities(java.util.Collection<String> pillarPriorities) { if (pillarPriorities == null) { this.pillarPriorities = null; return; } this.pillarPriorities = new java.util.ArrayList<String>(pillarPriorities); } /** * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setPillarPriorities(java.util.Collection)} or {@link #withPillarPriorities(java.util.Collection)} if you * want to override the existing values. * </p> * * @param pillarPriorities * @return Returns a reference to this object so that method calls can be chained together. */ public Workload withPillarPriorities(String... pillarPriorities) { if (this.pillarPriorities == null) { setPillarPriorities(new java.util.ArrayList<String>(pillarPriorities.length)); } for (String ele : pillarPriorities) { this.pillarPriorities.add(ele); } return this; } /** * @param pillarPriorities * @return Returns a reference to this object so that method calls can be chained together. */ public Workload withPillarPriorities(java.util.Collection<String> pillarPriorities) { setPillarPriorities(pillarPriorities); return this; } /** * @return */ public java.util.List<String> getLenses() { return lenses; } /** * @param lenses */ public void setLenses(java.util.Collection<String> lenses) { if (lenses == null) { this.lenses = null; return; } this.lenses = new java.util.ArrayList<String>(lenses); } /** * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setLenses(java.util.Collection)} or {@link #withLenses(java.util.Collection)} if you want to override the * existing values. * </p> * * @param lenses * @return Returns a reference to this object so that method calls can be chained together. 
*/ public Workload withLenses(String... lenses) { if (this.lenses == null) { setLenses(new java.util.ArrayList<String>(lenses.length)); } for (String ele : lenses) { this.lenses.add(ele); } return this; } /** * @param lenses * @return Returns a reference to this object so that method calls can be chained together. */ public Workload withLenses(java.util.Collection<String> lenses) { setLenses(lenses); return this; } /** * @param owner */ public void setOwner(String owner) { this.owner = owner; } /** * @return */ public String getOwner() { return this.owner; } /** * @param owner * @return Returns a reference to this object so that method calls can be chained together. */ public Workload withOwner(String owner) { setOwner(owner); return this; } /** * <p> * The ID assigned to the share invitation. * </p> * * @param shareInvitationId * The ID assigned to the share invitation. */ public void setShareInvitationId(String shareInvitationId) { this.shareInvitationId = shareInvitationId; } /** * <p> * The ID assigned to the share invitation. * </p> * * @return The ID assigned to the share invitation. */ public String getShareInvitationId() { return this.shareInvitationId; } /** * <p> * The ID assigned to the share invitation. * </p> * * @param shareInvitationId * The ID assigned to the share invitation. * @return Returns a reference to this object so that method calls can be chained together. */ public Workload withShareInvitationId(String shareInvitationId) { setShareInvitationId(shareInvitationId); return this; } /** * <p> * The tags associated with the workload. * </p> * * @return The tags associated with the workload. */ public java.util.Map<String, String> getTags() { return tags; } /** * <p> * The tags associated with the workload. * </p> * * @param tags * The tags associated with the workload. */ public void setTags(java.util.Map<String, String> tags) { this.tags = tags; } /** * <p> * The tags associated with the workload. 
* </p> * * @param tags * The tags associated with the workload. * @return Returns a reference to this object so that method calls can be chained together. */ public Workload withTags(java.util.Map<String, String> tags) { setTags(tags); return this; } /** * Add a single Tags entry * * @see Workload#withTags * @returns a reference to this object so that method calls can be chained together. */ public Workload addTagsEntry(String key, String value) { if (null == this.tags) { this.tags = new java.util.HashMap<String, String>(); } if (this.tags.containsKey(key)) throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided."); this.tags.put(key, value); return this; } /** * Removes all the entries added into Tags. * * @return Returns a reference to this object so that method calls can be chained together. */ public Workload clearTagsEntries() { this.tags = null; return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getWorkloadId() != null) sb.append("WorkloadId: ").append(getWorkloadId()).append(","); if (getWorkloadArn() != null) sb.append("WorkloadArn: ").append(getWorkloadArn()).append(","); if (getWorkloadName() != null) sb.append("WorkloadName: ").append(getWorkloadName()).append(","); if (getDescription() != null) sb.append("Description: ").append(getDescription()).append(","); if (getEnvironment() != null) sb.append("Environment: ").append(getEnvironment()).append(","); if (getUpdatedAt() != null) sb.append("UpdatedAt: ").append(getUpdatedAt()).append(","); if (getAccountIds() != null) sb.append("AccountIds: ").append(getAccountIds()).append(","); if (getAwsRegions() != null) sb.append("AwsRegions: ").append(getAwsRegions()).append(","); if (getNonAwsRegions() != null) sb.append("NonAwsRegions: ").append(getNonAwsRegions()).append(","); if (getArchitecturalDesign() != null) sb.append("ArchitecturalDesign: ").append(getArchitecturalDesign()).append(","); if (getReviewOwner() != null) sb.append("ReviewOwner: ").append(getReviewOwner()).append(","); if (getReviewRestrictionDate() != null) sb.append("ReviewRestrictionDate: ").append(getReviewRestrictionDate()).append(","); if (getIsReviewOwnerUpdateAcknowledged() != null) sb.append("IsReviewOwnerUpdateAcknowledged: ").append(getIsReviewOwnerUpdateAcknowledged()).append(","); if (getIndustryType() != null) sb.append("IndustryType: ").append(getIndustryType()).append(","); if (getIndustry() != null) sb.append("Industry: ").append(getIndustry()).append(","); if (getNotes() != null) sb.append("Notes: ").append(getNotes()).append(","); if (getImprovementStatus() != null) sb.append("ImprovementStatus: ").append(getImprovementStatus()).append(","); if (getRiskCounts() != null) sb.append("RiskCounts: ").append(getRiskCounts()).append(","); if (getPillarPriorities() != null) 
sb.append("PillarPriorities: ").append(getPillarPriorities()).append(","); if (getLenses() != null) sb.append("Lenses: ").append(getLenses()).append(","); if (getOwner() != null) sb.append("Owner: ").append(getOwner()).append(","); if (getShareInvitationId() != null) sb.append("ShareInvitationId: ").append(getShareInvitationId()).append(","); if (getTags() != null) sb.append("Tags: ").append(getTags()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof Workload == false) return false; Workload other = (Workload) obj; if (other.getWorkloadId() == null ^ this.getWorkloadId() == null) return false; if (other.getWorkloadId() != null && other.getWorkloadId().equals(this.getWorkloadId()) == false) return false; if (other.getWorkloadArn() == null ^ this.getWorkloadArn() == null) return false; if (other.getWorkloadArn() != null && other.getWorkloadArn().equals(this.getWorkloadArn()) == false) return false; if (other.getWorkloadName() == null ^ this.getWorkloadName() == null) return false; if (other.getWorkloadName() != null && other.getWorkloadName().equals(this.getWorkloadName()) == false) return false; if (other.getDescription() == null ^ this.getDescription() == null) return false; if (other.getDescription() != null && other.getDescription().equals(this.getDescription()) == false) return false; if (other.getEnvironment() == null ^ this.getEnvironment() == null) return false; if (other.getEnvironment() != null && other.getEnvironment().equals(this.getEnvironment()) == false) return false; if (other.getUpdatedAt() == null ^ this.getUpdatedAt() == null) return false; if (other.getUpdatedAt() != null && other.getUpdatedAt().equals(this.getUpdatedAt()) == false) return false; if (other.getAccountIds() == null ^ this.getAccountIds() == null) return false; if (other.getAccountIds() != null && other.getAccountIds().equals(this.getAccountIds()) == false) return 
false; if (other.getAwsRegions() == null ^ this.getAwsRegions() == null) return false; if (other.getAwsRegions() != null && other.getAwsRegions().equals(this.getAwsRegions()) == false) return false; if (other.getNonAwsRegions() == null ^ this.getNonAwsRegions() == null) return false; if (other.getNonAwsRegions() != null && other.getNonAwsRegions().equals(this.getNonAwsRegions()) == false) return false; if (other.getArchitecturalDesign() == null ^ this.getArchitecturalDesign() == null) return false; if (other.getArchitecturalDesign() != null && other.getArchitecturalDesign().equals(this.getArchitecturalDesign()) == false) return false; if (other.getReviewOwner() == null ^ this.getReviewOwner() == null) return false; if (other.getReviewOwner() != null && other.getReviewOwner().equals(this.getReviewOwner()) == false) return false; if (other.getReviewRestrictionDate() == null ^ this.getReviewRestrictionDate() == null) return false; if (other.getReviewRestrictionDate() != null && other.getReviewRestrictionDate().equals(this.getReviewRestrictionDate()) == false) return false; if (other.getIsReviewOwnerUpdateAcknowledged() == null ^ this.getIsReviewOwnerUpdateAcknowledged() == null) return false; if (other.getIsReviewOwnerUpdateAcknowledged() != null && other.getIsReviewOwnerUpdateAcknowledged().equals(this.getIsReviewOwnerUpdateAcknowledged()) == false) return false; if (other.getIndustryType() == null ^ this.getIndustryType() == null) return false; if (other.getIndustryType() != null && other.getIndustryType().equals(this.getIndustryType()) == false) return false; if (other.getIndustry() == null ^ this.getIndustry() == null) return false; if (other.getIndustry() != null && other.getIndustry().equals(this.getIndustry()) == false) return false; if (other.getNotes() == null ^ this.getNotes() == null) return false; if (other.getNotes() != null && other.getNotes().equals(this.getNotes()) == false) return false; if (other.getImprovementStatus() == null ^ 
this.getImprovementStatus() == null) return false; if (other.getImprovementStatus() != null && other.getImprovementStatus().equals(this.getImprovementStatus()) == false) return false; if (other.getRiskCounts() == null ^ this.getRiskCounts() == null) return false; if (other.getRiskCounts() != null && other.getRiskCounts().equals(this.getRiskCounts()) == false) return false; if (other.getPillarPriorities() == null ^ this.getPillarPriorities() == null) return false; if (other.getPillarPriorities() != null && other.getPillarPriorities().equals(this.getPillarPriorities()) == false) return false; if (other.getLenses() == null ^ this.getLenses() == null) return false; if (other.getLenses() != null && other.getLenses().equals(this.getLenses()) == false) return false; if (other.getOwner() == null ^ this.getOwner() == null) return false; if (other.getOwner() != null && other.getOwner().equals(this.getOwner()) == false) return false; if (other.getShareInvitationId() == null ^ this.getShareInvitationId() == null) return false; if (other.getShareInvitationId() != null && other.getShareInvitationId().equals(this.getShareInvitationId()) == false) return false; if (other.getTags() == null ^ this.getTags() == null) return false; if (other.getTags() != null && other.getTags().equals(this.getTags()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getWorkloadId() == null) ? 0 : getWorkloadId().hashCode()); hashCode = prime * hashCode + ((getWorkloadArn() == null) ? 0 : getWorkloadArn().hashCode()); hashCode = prime * hashCode + ((getWorkloadName() == null) ? 0 : getWorkloadName().hashCode()); hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode()); hashCode = prime * hashCode + ((getEnvironment() == null) ? 0 : getEnvironment().hashCode()); hashCode = prime * hashCode + ((getUpdatedAt() == null) ? 
0 : getUpdatedAt().hashCode()); hashCode = prime * hashCode + ((getAccountIds() == null) ? 0 : getAccountIds().hashCode()); hashCode = prime * hashCode + ((getAwsRegions() == null) ? 0 : getAwsRegions().hashCode()); hashCode = prime * hashCode + ((getNonAwsRegions() == null) ? 0 : getNonAwsRegions().hashCode()); hashCode = prime * hashCode + ((getArchitecturalDesign() == null) ? 0 : getArchitecturalDesign().hashCode()); hashCode = prime * hashCode + ((getReviewOwner() == null) ? 0 : getReviewOwner().hashCode()); hashCode = prime * hashCode + ((getReviewRestrictionDate() == null) ? 0 : getReviewRestrictionDate().hashCode()); hashCode = prime * hashCode + ((getIsReviewOwnerUpdateAcknowledged() == null) ? 0 : getIsReviewOwnerUpdateAcknowledged().hashCode()); hashCode = prime * hashCode + ((getIndustryType() == null) ? 0 : getIndustryType().hashCode()); hashCode = prime * hashCode + ((getIndustry() == null) ? 0 : getIndustry().hashCode()); hashCode = prime * hashCode + ((getNotes() == null) ? 0 : getNotes().hashCode()); hashCode = prime * hashCode + ((getImprovementStatus() == null) ? 0 : getImprovementStatus().hashCode()); hashCode = prime * hashCode + ((getRiskCounts() == null) ? 0 : getRiskCounts().hashCode()); hashCode = prime * hashCode + ((getPillarPriorities() == null) ? 0 : getPillarPriorities().hashCode()); hashCode = prime * hashCode + ((getLenses() == null) ? 0 : getLenses().hashCode()); hashCode = prime * hashCode + ((getOwner() == null) ? 0 : getOwner().hashCode()); hashCode = prime * hashCode + ((getShareInvitationId() == null) ? 0 : getShareInvitationId().hashCode()); hashCode = prime * hashCode + ((getTags() == null) ? 
0 : getTags().hashCode()); return hashCode; } @Override public Workload clone() { try { return (Workload) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.wellarchitected.model.transform.WorkloadMarshaller.getInstance().marshall(this, protocolMarshaller); } }
package com.gagnepain.cashcash.domain; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Objects; import java.util.Set; import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.EnumType; import javax.persistence.Enumerated; import javax.persistence.FetchType; import javax.persistence.ManyToOne; import javax.persistence.OneToMany; import javax.persistence.Table; import javax.validation.constraints.Min; import javax.validation.constraints.NotNull; import javax.validation.constraints.Size; import org.hibernate.annotations.Cache; import org.hibernate.annotations.CacheConcurrencyStrategy; import com.fasterxml.jackson.annotation.JsonBackReference; import com.fasterxml.jackson.annotation.JsonManagedReference; import com.gagnepain.cashcash.domain.enumeration.CashAccountType; /** * A CashAccount. */ @Entity @Table(name = "cash_account") @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE) public class CashAccount extends CashOwnedResource { private static final long serialVersionUID = 1L; @NotNull @Size(min = 2, max = 80) @Column(name = "name", length = 80, nullable = false) private String name; @NotNull @Enumerated(EnumType.STRING) @Column(name = "type", nullable = false) private CashAccountType type; @NotNull @Min(value = 1) @Column(name = "level", nullable = false) private int level; @Size(min = 1, max = 10) @Column(name = "code", length = 10) private String code; @NotNull @Column(name = "is_multi_currency", nullable = false) private boolean isMultiCurrency; @Column(name = "bank_id") private String bankId; @Column(name = "branch_id") private String branchId; @Column(name = "account_number") private String accountNumber; @Column(name = "account_key") private String accountKey; @Column(name = "iban") private String iban; @Column(name = "bic") private String bic; @ManyToOne(fetch = FetchType.LAZY) private CashCsvConfig csvConfig; @Column(name = 
"csv_config_id", updatable = false, insertable = false) private Long csvConfigId; @ManyToOne @NotNull private CashCurrency currency; @Column(name = "currency_id", updatable = false, insertable = false) private Long currencyId; @ManyToOne @JsonBackReference private CashAccount parentAccount; @Column(name = "parent_account_id", updatable = false, insertable = false) private Long parentAccountId; @OneToMany(cascade = CascadeType.PERSIST, mappedBy = "parentAccount", fetch = FetchType.LAZY) @JsonManagedReference private Set<CashAccount> childAccountList; @Override public List<CashOwnedResource> getOwnedResources() { final List<CashOwnedResource> independantResources = super.getOwnedResources(); if (parentAccount != null) { independantResources.add(parentAccount); } if (csvConfig != null) { independantResources.add(csvConfig); } return independantResources; } public String getName() { return name; } public void setName(final String name) { this.name = name; } public CashAccountType getType() { return type; } public void setType(final CashAccountType type) { this.type = type; } public int getLevel() { return level; } public void setLevel(final int level) { this.level = level; } public String getCode() { return code; } public void setCode(final String code) { this.code = code; } public boolean isMultiCurrency() { return isMultiCurrency; } public void setIsMultiCurrency(final boolean isMultiCurrency) { this.isMultiCurrency = isMultiCurrency; } public String getBankId() { return bankId; } public void setBankId(final String bankId) { this.bankId = bankId; } public String getBranchId() { return branchId; } public void setBranchId(final String branchId) { this.branchId = branchId; } public String getAccountNumber() { return accountNumber; } public void setAccountNumber(final String accountNumber) { this.accountNumber = accountNumber; } public String getAccountKey() { return accountKey; } public void setAccountKey(final String accountKey) { this.accountKey = accountKey; } public 
String getIban() { return iban; } public void setIban(final String iban) { this.iban = iban; } public String getBic() { return bic; } public void setBic(final String bic) { this.bic = bic; } public CashCsvConfig getCsvConfig() { return csvConfig; } public void setCsvConfig(final CashCsvConfig csvConfig) { this.csvConfig = csvConfig; } public CashCurrency getCurrency() { return currency; } public void setCurrency(final CashCurrency currency) { this.currency = currency; } public CashAccount getParentAccount() { return parentAccount; } public void setParentAccount(final CashAccount parentAccount) { this.parentAccount = parentAccount; this.parentAccount.getChildAccountList() .add(this); } public Long getParentAccountId() { return parentAccountId; } public void setParentAccountId(final Long parentAccountId) { this.parentAccountId = parentAccountId; } public Long getCsvConfigId() { return csvConfigId; } public void setCsvConfigId(final Long csvConfigId) { this.csvConfigId = csvConfigId; } public Long getCurrencyId() { return currencyId; } public void setCurrencyId(final Long currencyId) { this.currencyId = currencyId; } public Set<CashAccount> getChildAccountList() { if (childAccountList == null) { childAccountList = new HashSet<>(); } return childAccountList; } public void setChildAccountList(final Set<CashAccount> childAccountList) { this.childAccountList = childAccountList; } @Override public boolean equals(final Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } final CashAccount cashAccount = (CashAccount) o; if (cashAccount.getId() == null || getId() == null) { return false; } return Objects.equals(getId(), cashAccount.getId()); } @Override public int hashCode() { return Objects.hashCode(getId()); } @Override public String toString() { return "CashAccount{" + "name='" + name + '\'' + ", type=" + type + ", level=" + level + ", code='" + code + '\'' + ", isMultiCurrency=" + isMultiCurrency + ", bankId='" + 
bankId + '\'' + ", branchId='" + branchId + '\'' + ", accountNumber='" + accountNumber + '\'' + ", accountKey='" + accountKey + '\'' + ", iban='" + iban + '\'' + ", bic='" + bic + '\'' + ", csvConfig=" + csvConfig + ", currency=" + currency + ", parentAccount=" + parentAccount + '}'; } }
/*
 * Copyright (C) 2014 The Android Open Source Project
 * Copyright (c) 2000, 2008, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation. Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

// -- This file was mechanically generated: Do not edit! -- //

package java.nio.charset;

import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.BufferOverflowException;
import java.nio.BufferUnderflowException;
import java.nio.charset.CoderMalfunctionError;                  // javadoc

/**
 * An engine that transforms a sequence of bytes in a specific charset into a
 * sequence of sixteen-bit Unicode characters.
 *
 * <p> A decoding operation is the sequence: {@link #reset reset} (unless the
 * decoder is new); zero or more {@link #decode decode} calls with
 * {@code endOfInput == false}; one final {@code decode} call with
 * {@code endOfInput == true}; then {@link #flush flush}. Each {@code decode}
 * call consumes as many bytes as possible and returns a {@link CoderResult}
 * describing why it stopped (underflow, overflow, or a decoding error).
 *
 * <p> Two error types exist: <i>malformed input</i> (byte sequence illegal for
 * the charset) and <i>unmappable character</i> (legal bytes with no Unicode
 * mapping). How each is handled is controlled by a {@link CodingErrorAction}:
 * {@code IGNORE} the erroneous input, {@code REPORT} it to the invoker via the
 * returned {@link CoderResult}, or {@code REPLACE} it with the current
 * replacement string (initially {@code "\uFFFD"}; changeable via
 * {@link #replaceWith(java.lang.String) replaceWith}). The default action for
 * both error types is {@code REPORT}; see
 * {@link #onMalformedInput(CodingErrorAction) onMalformedInput} and
 * {@link #onUnmappableCharacter(CodingErrorAction) onUnmappableCharacter}.
 *
 * <p> Concrete subclasses need only implement the abstract
 * {@link #decodeLoop decodeLoop} method; subclasses that maintain internal
 * state should also override {@link #implFlush implFlush} and
 * {@link #implReset implReset}.
 *
 * <p> Instances of this class are not safe for use by multiple concurrent
 * threads.
 *
 * @author Mark Reinhold
 * @author JSR-51 Expert Group
 * @since 1.4
 *
 * @see ByteBuffer
 * @see CharBuffer
 * @see Charset
 * @see CharsetEncoder
 */

public abstract class CharsetDecoder {

    private final Charset charset;
    private final float averageCharsPerByte;
    private final float maxCharsPerByte;

    private String replacement;
    private CodingErrorAction malformedInputAction
        = CodingErrorAction.REPORT;
    private CodingErrorAction unmappableCharacterAction
        = CodingErrorAction.REPORT;

    // Internal states of the decoding-operation lifecycle; transitions are
    // enforced in decode(...) and flush(...) via throwIllegalStateException.
    //
    private static final int ST_RESET   = 0;
    private static final int ST_CODING  = 1;
    private static final int ST_END     = 2;
    private static final int ST_FLUSHED = 3;

    private int state = ST_RESET;

    private static String stateNames[]
        = { "RESET", "CODING", "CODING_END", "FLUSHED" };

    /**
     * Initializes a new decoder with the given chars-per-byte and replacement
     * values.
     *
     * @param averageCharsPerByte
     *         A positive float value indicating the expected number of
     *         characters that will be produced for each input byte
     *
     * @param maxCharsPerByte
     *         A positive float value indicating the maximum number of
     *         characters that will be produced for each input byte
     *
     * @param replacement
     *         The initial replacement; must not be <tt>null</tt>, must have
     *         non-zero length, and must not be longer than maxCharsPerByte
     *
     * @throws IllegalArgumentException
     *         If the preconditions on the parameters do not hold
     */
    private
    CharsetDecoder(Charset cs,
                   float averageCharsPerByte,
                   float maxCharsPerByte,
                   String replacement)
    {
        this.charset = cs;
        if (averageCharsPerByte <= 0.0f)
            throw new IllegalArgumentException("Non-positive "
                                               + "averageCharsPerByte");
        if (maxCharsPerByte <= 0.0f)
            throw new IllegalArgumentException("Non-positive "
                                               + "maxCharsPerByte");
        /* J2ObjC removed.
        if (!Charset.atBugLevel("1.4")) {*/
            if (averageCharsPerByte > maxCharsPerByte)
                throw new IllegalArgumentException("averageCharsPerByte"
                                                   + " exceeds "
                                                   + "maxCharsPerByte");
        //}
        this.replacement = replacement;
        this.averageCharsPerByte = averageCharsPerByte;
        this.maxCharsPerByte = maxCharsPerByte;
        /* ----- BEGIN android -----
        replaceWith(replacement);
        ----- END android ----- */
    }

    /**
     * Initializes a new decoder with the given chars-per-byte values; the
     * replacement will be the string <tt>"&#92;uFFFD"</tt>.
     *
     * @param averageCharsPerByte
     *         A positive float value indicating the expected number of
     *         characters that will be produced for each input byte
     *
     * @param maxCharsPerByte
     *         A positive float value indicating the maximum number of
     *         characters that will be produced for each input byte
     *
     * @throws IllegalArgumentException
     *         If the preconditions on the parameters do not hold
     */
    protected CharsetDecoder(Charset cs,
                             float averageCharsPerByte,
                             float maxCharsPerByte)
    {
        this(cs, averageCharsPerByte, maxCharsPerByte, "\uFFFD");
    }

    /**
     * Returns the charset that created this decoder.
     *
     * @return This decoder's charset
     */
    public final Charset charset() {
        return charset;
    }

    /**
     * Returns this decoder's replacement value.
     *
     * @return This decoder's current replacement, which is never
     *         <tt>null</tt> and is never empty
     */
    public final String replacement() {
        return replacement;
    }

    /**
     * Changes this decoder's replacement value.
     *
     * <p> Invokes {@link #implReplaceWith implReplaceWith} after validating
     * the new replacement. </p>
     *
     * @param newReplacement
     *         The new replacement; must not be <tt>null</tt> and must have
     *         non-zero length
     *
     * @return This decoder
     *
     * @throws IllegalArgumentException
     *         If the preconditions on the parameter do not hold
     */
    public final CharsetDecoder replaceWith(String newReplacement) {
        if (newReplacement == null)
            throw new IllegalArgumentException("Null replacement");
        int len = newReplacement.length();
        if (len == 0)
            throw new IllegalArgumentException("Empty replacement");
        if (len > maxCharsPerByte)
            throw new IllegalArgumentException("Replacement too long");
        this.replacement = newReplacement;
        implReplaceWith(newReplacement);
        return this;
    }

    /**
     * Reports a change to this decoder's replacement value.
     *
     * <p> The default implementation does nothing. Override in decoders that
     * need notification of replacement changes. </p>
     *
     * @param newReplacement  The replacement value just installed
     */
    protected void implReplaceWith(String newReplacement) {
    }

    /**
     * Returns this decoder's current action for malformed-input errors.
     *
     * @return The current malformed-input action, which is never <tt>null</tt>
     */
    public CodingErrorAction malformedInputAction() {
        return malformedInputAction;
    }

    /**
     * Changes this decoder's action for malformed-input errors.
     *
     * <p> Invokes {@link #implOnMalformedInput implOnMalformedInput} with the
     * new action. </p>
     *
     * @param newAction  The new action; must not be <tt>null</tt>
     *
     * @return This decoder
     *
     * @throws IllegalArgumentException
     *         If the precondition on the parameter does not hold
     */
    public final CharsetDecoder onMalformedInput(CodingErrorAction newAction) {
        if (newAction == null)
            throw new IllegalArgumentException("Null action");
        malformedInputAction = newAction;
        implOnMalformedInput(newAction);
        return this;
    }

    /**
     * Reports a change to this decoder's malformed-input action.
     *
     * <p> The default implementation does nothing. Override in decoders that
     * need notification of malformed-input action changes. </p>
     */
    protected void implOnMalformedInput(CodingErrorAction newAction) { }

    /**
     * Returns this decoder's current action for unmappable-character errors.
     *
     * @return The current unmappable-character action, which is never
     *         <tt>null</tt>
     */
    public CodingErrorAction unmappableCharacterAction() {
        return unmappableCharacterAction;
    }

    /**
     * Changes this decoder's action for unmappable-character errors.
     *
     * <p> Invokes {@link #implOnUnmappableCharacter implOnUnmappableCharacter}
     * with the new action. </p>
     *
     * @param newAction  The new action; must not be <tt>null</tt>
     *
     * @return This decoder
     *
     * @throws IllegalArgumentException
     *         If the precondition on the parameter does not hold
     */
    public final CharsetDecoder onUnmappableCharacter(CodingErrorAction
                                                      newAction)
    {
        if (newAction == null)
            throw new IllegalArgumentException("Null action");
        unmappableCharacterAction = newAction;
        implOnUnmappableCharacter(newAction);
        return this;
    }

    /**
     * Reports a change to this decoder's unmappable-character action.
     *
     * <p> The default implementation does nothing. Override in decoders that
     * need notification of unmappable-character action changes. </p>
     */
    protected void implOnUnmappableCharacter(CodingErrorAction newAction) { }

    /**
     * Returns the average number of characters produced per input byte; may
     * be used to estimate output-buffer size for a given input.
     *
     * @return The average number of characters produced per byte of input
     */
    public final float averageCharsPerByte() {
        return averageCharsPerByte;
    }

    /**
     * Returns the maximum number of characters produced per input byte; may
     * be used to compute the worst-case output-buffer size for a given input.
     *
     * @return The maximum number of characters that will be produced per byte
     *         of input
     */
    public final float maxCharsPerByte() {
        return maxCharsPerByte;
    }

    /**
     * Decodes as many bytes as possible from the given input buffer, writing
     * the results to the given output buffer.
     *
     * <p> Buffers are read from / written to starting at their current
     * positions; positions advance, marks and limits are untouched. Returns a
     * {@link CoderResult} describing termination: {@link CoderResult#UNDERFLOW}
     * (input exhausted — supply more, or proceed to flush),
     * {@link CoderResult#OVERFLOW} (output full — drain and reinvoke), or a
     * malformed-input / unmappable-character result when the corresponding
     * action is {@link CodingErrorAction#REPORT} (otherwise the error is
     * ignored or replaced internally and decoding continues).
     *
     * <p> Pass <tt>false</tt> for <tt>endOfInput</tt> while further input may
     * follow; the final invocation of a decoding operation must pass
     * <tt>true</tt> so that remaining undecoded input is treated as malformed.
     *
     * <p> This method works by invoking {@link #decodeLoop decodeLoop},
     * interpreting its results, handling error conditions, and reinvoking it
     * as necessary. </p>
     *
     * @param in
     *         The input byte buffer
     *
     * @param out
     *         The output character buffer
     *
     * @param endOfInput
     *         <tt>true</tt> if, and only if, the invoker can provide no
     *         additional input bytes beyond those in the given buffer
     *
     * @return A coder-result object describing the reason for termination
     *
     * @throws IllegalStateException
     *         If a decoding operation is already in progress and the previous
     *         step was neither a {@link #reset reset} nor a legal prior
     *         invocation of this method
     *
     * @throws CoderMalfunctionError
     *         If an invocation of the decodeLoop method threw an unexpected
     *         exception
     */
    public final CoderResult decode(ByteBuffer in, CharBuffer out,
                                    boolean endOfInput)
    {
        int newState = endOfInput ? ST_END : ST_CODING;
        if ((state != ST_RESET) && (state != ST_CODING)
            && !(endOfInput && (state == ST_END)))
            throwIllegalStateException(state, newState);
        state = newState;

        for (;;) {

            CoderResult cr;
            try {
                cr = decodeLoop(in, out);
            } catch (BufferUnderflowException x) {
                // decodeLoop must signal via CoderResult, never by throwing
                throw new CoderMalfunctionError(x);
            } catch (BufferOverflowException x) {
                throw new CoderMalfunctionError(x);
            }

            if (cr.isOverflow())
                return cr;

            if (cr.isUnderflow()) {
                if (endOfInput && in.hasRemaining()) {
                    // Final call but bytes remain: treat the leftover as bad
                    cr = CoderResult.malformedForLength(in.remaining());
                    // Fall through to malformed-input case
                } else {
                    return cr;
                }
            }

            CodingErrorAction action = null;
            if (cr.isMalformed())
                action = malformedInputAction;
            else if (cr.isUnmappable())
                action = unmappableCharacterAction;
            else
                assert false : cr.toString();

            if (action == CodingErrorAction.REPORT)
                return cr;

            if (action == CodingErrorAction.REPLACE) {
                if (out.remaining() < replacement.length())
                    return CoderResult.OVERFLOW;
                out.put(replacement);
            }

            if ((action == CodingErrorAction.IGNORE)
                || (action == CodingErrorAction.REPLACE)) {
                // Skip erroneous input either way
                in.position(in.position() + cr.length());
                continue;
            }

            assert false;
        }

    }

    /**
     * Flushes this decoder, writing any final characters of internal state to
     * the output buffer.
     *
     * <p> Returns {@link CoderResult#UNDERFLOW} on success, or
     * {@link CoderResult#OVERFLOW} if the output buffer is too small (reinvoke
     * with more room). Flushing an already-flushed decoder has no effect.
     * Delegates the actual work to {@link #implFlush implFlush}. </p>
     *
     * @param out
     *         The output character buffer
     *
     * @return A coder-result object, either {@link CoderResult#UNDERFLOW} or
     *         {@link CoderResult#OVERFLOW}
     *
     * @throws IllegalStateException
     *         If the previous step of the current decoding operation was
     *         neither a flush nor a final <tt>decode</tt> invocation with
     *         <tt>endOfInput == true</tt>
     */
    public final CoderResult flush(CharBuffer out) {
        if (state == ST_END) {
            CoderResult cr = implFlush(out);
            if (cr.isUnderflow())
                state = ST_FLUSHED;
            return cr;
        }

        if (state != ST_FLUSHED)
            throwIllegalStateException(state, ST_FLUSHED);

        return CoderResult.UNDERFLOW; // Already flushed
    }

    /**
     * Flushes this decoder.
     *
     * <p> The default implementation does nothing and always returns
     * {@link CoderResult#UNDERFLOW}. Override in decoders that may need to
     * write final characters once the entire input sequence has been read. </p>
     *
     * @param out
     *         The output character buffer
     *
     * @return A coder-result object, either {@link CoderResult#UNDERFLOW} or
     *         {@link CoderResult#OVERFLOW}
     */
    protected CoderResult implFlush(CharBuffer out) {
        return CoderResult.UNDERFLOW;
    }

    /**
     * Resets this decoder, clearing any internal state.
     *
     * <p> Resets charset-independent state and invokes {@link #implReset()
     * implReset} for charset-specific reset actions. </p>
     *
     * @return This decoder
     */
    public final CharsetDecoder reset() {
        implReset();
        state = ST_RESET;
        return this;
    }

    /**
     * Resets this decoder, clearing any charset-specific internal state.
     *
     * <p> The default implementation does nothing. Override in decoders that
     * maintain internal state. </p>
     */
    protected void implReset() { }

    /**
     * Decodes one or more bytes into one or more characters.
     *
     * <p> Encapsulates the basic decoding loop: decode as many bytes as
     * possible until input runs out, the output buffer fills, or a decoding
     * error occurs. Invoked by {@link #decode decode}, which handles result
     * interpretation and error recovery; most implementations report errors
     * by returning an appropriate {@link CoderResult}. Buffer positions
     * advance; marks and limits are untouched. An implementation may perform
     * arbitrary lookahead by returning {@link CoderResult#UNDERFLOW} until it
     * receives sufficient input. </p>
     *
     * @param in
     *         The input byte buffer
     *
     * @param out
     *         The output character buffer
     *
     * @return A coder-result object describing the reason for termination
     */
    protected abstract CoderResult decodeLoop(ByteBuffer in,
                                              CharBuffer out);

    /**
     * Convenience method that decodes the remaining content of a single input
     * byte buffer into a newly-allocated character buffer.
     *
     * <p> Implements an entire decoding operation: resets this decoder,
     * decodes the bytes in the given buffer, then flushes. Must therefore not
     * be invoked while another decoding operation is in progress. </p>
     *
     * @param in
     *         The input byte buffer
     *
     * @return A newly-allocated character buffer containing the result of the
     *         decoding operation. The buffer's position will be zero and its
     *         limit will follow the last character written.
     *
     * @throws IllegalStateException
     *         If a decoding operation is already in progress
     *
     * @throws MalformedInputException
     *         If the byte sequence starting at the input buffer's current
     *         position is not legal for this charset and the current
     *         malformed-input action is {@link CodingErrorAction#REPORT}
     *
     * @throws UnmappableCharacterException
     *         If the byte sequence starting at the input buffer's current
     *         position cannot be mapped to an equivalent character sequence
     *         and the current unmappable-character action is {@link
     *         CodingErrorAction#REPORT}
     */
    public final CharBuffer decode(ByteBuffer in)
        throws CharacterCodingException
    {
        // Initial size estimate from the average chars-per-byte heuristic
        int n = (int)(in.remaining() * averageCharsPerByte());
        CharBuffer out = CharBuffer.allocate(n);

        if ((n == 0) && (in.remaining() == 0))
            return out;
        reset();
        for (;;) {
            CoderResult cr = in.hasRemaining() ?
                decode(in, out, true) : CoderResult.UNDERFLOW;
            if (cr.isUnderflow())
                cr = flush(out);

            if (cr.isUnderflow())
                break;
            if (cr.isOverflow()) {
                // Grow the output buffer and copy what we have so far
                n = 2*n + 1;    // Ensure progress; n might be 0!
                CharBuffer o = CharBuffer.allocate(n);
                out.flip();
                o.put(out);
                out = o;
                continue;
            }
            cr.throwException();
        }
        out.flip();
        return out;
    }

    /**
     * Tells whether or not this decoder implements an auto-detecting charset.
     *
     * <p> The default implementation always returns <tt>false</tt>; override
     * in auto-detecting decoders to return <tt>true</tt>. </p>
     *
     * @return <tt>true</tt> if, and only if, this decoder implements an
     *         auto-detecting charset
     */
    public boolean isAutoDetecting() {
        return false;
    }

    /**
     * Tells whether or not this decoder has yet detected a
     * charset&nbsp;&nbsp;<i>(optional operation)</i>.
     *
     * <p> For auto-detecting decoders this may start returning <tt>true</tt>
     * once a specific charset has been detected in the input, after which
     * {@link #detectedCharset detectedCharset} may be invoked. A
     * <tt>false</tt> return does not imply that no bytes have been decoded.
     *
     * <p> The default implementation always throws an {@link
     * UnsupportedOperationException}; override in auto-detecting decoders. </p>
     *
     * @return <tt>true</tt> if, and only if, this decoder has detected a
     *         specific charset
     *
     * @throws UnsupportedOperationException
     *         If this decoder does not implement an auto-detecting charset
     */
    public boolean isCharsetDetected() {
        throw new UnsupportedOperationException();
    }

    /**
     * Retrieves the charset that was detected by this
     * decoder&nbsp;&nbsp;<i>(optional operation)</i>.
     *
     * <p> For auto-detecting decoders, returns the actual charset once
     * detected (the same value for the rest of the decoding operation);
     * throws {@link IllegalStateException} if not enough input has been read.
     *
     * <p> The default implementation always throws an {@link
     * UnsupportedOperationException}; override in auto-detecting decoders. </p>
     *
     * @return The charset detected by this auto-detecting decoder, or
     *         <tt>null</tt> if the charset has not yet been determined
     *
     * @throws IllegalStateException
     *         If insufficient bytes have been read to determine a charset
     *
     * @throws UnsupportedOperationException
     *         If this decoder does not implement an auto-detecting charset
     */
    public Charset detectedCharset() {
        throw new UnsupportedOperationException();
    }

    // Builds the diagnostic for an illegal lifecycle transition using the
    // human-readable state names.
    private void throwIllegalStateException(int from, int to) {
        throw new IllegalStateException("Current state = " + stateNames[from]
                                        + ", new state = " + stateNames[to]);
    }

}
/* * Copyright (C) 2007 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.io; import com.google.common.base.Charsets; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.EOFException; import java.io.File; import java.io.FileOutputStream; import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.RandomAccessFile; import java.nio.channels.Channels; import java.nio.channels.ReadableByteChannel; import java.nio.channels.WritableByteChannel; import java.util.Arrays; /** * Unit test for {@link ByteStreams}. 
* * @author Chris Nokleberg */ public class ByteStreamsTest extends IoTestCase { public void testCopyChannel() throws IOException { byte[] expected = newPreFilledByteArray(100); ByteArrayOutputStream out = new ByteArrayOutputStream(); WritableByteChannel outChannel = Channels.newChannel(out); ReadableByteChannel inChannel = Channels.newChannel(new ByteArrayInputStream(expected)); ByteStreams.copy(inChannel, outChannel); assertEquals(expected, out.toByteArray()); } public void testCopyFileChannel() throws IOException { final int chunkSize = 14407; // Random prime, unlikely to match any internal chunk size ByteArrayOutputStream out = new ByteArrayOutputStream(); WritableByteChannel outChannel = Channels.newChannel(out); File testFile = createTempFile(); FileOutputStream fos = new FileOutputStream(testFile); byte[] dummyData = newPreFilledByteArray(chunkSize); try { for (int i = 0; i < 500; i++) { fos.write(dummyData); } } finally { fos.close(); } ReadableByteChannel inChannel = new RandomAccessFile(testFile, "r").getChannel(); try { ByteStreams.copy(inChannel, outChannel); } finally { inChannel.close(); } byte[] actual = out.toByteArray(); for (int i = 0; i < 500 * chunkSize; i += chunkSize) { assertEquals(dummyData, Arrays.copyOfRange(actual, i, i + chunkSize)); } } public void testReadFully() throws IOException { byte[] b = new byte[10]; try { ByteStreams.readFully(newTestStream(10), null, 0, 10); fail("expected exception"); } catch (NullPointerException e) { } try { ByteStreams.readFully(null, b, 0, 10); fail("expected exception"); } catch (NullPointerException e) { } try { ByteStreams.readFully(newTestStream(10), b, -1, 10); fail("expected exception"); } catch (IndexOutOfBoundsException e) { } try { ByteStreams.readFully(newTestStream(10), b, 0, -1); fail("expected exception"); } catch (IndexOutOfBoundsException e) { } try { ByteStreams.readFully(newTestStream(10), b, 0, -1); fail("expected exception"); } catch (IndexOutOfBoundsException e) { } try { 
ByteStreams.readFully(newTestStream(10), b, 2, 10); fail("expected exception"); } catch (IndexOutOfBoundsException e) { } try { ByteStreams.readFully(newTestStream(5), b, 0, 10); fail("expected exception"); } catch (EOFException e) { } Arrays.fill(b, (byte) 0); ByteStreams.readFully(newTestStream(10), b, 0, 0); assertEquals(new byte[10], b); Arrays.fill(b, (byte) 0); ByteStreams.readFully(newTestStream(10), b, 0, 10); assertEquals(newPreFilledByteArray(10), b); Arrays.fill(b, (byte) 0); ByteStreams.readFully(newTestStream(10), b, 0, 5); assertEquals(new byte[]{0, 1, 2, 3, 4, 0, 0, 0, 0, 0}, b); } public void testSkipFully() throws IOException { byte[] bytes = newPreFilledByteArray(100); skipHelper(0, 0, new ByteArrayInputStream(bytes)); skipHelper(50, 50, new ByteArrayInputStream(bytes)); skipHelper(50, 50, new SlowSkipper(new ByteArrayInputStream(bytes), 1)); skipHelper(50, 50, new SlowSkipper(new ByteArrayInputStream(bytes), 0)); skipHelper(100, -1, new ByteArrayInputStream(bytes)); try { skipHelper(101, 0, new ByteArrayInputStream(bytes)); fail("expected exception"); } catch (EOFException e) { } } private static void skipHelper(long n, int expect, InputStream in) throws IOException { ByteStreams.skipFully(in, n); assertEquals(expect, in.read()); in.close(); } private static final byte[] bytes = new byte[] { 0x12, 0x34, 0x56, 0x78, 0x76, 0x54, 0x32, 0x10 }; public void testNewDataInput_empty() { byte[] b = new byte[0]; ByteArrayDataInput in = ByteStreams.newDataInput(b); try { in.readInt(); fail("expected exception"); } catch (IllegalStateException expected) { } } public void testNewDataInput_normal() { ByteArrayDataInput in = ByteStreams.newDataInput(bytes); assertEquals(0x12345678, in.readInt()); assertEquals(0x76543210, in.readInt()); try { in.readInt(); fail("expected exception"); } catch (IllegalStateException expected) { } } public void testNewDataInput_readFully() { ByteArrayDataInput in = ByteStreams.newDataInput(bytes); byte[] actual = new 
byte[bytes.length]; in.readFully(actual); assertEquals(bytes, actual); } public void testNewDataInput_readFullyAndThenSome() { ByteArrayDataInput in = ByteStreams.newDataInput(bytes); byte[] actual = new byte[bytes.length * 2]; try { in.readFully(actual); fail("expected exception"); } catch (IllegalStateException ex) { assertTrue(ex.getCause() instanceof EOFException); } } public void testNewDataInput_readFullyWithOffset() { ByteArrayDataInput in = ByteStreams.newDataInput(bytes); byte[] actual = new byte[4]; in.readFully(actual, 2, 2); assertEquals(0, actual[0]); assertEquals(0, actual[1]); assertEquals(bytes[0], actual[2]); assertEquals(bytes[1], actual[3]); } public void testNewDataInput_readLine() { ByteArrayDataInput in = ByteStreams.newDataInput( "This is a line\r\nThis too\rand this\nand also this".getBytes(Charsets.UTF_8)); assertEquals("This is a line", in.readLine()); assertEquals("This too", in.readLine()); assertEquals("and this", in.readLine()); assertEquals("and also this", in.readLine()); } public void testNewDataInput_readFloat() { byte[] data = {0x12, 0x34, 0x56, 0x78, 0x76, 0x54, 0x32, 0x10}; ByteArrayDataInput in = ByteStreams.newDataInput(data); assertEquals(Float.intBitsToFloat(0x12345678), in.readFloat(), 0.0); assertEquals(Float.intBitsToFloat(0x76543210), in.readFloat(), 0.0); } public void testNewDataInput_readDouble() { byte[] data = {0x12, 0x34, 0x56, 0x78, 0x76, 0x54, 0x32, 0x10}; ByteArrayDataInput in = ByteStreams.newDataInput(data); assertEquals(Double.longBitsToDouble(0x1234567876543210L), in.readDouble(), 0.0); } public void testNewDataInput_readUTF() { byte[] data = new byte[17]; data[1] = 15; System.arraycopy("Kilroy was here".getBytes(Charsets.UTF_8), 0, data, 2, 15); ByteArrayDataInput in = ByteStreams.newDataInput(data); assertEquals("Kilroy was here", in.readUTF()); } public void testNewDataInput_readChar() { byte[] data = "qed".getBytes(Charsets.UTF_16BE); ByteArrayDataInput in = ByteStreams.newDataInput(data); 
assertEquals('q', in.readChar()); assertEquals('e', in.readChar()); assertEquals('d', in.readChar()); } public void testNewDataInput_readUnsignedShort() { byte[] data = {0, 0, 0, 1, (byte) 0xFF, (byte) 0xFF, 0x12, 0x34}; ByteArrayDataInput in = ByteStreams.newDataInput(data); assertEquals(0, in.readUnsignedShort()); assertEquals(1, in.readUnsignedShort()); assertEquals(65535, in.readUnsignedShort()); assertEquals(0x1234, in.readUnsignedShort()); } public void testNewDataInput_readLong() { byte[] data = {0x12, 0x34, 0x56, 0x78, 0x76, 0x54, 0x32, 0x10}; ByteArrayDataInput in = ByteStreams.newDataInput(data); assertEquals(0x1234567876543210L, in.readLong()); } public void testNewDataInput_readBoolean() { ByteArrayDataInput in = ByteStreams.newDataInput(bytes); assertTrue(in.readBoolean()); } public void testNewDataInput_readByte() { ByteArrayDataInput in = ByteStreams.newDataInput(bytes); for (int i = 0; i < bytes.length; i++) { assertEquals(bytes[i], in.readByte()); } try { in.readByte(); fail("expected exception"); } catch (IllegalStateException ex) { assertTrue(ex.getCause() instanceof EOFException); } } public void testNewDataInput_readUnsignedByte() { ByteArrayDataInput in = ByteStreams.newDataInput(bytes); for (int i = 0; i < bytes.length; i++) { assertEquals(bytes[i], in.readUnsignedByte()); } try { in.readUnsignedByte(); fail("expected exception"); } catch (IllegalStateException ex) { assertTrue(ex.getCause() instanceof EOFException); } } public void testNewDataInput_offset() { ByteArrayDataInput in = ByteStreams.newDataInput(bytes, 2); assertEquals(0x56787654, in.readInt()); try { in.readInt(); fail("expected exception"); } catch (IllegalStateException expected) { } } public void testNewDataInput_skip() { ByteArrayDataInput in = ByteStreams.newDataInput(new byte[2]); assertEquals(2, in.skipBytes(2)); assertEquals(0, in.skipBytes(1)); } public void testNewDataInput_BAIS() { ByteArrayInputStream bais = new ByteArrayInputStream(new byte[] {0x12, 0x34, 0x56, 
0x78}); ByteArrayDataInput in = ByteStreams.newDataInput(bais); assertEquals(0x12345678, in.readInt()); } public void testNewDataOutput_empty() { ByteArrayDataOutput out = ByteStreams.newDataOutput(); assertEquals(0, out.toByteArray().length); } public void testNewDataOutput_writeInt() { ByteArrayDataOutput out = ByteStreams.newDataOutput(); out.writeInt(0x12345678); out.writeInt(0x76543210); assertEquals(bytes, out.toByteArray()); } public void testNewDataOutput_sized() { ByteArrayDataOutput out = ByteStreams.newDataOutput(4); out.writeInt(0x12345678); out.writeInt(0x76543210); assertEquals(bytes, out.toByteArray()); } public void testNewDataOutput_writeLong() { ByteArrayDataOutput out = ByteStreams.newDataOutput(); out.writeLong(0x1234567876543210L); assertEquals(bytes, out.toByteArray()); } public void testNewDataOutput_writeByteArray() { ByteArrayDataOutput out = ByteStreams.newDataOutput(); out.write(bytes); assertEquals(bytes, out.toByteArray()); } public void testNewDataOutput_writeByte() { ByteArrayDataOutput out = ByteStreams.newDataOutput(); out.write(0x12); out.writeByte(0x34); assertEquals(new byte[] {0x12, 0x34}, out.toByteArray()); } public void testNewDataOutput_writeByteOffset() { ByteArrayDataOutput out = ByteStreams.newDataOutput(); out.write(bytes, 4, 2); byte[] expected = {bytes[4], bytes[5]}; assertEquals(expected, out.toByteArray()); } public void testNewDataOutput_writeBoolean() { ByteArrayDataOutput out = ByteStreams.newDataOutput(); out.writeBoolean(true); out.writeBoolean(false); byte[] expected = {(byte) 1, (byte) 0}; assertEquals(expected, out.toByteArray()); } public void testNewDataOutput_writeChar() { ByteArrayDataOutput out = ByteStreams.newDataOutput(); out.writeChar('a'); assertEquals(new byte[] {0, 97}, out.toByteArray()); } public void testNewDataOutput_writeChars() { ByteArrayDataOutput out = ByteStreams.newDataOutput(); out.writeChars("r\u00C9sum\u00C9"); // need to remove byte order mark before comparing byte[] expected = 
Arrays.copyOfRange("r\u00C9sum\u00C9".getBytes(Charsets.UTF_16), 2, 14); assertEquals(expected, out.toByteArray()); } public void testNewDataOutput_writeUTF() { ByteArrayDataOutput out = ByteStreams.newDataOutput(); out.writeUTF("r\u00C9sum\u00C9"); byte[] expected ="r\u00C9sum\u00C9".getBytes(Charsets.UTF_8); byte[] actual = out.toByteArray(); // writeUTF writes the length of the string in 2 bytes assertEquals(0, actual[0]); assertEquals(expected.length, actual[1]); assertEquals(expected, Arrays.copyOfRange(actual, 2, actual.length)); } public void testNewDataOutput_writeShort() { ByteArrayDataOutput out = ByteStreams.newDataOutput(); out.writeShort(0x1234); assertEquals(new byte[] {0x12, 0x34}, out.toByteArray()); } public void testNewDataOutput_writeDouble() { ByteArrayDataOutput out = ByteStreams.newDataOutput(); out.writeDouble(Double.longBitsToDouble(0x1234567876543210L)); assertEquals(bytes, out.toByteArray()); } public void testNewDataOutput_writeFloat() { ByteArrayDataOutput out = ByteStreams.newDataOutput(); out.writeFloat(Float.intBitsToFloat(0x12345678)); out.writeFloat(Float.intBitsToFloat(0x76543210)); assertEquals(bytes, out.toByteArray()); } public void testNewDataOutput_BAOS() { ByteArrayOutputStream baos = new ByteArrayOutputStream(); ByteArrayDataOutput out = ByteStreams.newDataOutput(baos); out.writeInt(0x12345678); assertEquals(4, baos.size()); assertEquals(new byte[] {0x12, 0x34, 0x56, 0x78}, baos.toByteArray()); } public void testToByteArray_withSize_givenCorrectSize() throws IOException { InputStream in = newTestStream(100); byte[] b = ByteStreams.toByteArray(in, 100); assertEquals(100, b.length); } public void testToByteArray_withSize_givenSmallerSize() throws IOException { InputStream in = newTestStream(100); byte[] b = ByteStreams.toByteArray(in, 80); assertEquals(100, b.length); } public void testToByteArray_withSize_givenLargerSize() throws IOException { InputStream in = newTestStream(100); byte[] b = ByteStreams.toByteArray(in, 120); 
assertEquals(100, b.length); } public void testToByteArray_withSize_givenSizeZero() throws IOException { InputStream in = newTestStream(100); byte[] b = ByteStreams.toByteArray(in, 0); assertEquals(100, b.length); } private static InputStream newTestStream(int n) { return new ByteArrayInputStream(newPreFilledByteArray(n)); } /** Stream that will skip a maximum number of bytes at a time. */ private static class SlowSkipper extends FilterInputStream { private final long max; public SlowSkipper(InputStream in, long max) { super(in); this.max = max; } @Override public long skip(long n) throws IOException { return super.skip(Math.min(max, n)); } } public void testReadBytes() throws IOException { final byte[] array = newPreFilledByteArray(1000); assertEquals(array, ByteStreams.readBytes( new ByteArrayInputStream(array), new TestByteProcessor())); } private class TestByteProcessor implements ByteProcessor<byte[]> { private final ByteArrayOutputStream out = new ByteArrayOutputStream(); @Override public boolean processBytes(byte[] buf, int off, int len) throws IOException { out.write(buf, off, len); return true; } @Override public byte[] getResult() { return out.toByteArray(); } } public void testByteProcessorStopEarly() throws IOException { byte[] array = newPreFilledByteArray(10000); assertEquals((Integer) 42, ByteStreams.readBytes(new ByteArrayInputStream(array), new ByteProcessor<Integer>() { @Override public boolean processBytes(byte[] buf, int off, int len) { assertEquals( copyOfRange(buf, off, off + len), newPreFilledByteArray(8192)); return false; } @Override public Integer getResult() { return 42; } })); } public void testNullOutputStream() throws Exception { // create a null output stream OutputStream nos = ByteStreams.nullOutputStream(); // write to the output stream nos.write('n'); String test = "Test string for NullOutputStream"; nos.write(test.getBytes()); nos.write(test.getBytes(), 2, 10); // nothing really to assert? 
assertSame(ByteStreams.nullOutputStream(), ByteStreams.nullOutputStream()); } public void testLimit() throws Exception { byte[] big = newPreFilledByteArray(5); InputStream bin = new ByteArrayInputStream(big); InputStream lin = ByteStreams.limit(bin, 2); // also test available lin.mark(2); assertEquals(2, lin.available()); int read = lin.read(); assertEquals(big[0], read); assertEquals(1, lin.available()); read = lin.read(); assertEquals(big[1], read); assertEquals(0, lin.available()); read = lin.read(); assertEquals(-1, read); lin.reset(); byte[] small = new byte[5]; read = lin.read(small); assertEquals(2, read); assertEquals(big[0], small[0]); assertEquals(big[1], small[1]); lin.reset(); read = lin.read(small, 2, 3); assertEquals(2, read); assertEquals(big[0], small[2]); assertEquals(big[1], small[3]); } public void testLimit_mark() throws Exception { byte[] big = newPreFilledByteArray(5); InputStream bin = new ByteArrayInputStream(big); InputStream lin = ByteStreams.limit(bin, 2); int read = lin.read(); assertEquals(big[0], read); lin.mark(2); read = lin.read(); assertEquals(big[1], read); read = lin.read(); assertEquals(-1, read); lin.reset(); read = lin.read(); assertEquals(big[1], read); read = lin.read(); assertEquals(-1, read); } public void testLimit_skip() throws Exception { byte[] big = newPreFilledByteArray(5); InputStream bin = new ByteArrayInputStream(big); InputStream lin = ByteStreams.limit(bin, 2); // also test available lin.mark(2); assertEquals(2, lin.available()); lin.skip(1); assertEquals(1, lin.available()); lin.reset(); assertEquals(2, lin.available()); lin.skip(3); assertEquals(0, lin.available()); } public void testLimit_markNotSet() { byte[] big = newPreFilledByteArray(5); InputStream bin = new ByteArrayInputStream(big); InputStream lin = ByteStreams.limit(bin, 2); try { lin.reset(); fail(); } catch (IOException expected) { assertEquals("Mark not set", expected.getMessage()); } } public void testLimit_markNotSupported() { InputStream lin = 
ByteStreams.limit(new UnmarkableInputStream(), 2); try { lin.reset(); fail(); } catch (IOException expected) { assertEquals("Mark not supported", expected.getMessage()); } } private static class UnmarkableInputStream extends InputStream { @Override public int read() throws IOException { return 0; } @Override public boolean markSupported() { return false; } } private static byte[] copyOfRange(byte[] in, int from, int to) { byte[] out = new byte[to - from]; for (int i = 0; i < to - from; i++) { out[i] = in[from + i]; } return out; } private static void assertEquals(byte[] expected, byte[] actual) { assertEquals("Arrays differed in size", expected.length, actual.length); assertTrue("Array contents were different", Arrays.equals(expected, actual)); } }
package game;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Arrays;

/**
 * A Connect Four game: a 6x7 board into which red and green players
 * alternately drop discs; the first to line up four discs in a row,
 * column, or diagonal wins.
 *
 * Created by Al on 18/01/2016.
 */
public class Connect4 {

    public static final String NO_WINNER = "-";
    public static final String RED_WINNER = "R";
    public static final String GREEN_WINNER = "G";
    public static final int BOARD_ROWS = 6;
    public static final int BOARD_COLUMNS = 7;

    private static final int DISCS_TO_WIN = 4;

    // board[row][column]; row 0 is the bottom (discs land on the lowest empty row).
    private Color[][] board;
    // Player whose disc the next insertDisc() will place.
    private Color turn;
    // Sink for the rendered board / status messages.
    private ByteArrayOutputStream outputStream;

    /**
     * Creates an empty board. Red moves first.
     *
     * @param outputStream destination for board renderings and turn messages
     */
    public Connect4(ByteArrayOutputStream outputStream) {
        this.outputStream = outputStream;
        this.turn = Color.redColor();
        this.board = new Color[BOARD_ROWS][BOARD_COLUMNS];
        for (Color[] row : board) {
            Arrays.fill(row, Color.emptyColor());
        }
    }

    /**
     * Returns whether the board is completely full.
     *
     * NOTE(review): this reports "finished" only on a full board; a game won
     * before the board fills is not detected here — confirm callers also
     * consult {@link #winner()}.
     */
    public boolean isFinished() {
        return getNumberOfDiscs() == BOARD_ROWS * BOARD_COLUMNS;
    }

    /**
     * Scans columns, rows and both diagonal directions for four identical
     * discs in a row.
     *
     * @return {@link #RED_WINNER} or {@link #GREEN_WINNER} if a player has
     *         connected four, otherwise {@link #NO_WINNER}
     */
    public String winner() {
        // Vertical lines: one string per column.
        for (int i = 0; i < BOARD_COLUMNS; i++) {
            StringBuilder columnBuilder = new StringBuilder();
            for (int j = 0; j < BOARD_ROWS; j++) {
                columnBuilder.append(board[j][i].toString());
            }
            String result = checkLine(columnBuilder.toString());
            if (!result.equals(NO_WINNER)) {
                return result;
            }
        }

        // Horizontal lines: one string per row.
        for (int i = 0; i < BOARD_ROWS; i++) {
            StringBuilder rowBuilder = new StringBuilder();
            for (int j = 0; j < BOARD_COLUMNS; j++) {
                rowBuilder.append(board[i][j].toString());
            }
            String result = checkLine(rowBuilder.toString());
            if (!result.equals(NO_WINNER)) {
                return result;
            }
        }

        // Up-right diagonals: each window of DISCS_TO_WIN cells starting at
        // (i, k) and stepping (+1, +1).
        // BUG FIX: the start-column loop previously used
        // `k < BOARD_COLUMNS - DISCS_TO_WIN`, which skipped every diagonal
        // starting in the last eligible column (column 3), so wins on those
        // diagonals were never detected.
        for (int i = 0; i <= BOARD_ROWS - DISCS_TO_WIN; i++) {
            for (int k = 0; k <= BOARD_COLUMNS - DISCS_TO_WIN; k++) {
                StringBuilder diagonalBuilder = new StringBuilder();
                // `j < DISCS_TO_WIN` spans the same 4 cells the original
                // `j <= BOARD_COLUMNS - DISCS_TO_WIN` did, but states intent.
                for (int j = 0; j < DISCS_TO_WIN; j++) {
                    diagonalBuilder.append(board[i + j][k + j].toString());
                }
                String result = checkLine(diagonalBuilder.toString());
                if (!result.equals(NO_WINNER)) {
                    return result;
                }
            }
        }

        // Up-left diagonals: windows starting at (i, k) and stepping (+1, -1).
        for (int i = 0; i <= BOARD_ROWS - DISCS_TO_WIN; i++) {
            for (int k = DISCS_TO_WIN - 1; k < BOARD_COLUMNS; k++) {
                StringBuilder diagonalBuilder = new StringBuilder();
                for (int j = 0; j < DISCS_TO_WIN; j++) {
                    diagonalBuilder.append(board[i + j][k - j].toString());
                }
                String result = checkLine(diagonalBuilder.toString());
                if (!result.equals(NO_WINNER)) {
                    return result;
                }
            }
        }

        return NO_WINNER;
    }

    /**
     * Checks a rendered line of discs for four consecutive identical colors.
     */
    private String checkLine(String line) {
        if (line.length() < DISCS_TO_WIN) {
            return NO_WINNER;
        } else if (line.contains("RRRR")) {
            return RED_WINNER;
        } else if (line.contains("GGGG")) {
            return GREEN_WINNER;
        }
        return NO_WINNER;
    }

    /** Counts the cells that hold a non-empty disc. */
    public int getNumberOfDiscs() {
        int numberOfDiscs = 0;
        for (int i = 0; i < BOARD_ROWS; i++) {
            for (int j = 0; j < BOARD_COLUMNS; j++) {
                if (!board[i][j].equals(Color.emptyColor())) {
                    numberOfDiscs++;
                }
            }
        }
        return numberOfDiscs;
    }

    /**
     * Drops the current player's disc into the given 1-based column, switches
     * the turn, and writes the rendered board to the output stream.
     *
     * @param column 1-based column index
     * @return the 0-based row the disc landed on
     * @throws OutOfBoardException if {@code column} is outside 1..{@link #BOARD_COLUMNS}
     * @throws ColumnFullOfDIscsException if the column is already full
     */
    public int insertDisc(int column) throws OutOfBoardException, ColumnFullOfDIscsException {
        if (column < 1 || column > BOARD_COLUMNS) {
            throw new OutOfBoardException();
        }
        int insertRow = 0;
        column = fixedColumn(column);
        if (isColumnFull(column)) {
            throw new ColumnFullOfDIscsException();
        } else {
            insertRow = rowForDisc(column);
            board[insertRow][column] = turn;
            switchPlayer();
        }
        String boardOutput = buildBoardOutput();
        tryPrintOutput(boardOutput);
        return insertRow;
    }

    /** Renders the board top row first, one line per row. */
    private String buildBoardOutput() {
        StringBuilder boardBuilder = new StringBuilder();
        for (int i = BOARD_ROWS - 1; i >= 0; i--) {
            boardBuilder.append(buildRowOutput(i));
            boardBuilder.append("\n");
        }
        return boardBuilder.toString();
    }

    /** Renders one row as "|"-separated cells. */
    private String buildRowOutput(int i) {
        StringBuilder rowBuilder = new StringBuilder();
        for (int j = 0; j < BOARD_COLUMNS; j++) {
            rowBuilder.append("|");
            rowBuilder.append(board[i][j]);
        }
        rowBuilder.append("|");
        return rowBuilder.toString();
    }

    /** Returns whether the top cell of the (0-based) column is occupied. */
    public boolean isColumnFull(int column) {
        if (!board[fixedRow(BOARD_ROWS)][column].equals(Color.emptyColor())) {
            return true;
        }
        return false;
    }

    /**
     * Announces and returns the player to move next.
     */
    public Color getNextTurn() {
        // BUG FIX: the message was hard-coded to "R turn" even when it is
        // green's turn; derive it from the current turn's color instead.
        String message = turn + " turn";
        tryPrintOutput(message);
        return turn;
    }

    /** Resets the stream and writes the message; I/O failures are only logged. */
    private void tryPrintOutput(String message) {
        try {
            outputStream.reset();
            outputStream.write(message.getBytes());
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Alternates the turn between red and green. */
    private void switchPlayer() {
        if (turn.equals(Color.greenColor())) {
            turn = Color.redColor();
        } else {
            turn = Color.greenColor();
        }
    }

    /** Lowest empty row in the (0-based) column; caller ensures the column is not full. */
    private int rowForDisc(int column) {
        int row = 0;
        for (int i = 0; i < BOARD_ROWS; i++) {
            if (board[i][column].equals(Color.emptyColor())) {
                row = i;
                break;
            }
        }
        return row;
    }

    /** Converts a 1-based column index to 0-based. */
    private int fixedColumn(int column) {
        return column - 1;
    }

    /** Converts a 1-based row index to 0-based. */
    private int fixedRow(int row) {
        return row - 1;
    }
}
/* * Copyright (C) 2009 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package eu.janmuller.android.simplecropimage; import android.content.Context; import android.graphics.Bitmap; import android.graphics.Matrix; import android.graphics.RectF; import android.graphics.drawable.Drawable; import android.os.Handler; import android.util.AttributeSet; import android.view.KeyEvent; import android.widget.ImageView; abstract class ImageViewTouchBase extends ImageView { @SuppressWarnings("unused") private static final String TAG = "ImageViewTouchBase"; // This is the base transformation which is used to show the image // initially. The current computation for this shows the image in // it's entirety, letterboxing as needed. One could choose to // show the image as cropped instead. // // This matrix is recomputed when we go from the thumbnail image to // the full size image. private final Matrix mBaseMatrix = new Matrix(); // This is the supplementary transformation which reflects what // the user has done in terms of zooming and panning. // // This matrix remains the same when we go from the thumbnail image // to the full size image. private final Matrix mSuppMatrix = new Matrix(); // This is the final matrix which is computed as the concatentation // of the base matrix and the supplementary matrix. private final Matrix mDisplayMatrix = new Matrix(); // Temporary buffer used for getting the values out of a matrix. 
private final float[] mMatrixValues = new float[9]; // The current bitmap being displayed. final RotateBitmap mBitmapDisplayed = new RotateBitmap(null); private int mThisWidth = -1; private int mThisHeight = -1; private float mMaxZoom; int mLeft; int mRight; int mTop; int mBottom; // ImageViewTouchBase will pass a Bitmap to the Recycler if it has finished // its use of that Bitmap. public interface Recycler { void recycle(Bitmap b); } public void setRecycler(Recycler r) { mRecycler = r; } private Recycler mRecycler; @Override protected void onLayout(boolean changed, int left, int top, int right, int bottom) { super.onLayout(changed, left, top, right, bottom); mLeft = left; mRight = right; mTop = top; mBottom = bottom; mThisWidth = right - left; mThisHeight = bottom - top; Runnable r = mOnLayoutRunnable; if (r != null) { mOnLayoutRunnable = null; r.run(); } if (mBitmapDisplayed.getBitmap() != null) { getProperBaseMatrix(mBitmapDisplayed, mBaseMatrix); setImageMatrix(getImageViewMatrix()); } } @Override public boolean onKeyDown(int keyCode, KeyEvent event) { if (keyCode == KeyEvent.KEYCODE_BACK && getScale() > 1.0f) { // If we're zoomed in, pressing Back jumps out to show the entire // image, otherwise Back returns the user to the gallery. 
zoomTo(1.0f); return true; } return super.onKeyDown(keyCode, event); } private final Handler mHandler = new Handler(); @Override public void setImageBitmap(Bitmap bitmap) { setImageBitmap(bitmap, 0); } private void setImageBitmap(Bitmap bitmap, int rotation) { super.setImageBitmap(bitmap); Drawable d = getDrawable(); if (d != null) { d.setDither(true); } Bitmap old = mBitmapDisplayed.getBitmap(); mBitmapDisplayed.setBitmap(bitmap); mBitmapDisplayed.setRotation(rotation); if (old != null && old != bitmap && mRecycler != null) { mRecycler.recycle(old); } } public void clear() { setImageBitmapResetBase(null, true); } private Runnable mOnLayoutRunnable = null; // This function changes bitmap, reset base matrix according to the size // of the bitmap, and optionally reset the supplementary matrix. public void setImageBitmapResetBase(final Bitmap bitmap, @SuppressWarnings("SameParameterValue") final boolean resetSupp) { setImageRotateBitmapResetBase(new RotateBitmap(bitmap), resetSupp); } public void setImageRotateBitmapResetBase(final RotateBitmap bitmap, final boolean resetSupp) { final int viewWidth = getWidth(); if (viewWidth <= 0) { mOnLayoutRunnable = new Runnable() { public void run() { setImageRotateBitmapResetBase(bitmap, resetSupp); } }; return; } if (bitmap.getBitmap() != null) { getProperBaseMatrix(bitmap, mBaseMatrix); setImageBitmap(bitmap.getBitmap(), bitmap.getRotation()); } else { mBaseMatrix.reset(); setImageBitmap(null); } if (resetSupp) { mSuppMatrix.reset(); } setImageMatrix(getImageViewMatrix()); mMaxZoom = maxZoom(); } // Center as much as possible in one or both axis. Centering is // defined as follows: if the image is scaled down below the // view's dimensions then center it (literally). If the image // is scaled larger than the view and is translated out of view // then translate it back into view (i.e. eliminate black bars). 
void center(@SuppressWarnings("SameParameterValue") boolean horizontal,
            @SuppressWarnings("SameParameterValue") boolean vertical) {
        if (mBitmapDisplayed.getBitmap() == null) {
            return;
        }
        Matrix m = getImageViewMatrix();

        // Map the bitmap bounds through the current display matrix to find
        // where the image actually sits relative to the view.
        RectF rect = new RectF(0, 0,
                mBitmapDisplayed.getBitmap().getWidth(),
                mBitmapDisplayed.getBitmap().getHeight());
        m.mapRect(rect);

        float height = rect.height();
        float width = rect.width();
        float deltaX = 0, deltaY = 0;

        if (vertical) {
            int viewHeight = getHeight();
            if (height < viewHeight) {
                // Image shorter than the view: center it vertically.
                deltaY = (viewHeight - height) / 2 - rect.top;
            } else if (rect.top > 0) {
                // Gap above the image: pull it flush with the top edge.
                deltaY = -rect.top;
            } else if (rect.bottom < viewHeight) {
                // Gap below the image: push it flush with the bottom edge.
                deltaY = getHeight() - rect.bottom;
            }
        }

        if (horizontal) {
            int viewWidth = getWidth();
            if (width < viewWidth) {
                // Image narrower than the view: center it horizontally.
                deltaX = (viewWidth - width) / 2 - rect.left;
            } else if (rect.left > 0) {
                deltaX = -rect.left;
            } else if (rect.right < viewWidth) {
                deltaX = viewWidth - rect.right;
            }
        }

        postTranslate(deltaX, deltaY);
        setImageMatrix(getImageViewMatrix());
    }

    public ImageViewTouchBase(Context context) {
        super(context);
        init();
    }

    public ImageViewTouchBase(Context context, AttributeSet attrs) {
        super(context, attrs);
        init();
    }

    private void init() {
        // All positioning/scaling is driven by our own matrices.
        setScaleType(ScaleType.MATRIX);
    }

    // Reads a single value (e.g. a scale component) out of the given matrix.
    private float getValue(Matrix matrix,
            @SuppressWarnings("SameParameterValue") int whichValue) {
        matrix.getValues(mMatrixValues);
        return mMatrixValues[whichValue];
    }

    // Get the scale factor out of the matrix.
    private float getScale(Matrix matrix) {
        return getValue(matrix, Matrix.MSCALE_X);
    }

    float getScale() {
        return getScale(mSuppMatrix);
    }

    // Setup the base matrix so that the image is centered and scaled properly.
    private void getProperBaseMatrix(RotateBitmap bitmap, Matrix matrix) {
        float viewWidth = getWidth();
        float viewHeight = getHeight();

        float w = bitmap.getWidth();
        float h = bitmap.getHeight();
        matrix.reset();

        // We limit up-scaling to 2x otherwise the result may look bad if it's
        // a small icon.
        float widthScale = Math.min(viewWidth / w, 2.0f);
        float heightScale = Math.min(viewHeight / h, 2.0f);
        float scale = Math.min(widthScale, heightScale);

        // Rotate first, then scale to fit, then center in the view.
        matrix.postConcat(bitmap.getRotateMatrix());
        matrix.postScale(scale, scale);
        matrix.postTranslate(
                (viewWidth - w * scale) / 2F,
                (viewHeight - h * scale) / 2F);
    }

    // Combine the base matrix and the supp matrix to make the final matrix.
    private Matrix getImageViewMatrix() {
        // The final matrix is computed as the concatentation of the base matrix
        // and the supplementary matrix.
        mDisplayMatrix.set(mBaseMatrix);
        mDisplayMatrix.postConcat(mSuppMatrix);
        return mDisplayMatrix;
    }

    private static final float SCALE_RATE = 1.25F;

    // Sets the maximum zoom, which is a scale relative to the base matrix. It
    // is calculated to show the image at 400% zoom regardless of screen or
    // image orientation. If in the future we decode the full 3 megapixel image,
    // rather than the current 1024x768, this should be changed down to 200%.
    private float maxZoom() {
        if (mBitmapDisplayed.getBitmap() == null) {
            return 1F;
        }
        float fw = (float) mBitmapDisplayed.getWidth() / (float) mThisWidth;
        float fh = (float) mBitmapDisplayed.getHeight() / (float) mThisHeight;
        return Math.max(fw, fh) * 4;
    }

    // Zooms to the given scale (clamped to mMaxZoom) around the given focal point.
    void zoomTo(float scale, float centerX, float centerY) {
        if (scale > mMaxZoom) {
            scale = mMaxZoom;
        }
        float oldScale = getScale();
        float deltaScale = scale / oldScale;
        mSuppMatrix.postScale(deltaScale, deltaScale, centerX, centerY);
        setImageMatrix(getImageViewMatrix());
        center(true, true);
    }

    // Animated zoom: interpolates linearly from the current scale to `scale`
    // over `durationMs`, re-posting itself on mHandler for each frame.
    void zoomTo(final float scale, final float centerX, final float centerY,
            @SuppressWarnings("SameParameterValue") final float durationMs) {
        final float incrementPerMs = (scale - getScale()) / durationMs;
        final float oldScale = getScale();
        final long startTime = System.currentTimeMillis();

        mHandler.post(new Runnable() {
            public void run() {
                long now = System.currentTimeMillis();
                // Clamp elapsed time so we never overshoot the target scale.
                float currentMs = Math.min(durationMs, now - startTime);
                float target = oldScale + (incrementPerMs * currentMs);
                zoomTo(target, centerX, centerY);

                if (currentMs < durationMs) {
                    mHandler.post(this);
                }
            }
        });
    }

    // Zooms around the view center.
    private void zoomTo(@SuppressWarnings("SameParameterValue") float scale) {
        float cx = getWidth() / 2F;
        float cy = getHeight() / 2F;
        zoomTo(scale, cx, cy);
    }

    void zoomIn() {
        zoomIn(SCALE_RATE);
    }

    void zoomOut() {
        zoomOut(SCALE_RATE);
    }

    private void zoomIn(@SuppressWarnings("SameParameterValue") float rate) {
        if (getScale() >= mMaxZoom) {
            return; // Don't let the user zoom into the molecular level.
        }
        if (mBitmapDisplayed.getBitmap() == null) {
            return;
        }

        float cx = getWidth() / 2F;
        float cy = getHeight() / 2F;

        mSuppMatrix.postScale(rate, rate, cx, cy);
        setImageMatrix(getImageViewMatrix());
    }

    private void zoomOut(@SuppressWarnings("SameParameterValue") float rate) {
        if (mBitmapDisplayed.getBitmap() == null) {
            return;
        }

        float cx = getWidth() / 2F;
        float cy = getHeight() / 2F;

        // Zoom out to at most 1x.
        // Probe the result on a copy first so we can snap to exactly 1x instead
        // of going below it.
        Matrix tmp = new Matrix(mSuppMatrix);
        tmp.postScale(1F / rate, 1F / rate, cx, cy);

        if (getScale(tmp) < 1F) {
            mSuppMatrix.setScale(1F, 1F, cx, cy);
        } else {
            mSuppMatrix.postScale(1F / rate, 1F / rate, cx, cy);
        }
        setImageMatrix(getImageViewMatrix());
        center(true, true);
    }

    // Translates the supplementary matrix without redrawing; callers refresh via panBy.
    void postTranslate(float dx, float dy) {
        mSuppMatrix.postTranslate(dx, dy);
    }

    void panBy(float dx, float dy) {
        postTranslate(dx, dy);
        setImageMatrix(getImageViewMatrix());
    }
}
/*******************************************************************************
 Copyright (c) 2014,2015, Oracle and/or its affiliates. All rights reserved.

 $revision_history$
 06-feb-2013   Steven Davelaar
 1.0           initial creation
******************************************************************************/
package oracle.ateam.sample.mobile.dt.view.uipanel;

import java.awt.BorderLayout;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import javax.swing.DefaultComboBoxModel;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JTextField;

import oracle.ateam.sample.mobile.dt.model.DataObjectInfo;
import oracle.ateam.sample.mobile.dt.model.BusinessObjectGeneratorModel;
import oracle.ateam.sample.mobile.dt.model.UIAttributeInfo;
import oracle.ateam.sample.mobile.dt.model.UIDataObjectInfo;
import oracle.ateam.sample.mobile.dt.model.UIGeneratorModel;
import oracle.ateam.sample.mobile.dt.view.wizard.BusinessObjectsFromWSDataControlWizard;
import oracle.ateam.sample.mobile.dt.view.wizard.UIGeneratorWizard;

import oracle.ide.panels.DefaultTraversablePanel;
import oracle.ide.panels.TraversableContext;

/**
 * Wizard panel that lets the user set UI-generation properties (layout style, display
 * titles, CRUD flags, quick search, ...) per data object. The data-object combo at the
 * top switches which {@link UIDataObjectInfo} the remaining fields edit; switching (or
 * leaving the panel) saves the current field values back into the model.
 */
public class UIGeneratorSettingsPanelDynamic
  extends DefaultTraversablePanel
  implements ActionListener
{
  private UIGeneratorModel model;

  JLabel instruction = new JLabel("Set UI generation properties for each data object");
  JLabel doiLabel = new JLabel("Data Object");
  JComboBox doilist = new JComboBox();
  // Maps data-object name (the combo item) to its model object.
  transient Map<String, UIDataObjectInfo> dataObjectMap = new HashMap<String, UIDataObjectInfo>();

  private JLabel layoutStyleLabel = new JLabel("Layout Style");
  private JComboBox layoutStyleField = new JComboBox();
  private JLabel displayTitleSingularLabel = new JLabel("Display Title Singular");
  private JTextField displayTitleSingularField = new JTextField();
  private JLabel displayTitlePluralLabel = new JLabel("Display Title Plural");
  private JTextField displayTitlePluralField = new JTextField();
  private JLabel listAttributeLabel = new JLabel("List Attribute");
  private JComboBox listAttributeField = new JComboBox();
  private JLabel createLabel = new JLabel("Create Allowed?");
  private JCheckBox createField = new JCheckBox();
  private JLabel updateLabel = new JLabel("Update Allowed?");
  private JCheckBox updateField = new JCheckBox();
  private JLabel deleteLabel = new JLabel("Delete Allowed?");
  private JCheckBox deleteField = new JCheckBox();
  private JLabel samePageLabel = new JLabel("Show on Parent Page?");
  private JCheckBox samePageField = new JCheckBox();
  private JLabel quickSearchLabel = new JLabel("Add Quick Search?");
  private JCheckBox quickSearchField = new JCheckBox();

  // The data object whose properties the form fields currently edit; null until onEntry
  // populates the combo (and stays null when the model has no data objects).
  private UIDataObjectInfo currentDataObject;

  public UIGeneratorSettingsPanelDynamic()
  {
    // GridBagConstraints(int gridx, int gridy, int gridwidth, int gridheight, double weightx, double weighty
    // , int anchor, int fill, Insets insets, int ipadx, int ipady)
    // Insets(int top, int left, int bottom, int right)
    doilist.addActionListener(this);

    layoutStyleField.addItem(UIDataObjectInfo.LAYOUT_STYLE_LIST_FORM);
    layoutStyleField.addItem(UIDataObjectInfo.LAYOUT_STYLE_LIST);
    layoutStyleField.addItem(UIDataObjectInfo.LAYOUT_STYLE_FORM);

    setLayout(new BorderLayout(0, 15));
    JPanel contentPanel = new JPanel();
    add(contentPanel, BorderLayout.NORTH);

    GridBagLayout containerLayout = new GridBagLayout();
    contentPanel.setLayout(containerLayout);

    GridBagConstraints gbc = new GridBagConstraints();
    gbc.gridx = 0;
    gbc.gridy = 0;
    gbc.gridwidth = 1;
    gbc.gridheight = 1;
    gbc.anchor = GridBagConstraints.NORTHWEST;
    gbc.fill = GridBagConstraints.HORIZONTAL;
    gbc.insets = new Insets(0, 0, 20, 5);
    contentPanel.add(doiLabel, gbc);
    gbc.gridx++;
    gbc.weightx = 1.0f;
    contentPanel.add(doilist, gbc);
    gbc.insets = new Insets(0, 0, 5, 5);

    // Label -> field pairs, laid out top to bottom in insertion order.
    // NOTE: this must be a LinkedHashMap. The previous TreeMap relied on the keys'
    // natural ordering, but JComponent does not implement Comparable, so the first
    // put() throws ClassCastException on modern JDKs — and the intended order was
    // insertion order anyway.
    Map<JComponent, JComponent> fields = new LinkedHashMap<JComponent, JComponent>();
    fields.put(layoutStyleLabel, layoutStyleField);
    fields.put(displayTitleSingularLabel, displayTitleSingularField);
    fields.put(displayTitlePluralLabel, displayTitlePluralField);
    fields.put(listAttributeLabel, listAttributeField);
    fields.put(createLabel, createField);
    fields.put(updateLabel, updateField);
    fields.put(deleteLabel, deleteField);
    fields.put(samePageLabel, samePageField);
    fields.put(quickSearchLabel, quickSearchField);

    for (Map.Entry<JComponent, JComponent> entry : fields.entrySet())
    {
      gbc.gridy++;
      gbc.gridx = 0;
      contentPanel.add(entry.getKey(), gbc);
      gbc.gridx++;
      gbc.weightx = 1.0f;
      contentPanel.add(entry.getValue(), gbc);
      gbc.weightx = 0;
    }
  }

  /** Fills the data-object combo from the model and (re)builds the name-to-object map. */
  private void populateDataObjectList()
  {
    List<UIDataObjectInfo> dois = model.getDataObjectInfos();
    List<String> dataObjectNames = new ArrayList<String>();
    for (UIDataObjectInfo doi : dois)
    {
      dataObjectMap.put(doi.getName(), doi);
      dataObjectNames.add(doi.getName());
    }
    doilist.setModel(new DefaultComboBoxModel(dataObjectNames.toArray()));
  }

  /** Fills the list-attribute combo with the attribute names of the current data object. */
  private void populateListAttributeList()
  {
    List<String> attrNames = new ArrayList<String>();
    for (UIAttributeInfo attr : currentDataObject.getAttributeDefs())
    {
      attrNames.add(attr.getAttrName());
    }
    listAttributeField.setModel(new DefaultComboBoxModel(attrNames.toArray()));
  }

  public void onEntry(TraversableContext tc)
  {
    super.onEntry(tc);
    model = (UIGeneratorModel) tc.get(UIGeneratorWizard.MODEL_KEY);
    populateDataObjectList();
    if (doilist.getItemCount() > 0)
    {
      UIDataObjectInfo doi = dataObjectMap.get(doilist.getSelectedItem());
      setCurrentDataObject(doi);
      doilist.setSelectedItem(doilist.getItemAt(0));
    }
  }

  public void onExit(TraversableContext tc)
  {
    saveDataObjectProps();
  }

  /**
   * Combo selection changed: save the edits made for the previously selected data
   * object, then load the newly selected one into the fields.
   */
  @Override
  public void actionPerformed(ActionEvent e)
  {
    String dataObject = (String) doilist.getSelectedItem();
    saveDataObjectProps();
    setCurrentDataObject(dataObjectMap.get(dataObject));
  }

  /** Copies the current field values back into the currently selected data object. */
  private void saveDataObjectProps()
  {
    UIDataObjectInfo current = getCurrentDataObject();
    if (current == null)
    {
      // Nothing selected yet (e.g. the model contained no data objects).
      return;
    }
    current.setLayoutStyle((String) layoutStyleField.getSelectedItem());
    current.setDisplayTitleSingular(displayTitleSingularField.getText());
    current.setDisplayTitlePlural(displayTitlePluralField.getText());
    current.setListAttribute1((String) listAttributeField.getSelectedItem());
    current.setCreate(createField.isSelected());
    current.setUpdate(updateField.isSelected());
    current.setDelete(deleteField.isSelected());
    current.setSamePage(samePageField.isSelected());
    current.setHasQuickSearch(quickSearchField.isSelected());
  }

  /** Loads the given data object's properties into the form fields. */
  public void setCurrentDataObject(UIDataObjectInfo currentDataObject)
  {
    this.currentDataObject = currentDataObject;
    populateListAttributeList();
    layoutStyleField.setSelectedItem(currentDataObject.getLayoutStyle());
    displayTitleSingularField.setText(currentDataObject.getDisplayTitleSingular());
    displayTitlePluralField.setText(currentDataObject.getDisplayTitlePlural());
    listAttributeField.setSelectedItem(currentDataObject.getListAttribute1());
    createField.setSelected(currentDataObject.isCreate());
    updateField.setSelected(currentDataObject.isUpdate());
    deleteField.setSelected(currentDataObject.isDelete());
    samePageField.setSelected(currentDataObject.isSamePage());
    quickSearchField.setSelected(currentDataObject.isHasQuickSearch());
    boolean hasParent = currentDataObject.getParent() != null;
    // show quick search only for top data objects
    // hide samePage for top data objects
    // samePageLabel.setVisible(hasParent);
    // samePageField.setVisible(hasParent);
    samePageField.setEnabled(hasParent);
    // quickSearchLabel.setVisible(!hasParent);
    // quickSearchField.setVisible(!hasParent);
    quickSearchField.setEnabled(!hasParent);
  }

  public UIDataObjectInfo getCurrentDataObject()
  {
    return currentDataObject;
  }
}
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package net.starlark.java.eval;

import com.google.common.base.Preconditions;
import com.google.common.base.Verify;
import com.google.common.collect.ImmutableList;
import java.util.ArrayList;
import javax.annotation.Nullable;
import net.starlark.java.syntax.BinaryOperatorExpression;
import net.starlark.java.syntax.Expression;
import net.starlark.java.syntax.IfStatement;
import net.starlark.java.syntax.TokenKind;
import net.starlark.java.syntax.UnaryOperatorExpression;

/**
 * Compile {@code if} statement.
 *
 * <p>Conditions are first normalized into a small {@link BoolExpr} tree (constants, and/or,
 * not, and opaque "other" expressions), which lets the compiler constant-fold branches and
 * emit conditional jumps without materializing intermediate boolean values.
 */
class BcCompilerForIf {
  private final BcCompiler compiler;

  BcCompilerForIf(BcCompiler compiler) {
    this.compiler = compiler;
  }

  /** Compile expression just for side effects. */
  private void compileForEffect(BcIr ir, BoolExpr cond) {
    Preconditions.checkState(
        cond.maybeConst != null, "Can only compile const expressions: %s", cond);
    cond.match(
        new BoolExprMatcher() {
          @Override
          public void constExpr(ConstExpr constExpr) {
            // no-op: a bare constant has no side effects to emit.
          }

          @Override
          public void other(OtherExpr otherExpr) {
            throw new AssertionError("expression is not const: " + otherExpr);
          }

          @Override
          public void binOp(BinOpExpr binOpExpr) {
            Boolean lhsConst = binOpExpr.lhs.maybeConst;
            if (lhsConst != null) {
              compileForEffect(ir, binOpExpr.lhs);
              // `lhs` is const. So we need to compile `rhs` for effect only if either:
              // * `(lhs === True) AND ...`
              // * `(lhs === False) OR ...`
              if (lhsConst == (binOpExpr.binOp == BinOp.AND)) {
                compileForEffect(ir, binOpExpr.rhs);
              }
            } else {
              // `lhs` is not const but the whole expression is, so `rhs` must be the
              // const side; `rhs` is only evaluated when `lhs` doesn't short-circuit.
              // Compile AND expression like:
              // ```
              // if lhs:
              //   rhs
              // ```
              // and OR expression like:
              // ```
              // if not lhs:
              //   rhs
              // ```
              ArrayList<BcIrInstr.JumpLabel> thenAddrs = new ArrayList<>();
              ArrayList<BcIrInstr.JumpLabel> elseAddrs = new ArrayList<>();
              compileCond(ir, binOpExpr.lhs, binOpExpr.binOp != BinOp.AND, elseAddrs, thenAddrs);
              ir.addJumpLabels(thenAddrs);
              compileForEffect(ir, binOpExpr.rhs);
              ir.addJumpLabels(elseAddrs);
            }
          }

          @Override
          public void not(NotExpr notExpr) {
            // Negation does not change which side effects run.
            compileForEffect(ir, notExpr.arg);
          }
        });
  }

  /** Compile if statement condition. */
  // The parameters are these:
  // * `cond` is a condition we are compiling
  // * `elseJumpCond` is a condition on which we should jump
  // * `elseJumps` is where we should jump if condition is satisfied
  // * `thenJumps` is where we _can_ jump if condition is not satisfied
  //     but if condition is not satisfied, we are allowed to just continue execution.
  //
  // Consider compilation of this expression:
  // ```
  // if (x or y) and z: ...
  // ```
  // With all the trickery, we can compile condition with no temporary variables
  // and zero `BR` instructions:
  // ```
  // IF_BR_LOCAL x @z
  // IF_NOT_BR_LOCAL y @else
  // @z:
  // IF_NOT_BR_LOCAL z @else
  // <THEN>
  // BR @end
  // @else:
  // <ELSE>
  // @end:
  // ```
  // Local variables are never used, but `BR` instructions are rarely used
  // for certain complicated cases when evaluating conditions with side effects.
  private void compileCond(
      BcIr ir,
      BoolExpr cond,
      boolean elseJumpCond,
      ArrayList<BcIrInstr.JumpLabel> elseJumps,
      ArrayList<BcIrInstr.JumpLabel> thenJumps) {
    Preconditions.checkState(
        cond.maybeConst == null, "Can only compile non-const expressions: %s", cond);
    cond.match(
        new BoolExprMatcher() {
          @Override
          public void not(NotExpr notExpr) {
            // `not e` flips the jump polarity and recurses on the operand.
            compileCond(ir, ((NotExpr) cond).arg, !elseJumpCond, elseJumps, thenJumps);
          }

          @Override
          public void binOp(BinOpExpr binOpExpr) {
            Boolean xConst = binOpExpr.lhs.maybeConst;
            Boolean yConst = binOpExpr.rhs.maybeConst;
            // Both sides const would make the whole expression const, contradicting
            // the precondition above.
            Verify.verify(xConst == null || yConst == null);

            // Avoid unnecessary jumps in generated code when `binOpExpr`
            // is equivalent to just one of its operands (the const, effect-free
            // side can be dropped entirely).
            if (xConst != null && !binOpExpr.lhs.hasEffects) {
              compileCond(ir, binOpExpr.rhs, elseJumpCond, elseJumps, thenJumps);
              return;
            }
            if (yConst != null && !binOpExpr.rhs.hasEffects) {
              compileCond(ir, binOpExpr.lhs, elseJumpCond, elseJumps, thenJumps);
              return;
            }

            Verify.verify(binOpExpr.lhs.hasEffects);
            Verify.verify(binOpExpr.rhs.hasEffects);

            BinOp binOp = binOpExpr.binOp;
            if ((binOp == BinOp.AND) != elseJumpCond) {
              // This branch handles either of expressions:
              // expression    | elseJumpCond
              // --------------+--------
              // x and y       | false
              // not (x or y)  | true
              if (xConst != null) {
                Verify.verify(xConst != elseJumpCond);
                compileForEffect(ir, binOpExpr.lhs);
              } else {
                ArrayList<BcIrInstr.JumpLabel> xSkip = new ArrayList<>();
                compileCond(ir, binOpExpr.lhs, elseJumpCond, elseJumps, xSkip);
                ir.addJumpLabels(xSkip);
              }
              if (yConst != null) {
                Verify.verify(yConst != elseJumpCond);
                compileForEffect(ir, binOpExpr.rhs);
              } else {
                compileCond(ir, binOpExpr.rhs, elseJumpCond, elseJumps, thenJumps);
              }
            } else {
              // This branch handles either of expressions:
              // expression    | elseJumpCond
              // --------------+--------
              // x or y        | false
              // not (x and y) | true
              if (xConst != null) {
                Verify.verify(xConst == elseJumpCond);
                compileForEffect(ir, binOpExpr.lhs);
              } else {
                ArrayList<BcIrInstr.JumpLabel> xSkip = new ArrayList<>();
                compileCond(ir, binOpExpr.lhs, !elseJumpCond, thenJumps, xSkip);
                ir.addJumpLabels(xSkip);
              }
              if (yConst != null) {
                Verify.verify(yConst == elseJumpCond);
                compileForEffect(ir, binOpExpr.rhs);
                // This is a tricky part: we compile expression like:
                // ```
                // if x or y: ... else: ...
                // ```
                // Where:
                // * `x` is not const
                // * `x` was evaluated to `False` (otherwise we already jumped to then)
                // * `y` is const false
                // * `y` has side effects
                // Thus we need to evaluate `y` for side effects,
                // but also unconditionally jump to else.
                elseJumps.add(ir.br(compiler.nodeToLocOffset(binOpExpr.expr)));
              } else {
                compileCond(ir, binOpExpr.rhs, elseJumpCond, elseJumps, thenJumps);
              }
            }
          }

          @Override
          public void constExpr(ConstExpr constExpr) {
            throw new AssertionError("expression is const: " + constExpr);
          }

          @Override
          public void other(OtherExpr otherExpr) {
            writeOtherCond(ir, otherExpr, elseJumpCond, elseJumps);
          }
        });
  }

  /** Write condition which is not logical expression. */
  private void writeOtherCond(
      BcIr ir,
      OtherExpr otherExpr,
      boolean elseJumpCond,
      ArrayList<BcIrInstr.JumpLabel> elseJumps) {
    // Try compile condition as type is br
    BcIr.PopTypeIs popTypeIs = otherExpr.result.ir.popTypeIs(otherExpr.result.result.slot);
    if (popTypeIs != null) {
      writeOtherCondTypeIs(ir, otherExpr, popTypeIs, elseJumpCond, elseJumps);
      return;
    }

    // Try compile condition as bin op br
    if (otherExpr.expr instanceof BinaryOperatorExpression) {
      BinaryOperatorExpression binaryOperatorExpression =
          (BinaryOperatorExpression) otherExpr.expr;
      BcWriter.JumpBindCond jumpBindCond =
          BcWriter.JumpBindCond.fromBinOpToken(binaryOperatorExpression.getOperator());
      if (jumpBindCond != null) {
        writeOtherCondBinOp(ir, binaryOperatorExpression, elseJumpCond, jumpBindCond, elseJumps);
        return;
      }
    }

    // Write default conditional jump
    writeOtherCondDefault(ir, otherExpr, elseJumpCond, elseJumps);
  }

  // Fused "type-is + conditional jump" form of the condition.
  private void writeOtherCondTypeIs(
      BcIr ir,
      OtherExpr otherExpr,
      BcIr.PopTypeIs popTypeIs,
      boolean elseJumpCond,
      ArrayList<BcIrInstr.JumpLabel> elseJumps) {
    ir.addAll(otherExpr.result.ir);
    BcWriter.JumpCond jumpCond = elseJumpCond ? BcWriter.JumpCond.IF : BcWriter.JumpCond.IF_NOT;
    BcIrIfCond.TypeIs ifCond = new BcIrIfCond.TypeIs(popTypeIs.value, popTypeIs.type, jumpCond);
    BcIrInstr.JumpLabel jumpLabel = ir.ifBr(compiler.nodeToLocOffset(otherExpr.expr), ifCond);
    elseJumps.add(jumpLabel);
  }

  // Fused "binary comparison + conditional jump" form of the condition.
  private void writeOtherCondBinOp(
      BcIr ir,
      BinaryOperatorExpression binaryOperatorExpression,
      boolean elseJumpCond,
      BcWriter.JumpBindCond cond,
      ArrayList<BcIrInstr.JumpLabel> elseJumps) {
    BcIrSlot x = compiler.compileExpression(ir, binaryOperatorExpression.getX()).slot;
    BcIrSlot y = compiler.compileExpression(ir, binaryOperatorExpression.getY()).slot;
    // Jump on the *else* condition, so negate when we want to fall through on success.
    BcWriter.JumpBindCond jumpBindCond = elseJumpCond ? cond : cond.not();
    BcIrIfCond.Bin ifCond = new BcIrIfCond.Bin(x, y, jumpBindCond);
    BcIrInstr.JumpLabel jumpLabel =
        ir.ifBr(compiler.nodeToLocOffset(binaryOperatorExpression), ifCond);
    elseJumps.add(jumpLabel);
  }

  // Generic fallback: evaluate the expression into a slot and branch on its truth.
  private void writeOtherCondDefault(
      BcIr ir,
      OtherExpr otherExpr,
      boolean elseJumpCond,
      ArrayList<BcIrInstr.JumpLabel> elseJumps) {
    ir.addAll(otherExpr.result.ir);
    ImmutableList<BcWriter.LocOffset> locOffset = compiler.nodeToLocOffset(otherExpr.expr);
    BcWriter.JumpCond jumpCond = elseJumpCond ? BcWriter.JumpCond.IF : BcWriter.JumpCond.IF_NOT;
    BcIrInstr.JumpLabel jumpLabel = ir.ifBr(locOffset, otherExpr.result.result.slot, jumpCond);
    elseJumps.add(jumpLabel);
  }

  BcCompiler.StmtFlow compileIfStatement(BcIr ir, IfStatement ifStatement) {
    return compileIfElse(
        ir,
        ifStatement.getCondition(),
        ir1 -> compiler.compileStatements(ir1, ifStatement.getThenBlock(), false),
        ifStatement.getElseBlock() != null
            ? ir1 -> compiler.compileStatements(ir1, ifStatement.getElseBlock(), false)
            : null);
  }

  /** Callback compiling the body of a then/else branch. */
  interface Block {
    BcCompiler.StmtFlow compileBlock(BcIr ir);
  }

  void compileIf(BcIr ir, Expression condExpr, Block thenBlock) {
    compileIfElse(ir, condExpr, thenBlock, null);
  }

  /**
   * Compile an if/else; when the condition is statically known, only the taken branch is
   * emitted (plus the condition's side effects).
   */
  BcCompiler.StmtFlow compileIfElse(
      BcIr ir, Expression condExpr, Block thenBlock, @Nullable Block elseBlock) {
    BoolExpr cond = convert(condExpr);
    Boolean condConst = cond.maybeConst;
    if (condConst != null) {
      // Condition is const: keep its side effects, then emit only the live branch.
      compileForEffect(ir, cond);
      if (condConst) {
        return thenBlock.compileBlock(ir);
      } else {
        if (elseBlock != null) {
          return elseBlock.compileBlock(ir);
        } else {
          return BcCompiler.StmtFlow.GO_ON;
        }
      }
    }

    ArrayList<BcIrInstr.JumpLabel> elseAddrs = new ArrayList<>();
    ArrayList<BcIrInstr.JumpLabel> thenAddrs = new ArrayList<>();
    // If cond == false, jump to elseAddr, otherwise jump to then addr or just fall through.
    compileCond(ir, cond, false, elseAddrs, thenAddrs);
    ir.addJumpLabels(thenAddrs);
    BcCompiler.StmtFlow thenFlow = thenBlock.compileBlock(ir);
    if (elseBlock != null) {
      // TODO(nga): no need to jump if the last instruction is return
      BcIrInstr.JumpLabel end = ir.br(compiler.nodeToLocOffset(condExpr));
      ir.addJumpLabels(elseAddrs);
      BcCompiler.StmtFlow elseFlow = elseBlock.compileBlock(ir);
      ir.add(end);
      // Flow after the statement is the common flow of both branches.
      return thenFlow == elseFlow ? thenFlow : BcCompiler.StmtFlow.GO_ON;
    } else {
      ir.addJumpLabels(elseAddrs);
      return BcCompiler.StmtFlow.GO_ON;
    }
  }

  /** Visitor. */
  private abstract static class BoolExprMatcher {
    public abstract void constExpr(ConstExpr constExpr);

    public abstract void other(OtherExpr otherExpr);

    public abstract void binOp(BinOpExpr binOpExpr);

    public abstract void not(NotExpr notExpr);
  }

  private abstract static class BoolExpr {
    /** AST expression for this expression. */
    final Expression expr;

    /**
     * Whether this expression evaluates to constant (yes, no, unknown).
     *
     * <p>Note const expression may still have side effects which need to be evaluated. For
     * example, this expression: {@code True or print(1)} is const {@code true}, but still has
     * side effects.
     */
    @Nullable final Boolean maybeConst;

    /** Evaluation of this code has side effects. */
    final boolean hasEffects;

    protected BoolExpr(Expression expr, @Nullable Boolean maybeConst, boolean hasEffects) {
      this.expr = expr;
      this.maybeConst = maybeConst;
      this.hasEffects = hasEffects;
    }

    abstract void match(BoolExprMatcher matcher);

    @Override
    public abstract String toString();
  }

  /** {@code True} or {@code False}. */
  private static class ConstExpr extends BoolExpr {
    private final boolean value;

    private ConstExpr(Expression expr, boolean value) {
      super(expr, value, /* hasEffects */ false);
      this.value = value;
    }

    @Override
    void match(BoolExprMatcher matcher) {
      matcher.constExpr(this);
    }

    @Override
    public String toString() {
      return value ? "True" : "False";
    }
  }

  /** Any other expression we know nothing about. */
  private static class OtherExpr extends BoolExpr {
    // Pre-compiled IR and result slot for the expression; conservatively treated as
    // having side effects.
    private final BcCompiler.CompileExpressionResultWithIr result;

    private OtherExpr(Expression expr, BcCompiler.CompileExpressionResultWithIr result) {
      super(expr, /* maybeConst */ null, /* hasEffects */ true);
      this.result = result;
    }

    @Override
    void match(BoolExprMatcher matcher) {
      matcher.other(this);
    }

    @Override
    public String toString() {
      return expr.toString();
    }
  }

  private enum BinOp {
    AND(TokenKind.AND),
    OR(TokenKind.OR),
    ;

    private final TokenKind tokenKind;

    BinOp(TokenKind tokenKind) {
      this.tokenKind = tokenKind;
    }

    @Override
    public String toString() {
      return tokenKind.toString();
    }
  }

  /** Logical binary operator expression. */
  private static class BinOpExpr extends BoolExpr {
    private final BoolExpr lhs;
    private final BoolExpr rhs;
    private final BinOp binOp;

    BinOpExpr(Expression expr, BoolExpr lhs, BoolExpr rhs, BinOp binOp) {
      super(expr, computeMaybeConst(lhs, rhs, binOp), lhs.hasEffects || rhs.hasEffects);
      this.lhs = lhs;
      this.rhs = rhs;
      this.binOp = binOp;
    }

    @Override
    void match(BoolExprMatcher matcher) {
      matcher.binOp(this);
    }

    // Constant-folds short-circuit semantics: e.g. `True and y` == y, `False and y` == False.
    @Nullable
    private static Boolean computeMaybeConst(BoolExpr lhs, BoolExpr rhs, BinOp binOp) {
      Boolean lhsValue = lhs.maybeConst;
      Boolean rhsValue = rhs.maybeConst;
      if (lhsValue != null) {
        if (lhsValue == (binOp == BinOp.AND)) {
          // Non-short-circuiting lhs: result is whatever rhs is.
          return rhsValue;
        } else {
          // Short-circuiting lhs decides the result.
          return lhsValue;
        }
      } else if (rhsValue != null) {
        if (rhsValue == (binOp == BinOp.AND)) {
          return lhsValue;
        } else {
          return rhsValue;
        }
      } else {
        return null;
      }
    }

    @Override
    public String toString() {
      return "(" + lhs + " " + binOp + " " + rhs + ")";
    }
  }

  /** Negation. */
  private static class NotExpr extends BoolExpr {
    private final BoolExpr arg;

    NotExpr(Expression expr, BoolExpr arg) {
      super(expr, arg.maybeConst != null ? !arg.maybeConst : null, arg.hasEffects);
      this.arg = arg;
    }

    @Override
    void match(BoolExprMatcher matcher) {
      matcher.not(this);
    }

    @Override
    public String toString() {
      return "not " + arg;
    }
  }

  /** Convert any expression to {@link BoolExpr} optimized for condition compilation. */
  private BoolExpr convert(Expression expr) {
    if (expr instanceof UnaryOperatorExpression
        && ((UnaryOperatorExpression) expr).getOperator() == TokenKind.NOT) {
      BoolExpr simplified = convert(((UnaryOperatorExpression) expr).getX());
      return new NotExpr(expr, simplified);
    }
    if (expr instanceof BinaryOperatorExpression) {
      BinaryOperatorExpression binExpr = (BinaryOperatorExpression) expr;
      if (binExpr.getOperator() == TokenKind.AND || binExpr.getOperator() == TokenKind.OR) {
        BoolExpr lhs = convert(binExpr.getX());
        BoolExpr rhs = convert(binExpr.getY());
        BinOp binOp = binExpr.getOperator() == TokenKind.AND ? BinOp.AND : BinOp.OR;
        return new BinOpExpr(expr, lhs, rhs, binOp);
      }
    }
    BcCompiler.CompileExpressionResultWithIr result = compiler.compileExpression(expr);
    // A compile-time constant whose truth is immutable folds to True/False.
    if (result.result.value() != null && BcCompiler.isTruthImmutable(result.result.value())) {
      return new ConstExpr(expr, Starlark.truth(result.result.value()));
    }
    return new OtherExpr(expr, result);
  }
}
/* * Copyright 2014-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.jvm.java; import com.facebook.buck.android.AndroidLibraryDescription; import com.facebook.buck.cli.BuckConfig; import com.facebook.buck.event.BuckEventBus; import com.facebook.buck.event.ThrowableConsoleEvent; import com.facebook.buck.io.MorePaths; import com.facebook.buck.io.ProjectFilesystem; import com.facebook.buck.json.BuildFileParseException; import com.facebook.buck.json.ProjectBuildFileParser; import com.facebook.buck.json.ProjectBuildFileParserFactory; import com.facebook.buck.model.BuildTarget; import com.facebook.buck.parser.ParserConfig; import com.facebook.buck.rules.BuckPyFunction; import com.facebook.buck.rules.ConstructorArgMarshaller; import com.facebook.buck.util.Console; import com.google.common.base.Preconditions; import com.google.common.collect.HashMultimap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSetMultimap; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Multimap; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Collection; import java.util.List; import java.util.Map; import 
java.util.Set; /** * This class is responsible for finding Java source files and the buck rules that own them, given a * fully-qualified Java symbol like "com.example.foo.Bar". It does this by looking at expected * locations based on the package name of the symbol and the source roots listed in the project * config. This functionality is used to automatically generate dependency information. */ public class JavaSymbolFinder { private static ImmutableSet<String> javaRuleTypes = ImmutableSet.of( JavaLibraryDescription.TYPE.getName(), AndroidLibraryDescription.TYPE.getName(), JavaTestDescription.TYPE.getName()); private final ProjectFilesystem projectFilesystem; private final SrcRootsFinder srcRootsFinder; private final JavacOptions javacOptions; private final ConstructorArgMarshaller marshaller; private final ProjectBuildFileParserFactory projectBuildFileParserFactory; private final BuckConfig config; private final BuckEventBus buckEventBus; private final Console console; private final ImmutableMap<String, String> environment; public JavaSymbolFinder( ProjectFilesystem projectFilesystem, SrcRootsFinder srcRootsFinder, JavacOptions javacOptions, ConstructorArgMarshaller marshaller, ProjectBuildFileParserFactory projectBuildFileParserFactory, BuckConfig config, BuckEventBus buckEventBus, Console console, ImmutableMap<String, String> environment) { this.projectFilesystem = projectFilesystem; this.srcRootsFinder = srcRootsFinder; this.javacOptions = javacOptions; this.marshaller = marshaller; this.projectBuildFileParserFactory = projectBuildFileParserFactory; this.config = config; this.buckEventBus = buckEventBus; this.console = console; this.environment = environment; } /** * Figure out the build targets that provide a set of Java symbols. * @param symbols The set of symbols (e.g. "com.example.foo.Bar") to find defining targets for. 
 * This is taken as a collection, rather than as an individual string, because
 * instantiating a ProjectBuildFileParser is expensive (it spawns a Python
 * subprocess), and we don't want to encourage the caller to do it more than once.
 * @return A multimap of symbols to the targets that define them, of the form:
 * {"com.example.a.A": set("//com/example/a:a", "//com/another/a:a")}
 */
public ImmutableSetMultimap<String, BuildTarget> findTargetsForSymbols(Set<String> symbols)
    throws InterruptedException {
  // TODO(oconnor663): Handle files that aren't included in any rule.

  // First find all the source roots in the current project.
  Collection<Path> srcRoots;
  try {
    srcRoots = srcRootsFinder.getAllSrcRootPaths(config.getSrcRoots());
  } catch (IOException e) {
    // Without source roots we cannot locate any candidate files; report the failure
    // on the event bus and return an empty mapping rather than propagating.
    buckEventBus.post(ThrowableConsoleEvent.create(e, "Error while searching for source roots."));
    return ImmutableSetMultimap.of();
  }

  // Now collect all the code files that define our symbols.
  Multimap<String, Path> symbolsToSourceFiles = HashMultimap.create();
  for (String symbol : symbols) {
    symbolsToSourceFiles.putAll(symbol, getDefiningPaths(symbol, srcRoots));
  }

  // Now find all the targets that define all those code files. We do this in one pass because we
  // don't want to instantiate a new parser subprocess for every symbol.
  Set<Path> allSourceFilePaths = ImmutableSet.copyOf(symbolsToSourceFiles.values());
  Multimap<Path, BuildTarget> sourceFilesToTargets = getTargetsForSourceFiles(allSourceFilePaths);

  // Now build the map from symbols to build targets.
  ImmutableSetMultimap.Builder<String, BuildTarget> symbolsToTargets =
      ImmutableSetMultimap.builder();
  for (String symbol : symbolsToSourceFiles.keySet()) {
    for (Path sourceFile : symbolsToSourceFiles.get(symbol)) {
      symbolsToTargets.putAll(symbol, sourceFilesToTargets.get(sourceFile));
    }
  }

  return symbolsToTargets.build();
}

/**
 * For all the possible BUCK files above each of the given source files, parse them to JSON to
 * find the targets that actually include these source files, and return a map of them. We do this
 * over a collection of source files, rather than a single file at a time, because instantiating
 * the BUCK file parser is expensive. (It spawns a Python subprocess.)
 */
private ImmutableMultimap<Path, BuildTarget> getTargetsForSourceFiles(
    Collection<Path> sourceFilePaths) throws InterruptedException {
  // Cache of already-parsed BUCK files: the same BUCK file may sit above many source files.
  Map<Path, List<Map<String, Object>>> parsedBuildFiles = Maps.newHashMap();
  ImmutableSetMultimap.Builder<Path, BuildTarget> sourceFileTargetsMultimap =
      ImmutableSetMultimap.builder();
  // The parser owns a Python subprocess; try-with-resources guarantees it is torn down.
  try (ProjectBuildFileParser parser = projectBuildFileParserFactory.createParser(
      marshaller,
      console,
      environment,
      buckEventBus)) {
    for (Path sourceFile : sourceFilePaths) {
      for (Path buckFile : possibleBuckFilesForSourceFile(sourceFile)) {
        List<Map<String, Object>> rules;
        // Avoid parsing the same BUCK file twice.
        if (parsedBuildFiles.containsKey(buckFile)) {
          rules = parsedBuildFiles.get(buckFile);
        } else {
          rules = parser.getAll(buckFile);
          parsedBuildFiles.put(buckFile, rules);
        }
        for (Map<String, Object> ruleMap : rules) {
          String type = (String) ruleMap.get(BuckPyFunction.TYPE_PROPERTY_NAME);
          if (javaRuleTypes.contains(type)) {
            // NOTE(review): checkNotNull assumes every java rule declares 'srcs' —
            // a rule without it would throw here; confirm against the rule schema.
            @SuppressWarnings("unchecked")
            List<String> srcs = (List<String>) Preconditions.checkNotNull(ruleMap.get("srcs"));
            if (isSourceFilePathInSrcsList(sourceFile, srcs, buckFile.getParent())) {
              // Derive the target's base name ("//path/to/pkg") from the BUCK file's directory;
              // a null parent means the BUCK file is at the project root ("//").
              Path buckFileDir = buckFile.getParent();
              String baseName = "//" + (buckFileDir != null ?
                  MorePaths.pathWithUnixSeparators(buckFileDir) : "");
              String shortName = (String) Preconditions.checkNotNull(ruleMap.get("name"));
              sourceFileTargetsMultimap.put(
                  sourceFile,
                  BuildTarget.builder(
                      projectFilesystem.getRootPath(),
                      baseName,
                      shortName).build());
            }
          }
        }
      }
    }
  } catch (BuildFileParseException e) {
    // Best-effort: report the parse failure and return whatever was collected so far.
    buckEventBus.post(ThrowableConsoleEvent.create(e, "Error while searching for targets."));
  }
  return sourceFileTargetsMultimap.build();
}

/**
 * The "srcs" list of a rule is given relative to the path of the BUCK file. Resolve and normalize
 * these paths to see if a given source file (given relative to the project root) is among them.
 */
private boolean isSourceFilePathInSrcsList(
    Path candidateFilePath,
    Collection<String> srcs,
    Path srcsDir) {
  Path normalizedCandidatePath = candidateFilePath.normalize();
  for (String src : srcs) {
    Path pathForSrc = Paths.get(src).normalize();
    // A null srcsDir means the BUCK file lives at the project root, so the src entry
    // is already project-relative.
    Path projectRelativePathForSrc = (srcsDir != null ? srcsDir.resolve(pathForSrc) : pathForSrc);
    Path normalizedPathForSrc = projectRelativePathForSrc.normalize();
    if (normalizedCandidatePath.equals(normalizedPathForSrc)) {
      return true;
    }
  }
  return false;
}

/**
 * Look at all the directories above a given source file, up to the project root, and return the
 * paths to any BUCK files that exist at those locations. These files are the only ones that could
 * define a rule that includes the given source file.
 */
private ImmutableList<Path> possibleBuckFilesForSourceFile(Path sourceFilePath) {
  ImmutableList.Builder<Path> possibleBuckFiles = ImmutableList.builder();
  Path dir = sourceFilePath.getParent();
  ParserConfig parserConfig = new ParserConfig(config);

  // For a source file like foo/bar/example.java, add paths like foo/bar/BUCK and foo/BUCK.
  while (dir != null) {
    Path buckFile = dir.resolve(parserConfig.getBuildFileName());
    if (projectFilesystem.isFile(buckFile)) {
      possibleBuckFiles.add(buckFile);
    }
    dir = dir.getParent();
  }

  // Finally, add ./BUCK in the root directory.
  Path rootBuckFile = Paths.get(parserConfig.getBuildFileName());
  if (projectFilesystem.exists(rootBuckFile)) {
    possibleBuckFiles.add(rootBuckFile);
  }

  return possibleBuckFiles.build();
}

/**
 * Find all Java source files that define a given fully-qualified symbol (like "com.example.a.A").
 * To do this, open up all the Java files that could define it (see {@link #getCandidatePaths})
 * and parse them with our Eclipse-based {@link JavaFileParser}.
 */
private ImmutableSortedSet<Path> getDefiningPaths(String symbol, Collection<Path> srcRoots) {
  ImmutableSortedSet.Builder<Path> definingPaths = ImmutableSortedSet.naturalOrder();
  // TODO(shs96c): This should use the same javac env as was used for compiling the code.
  JavaFileParser parser = JavaFileParser.createJavaFileParser(javacOptions);
  for (Path candidatePath : getCandidatePaths(symbol, srcRoots)) {
    // .get() is safe here because getCandidatePaths only returns paths that exist.
    String content = projectFilesystem.readFileIfItExists(
        projectFilesystem.getPathForRelativeExistingPath(candidatePath)).get();
    Set<String> symbols = parser.getExportedSymbolsFromString(content);
    if (symbols.contains(symbol)) {
      definingPaths.add(candidatePath);
    }
  }
  return definingPaths.build();
}

/**
 * Guessing file names from fully-qualified Java symbols is ambiguous, because we don't know ahead
 * of time exactly what part of the symbol is the package, and what part is class names or static
 * members. This returns the set of all possible Java files for a given symbol, given different
 * possibilities for the package name and resolving against all the available source roots.
 * Returns only those candidates that actually exist.
 */
private ImmutableSortedSet<Path> getCandidatePaths(String symbol, Collection<Path> srcRoots) {
  ImmutableSortedSet.Builder<Path> candidatePaths = ImmutableSortedSet.naturalOrder();
  List<String> symbolParts = Lists.newArrayList(symbol.split("\\."));
  // Try every split point: parts [0, symbolIndex) form the package directories and
  // part [symbolIndex] is treated as the class (file) name.
  for (int symbolIndex = 0; symbolIndex < symbolParts.size(); symbolIndex++) {
    List<String> pathPartsList = symbolParts.subList(0, symbolIndex);
    String[] pathParts = pathPartsList.toArray(new String[pathPartsList.size()]);
    String candidateFileName = symbolParts.get(symbolIndex) + ".java";
    for (Path srcRoot : srcRoots) {
      Path candidatePath = srcRoot.resolve(Paths.get("", pathParts)).resolve(candidateFileName);
      if (projectFilesystem.exists(candidatePath)) {
        candidatePaths.add(candidatePath);
      }
    }
  }
  return candidatePaths.build();
}
}
/* See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * Esri Inc. licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.esri.gpt.catalog.arcgis.metadata;

import com.esri.gpt.catalog.publication.ProcessedRecord;
import com.esri.gpt.catalog.publication.ProcessingContext;
import com.esri.gpt.catalog.publication.PublicationRecord;
import com.esri.gpt.catalog.publication.ResourceProcessor;
import com.esri.arcgisws.ServiceCatalogBindingStub;
import com.esri.arcgisws.ServiceDescription;
import com.esri.arcgisws.runtime.exception.ArcGISWebServiceException;
import com.esri.gpt.control.webharvest.IterationContext;
import com.esri.gpt.control.webharvest.common.CommonResult;
import com.esri.gpt.framework.resource.adapters.FlatResourcesAdapter;
import com.esri.gpt.framework.resource.adapters.LimitedLengthResourcesAdapter;
import com.esri.gpt.framework.resource.adapters.PublishablesAdapter;
import com.esri.gpt.framework.resource.api.Native;
import com.esri.gpt.framework.resource.api.Publishable;
import com.esri.gpt.framework.resource.api.Resource;
import com.esri.gpt.framework.resource.query.Criteria;
import com.esri.gpt.framework.resource.query.Query;
import com.esri.gpt.framework.resource.query.Result;
import com.esri.gpt.framework.security.credentials.UsernamePasswordCredentials;
import com.esri.gpt.framework.util.ReadOnlyIterator;
import com.esri.gpt.framework.util.Val;

import java.io.IOException;
import java.net.URI;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Processes resources associated with an ArcGIS server.
 */
public class AGSProcessor extends ResourceProcessor {

  /** class variables ========================================================= */

  /** Logger */
  private static final Logger LOGGER = Logger.getLogger(AGSProcessor.class.getName());

  /** instance variables ====================================================== */
  // Factory producing a type-specific handler for each ArcGIS service description.
  private ServiceHandlerFactory factory = new ServiceHandlerFactory();
  // The server/folder/service being harvested.
  private AGSTarget target = new AGSTarget();
  // Optional credentials forwarded to each service handler.
  private UsernamePasswordCredentials credentials;

  /** constructors ============================================================ */

  /**
   * Constructs with an associated processing context.
   * @param context the processing context
   */
  public AGSProcessor(ProcessingContext context) {
    super(context);
    // Default template: only republish a document when its XML actually changed.
    if (context.getTemplate() == null) {
      PublicationRecord template = new PublicationRecord();
      template.setUpdateOnlyIfXmlHasChanged(true);
      context.setTemplate(template);
    }
  }

  /** properties ============================================================== */

  /**
   * Gets the ArcGIS server service handler factory.
   * @return the factory
   */
  public ServiceHandlerFactory getHandlerFactory() {
    return this.factory;
  }

  /**
   * Gets the targeted ArcGIS server and resource.
   * @return the target
   */
  public AGSTarget getTarget() {
    return this.target;
  }

  /**
   * Gets credentials.
   * @return credentials
   */
  public UsernamePasswordCredentials getCredentials() {
    return credentials;
  }

  /**
   * Sets credentials.
* @param credentials credentials */ public void setCredentials(UsernamePasswordCredentials credentials) { this.credentials = credentials; } /** methods ================================================================= */ /** * Interrogates the character response from a target resource URL attempting to * determine the REST and SOAP endpoints for an ArcGIS server services catalog. * @param url the target URL associated with the resource being interrogated * @param response the character based response previously returned from the target URL * @return <code>true</code> if the target was recognized as an ArcGIS server endpoint */ public boolean interrogate(URL url, String response) throws IOException { AGSInterrogator interragator = new AGSInterrogator(this.getContext(),this.getTarget()); interragator.interrogate(url,response); getTarget().updateTargetSoapUrl(); return getTarget().getWasRecognized(); } /** * Invokes processing against the resource. * @throws Exception if an exception occurs */ @Override public void process() throws Exception { /* * ServiceCatalogBindingStub * name: dc:title * description: dc:description * resource.url: dct:references * scheme=urn:x-esri:specification:ServiceType:ArcGIS:type * value is typically the rest endpoint for the service * type: typically dc:subject -> keyword Lucene field is dataTheme * parentType: not indexed * capabilities: not indexed * sourceUri: service rest url (by default) * * GeocodeServerBindingStub * keywords: GeocodeServer,geographicService,service,locator,geocode,geocoder * * GeoDataServerBindingStub * keywords: GeoDataServer,geographicService,service * per dataElement: publish element.getMetadata * sourceUri=serviceRestUrl/element.getName * * GeometryServerBindingStub * keywords: GeometryServer,geographicService,service,geometry,projection * * GlobeServerBindingStub * keywords: GlobeServer,liveData,service,globe * per layer: dct:abstract/rdf:value@rdf:resource=service.layername * * GPServerBindingStub * keywords: 
GPServer,geographicService,service,geoprocessing * per task: add task name as keyword, set service envelope is applicable * * ImageServerBindingStub * keywords: ImageServer,liveData,service,image * imageServiceInfo.description: dc:description * imageServiceInfo.extent: ows:WGS84BoundingBox * * MapServerBindingStub * keywords: ImageServer,liveData,service,image * mapServerInfo.description: dc:description * mapServerInfo.fullExtent: ows:WGS84BoundingBox * thumbnail.url: serviceRestUrl/export?size=256,256&f=image * documentInfo['Title']: dc:title * documentInfo['Author']: dc:creator * documentInfo['Comments']: dct:abstract/rdf:value@rdf:resource=mxd.comments * documentInfo['Subject']: dct:abstract/rdf:value@rdf:resource=mxd.subject * documentInfo['Category']: dct:abstract/rdf:value@rdf:resource=mxd.category * documentInfo['Keywords']: dc:subject * per layer: dct:abstract/rdf:value@rdf:resource=service.layername * * MobileServerBindingStub * keywords: MobileServer,liveData,service * * NAServerBindingStub * keywords: NAServer,geographicService,service,network,route * * WCSServer * keywords: WCSServer,liveData,service * resource.url: soapEndpoint?request=GetCapabilities&service=WCSServer * plus parent service metadata * * WFSServer * keywords: WFSServer,liveData,service * resource.url: soapEndpoint?request=GetCapabilities&service=WFSServer * plus parent service metadata * * WMSServer * keywords: WMSServer,liveData,service * resource.url: soapEndpoint?request=GetCapabilities&service=WMSServer * plus parent service metadata * */ String restUrl = this.getTarget().getRestUrl(); String soapUrl = this.getTarget().getSoapUrl(); AGSTarget.TargetType targetType = this.getTarget().getTargetType(); if ((targetType != null) && targetType.equals(AGSTarget.TargetType.ROOT)) { this.collectExistingSourceURIs(restUrl,soapUrl); } // TODO: check the TargetType // determine the target (entire server, a folder or a service) getTarget().updateTargetSoapUrl(); String targetSoapUrl = 
getTarget().getTargetSoapUrl(); boolean matchAll = targetSoapUrl.equals(soapUrl); boolean checkFolder = !targetSoapUrl.endsWith("Server"); // loop through the service descriptions ServiceCatalogBindingStub stub = new ServiceCatalogBindingStub(soapUrl); for (ServiceDescription desc: stub.getServiceDescriptions()) { if (Thread.currentThread().isInterrupted()) return; String currentSoapUrl = desc.getUrl(); String currentRestUrl = currentSoapUrl.replace(soapUrl,restUrl); // determine if there is a metch boolean matchesTarget = false; if (!matchAll) { matchesTarget = targetSoapUrl.equals(currentSoapUrl); if (!matchesTarget && checkFolder) { matchesTarget = currentSoapUrl.startsWith(targetSoapUrl+"/"); } } if (matchAll || matchesTarget) { // make a handler for this service type ServiceHandler handler = this.factory.makeHandler(desc.getType()); handler.setCredentials(getCredentials()); if (handler != null) { // initialize service information ServiceInfo info = handler.createServiceInfo(null, desc, currentRestUrl, currentSoapUrl); // collect try { LOGGER.log(Level.FINER, "Collecting metadata for: {0}", Val.stripControls(info.getSoapUrl())); handler.collectMetadata(this,info); } catch (Exception e) { ProcessedRecord processedRcord = new ProcessedRecord(); processedRcord.setSourceUri(info.getResourceUrl()); processedRcord.setStatusType(ProcessedRecord.StatusType.FAILED); processedRcord.setException(e,this.getContext().getMessageBroker()); this.getContext().incrementNumberFailed(); this.getContext().setLastException(e); this.getContext().getProcessedRecords().add(processedRcord); LOGGER.log(Level.FINER,"Error\n"+Val.stripControls(processedRcord.getSourceUri()),e); } // publish try { LOGGER.log(Level.FINER, "Publishing metadata for: {0}", info.getResourceUrl()); handler.publishMetadata(this,info); } catch (Exception e) { LOGGER.log(Level.FINER,"Error during publication.",e); } // break if we have found a single targetted service if (!matchAll && !checkFolder) { break; } } } } 
// cleanup unreferenced source URIs if ((targetType != null) && targetType.equals(AGSTarget.TargetType.ROOT)) { this.deleteUnreferencedSourceURIs(); } } @Override public Query createQuery(final IterationContext context, final Criteria criteria) { return new Query() { @Override public Result execute() { ResourceFolders folders = createResourceFolders(context); return new CommonResult(new LimitedLengthResourcesAdapter(folders, criteria.getMaxRecords())); } }; } @Override public Native getNativeResource(IterationContext context) { ResourceFolders folders = createResourceFolders(context); for (Publishable publishable : new PublishablesAdapter(new FlatResourcesAdapter(folders))) { if (publishable instanceof Native) { return (Native)publishable; } break; } return null; } /** * Normalizes URL by removing 'wsdl'. * @param url URL * @return normalized URL */ private String normalizeUrl(String url) { Pattern wsdlPattern = Pattern.compile("\\?wsdl$", Pattern.CASE_INSENSITIVE); Matcher wsdlMatcher = wsdlPattern.matcher(Val.chkStr(url)); String wsdlResult = wsdlMatcher.replaceFirst(""); Pattern servicesPattern = Pattern.compile("services\\?wsdl/", Pattern.CASE_INSENSITIVE); Matcher servicesMatcher = servicesPattern.matcher(wsdlResult); String servicesResult = servicesMatcher.replaceAll(""); return servicesResult.replaceAll("/+$", ""); /* return Pattern.compile("services\\?wsdl/", Pattern.CASE_INSENSITIVE).matcher( Pattern.compile("\\?wsdl$", Pattern.CASE_INSENSITIVE).matcher( Val.chkStr(url) ).replaceFirst("") ).replaceFirst(""); */ } /** * Extracts root URL. 
   * @param url URL
   * @return root URL
   */
  private String extractRootUrl(String url) {
    url = Val.chkStr(url);
    try {
      URI uri = new URI(url);
      // Keep scheme + authority + the first path segment (optionally followed by "/services").
      Matcher matcher = Pattern.compile("^/[^/]*(/services)?",Pattern.CASE_INSENSITIVE).matcher(uri.getPath());
      if (matcher.find()) {
        return uri.getScheme() + "://" + uri.getAuthority() + matcher.group();
      } else {
        return url;
      }
    } catch (Exception ex) {
      // On any parse problem, fall back to the input URL unchanged.
      return url;
    }
  }

  /**
   * Reads service descriptions.
   * @return array of service descriptions
   * @throws ArcGISWebServiceException if accessing service descriptions fails
   */
  private ServiceDescription[] readServiceDescriptions() throws ArcGISWebServiceException {
    String soapUrl = extractRootUrl(getTarget().getSoapUrl());
    ServiceCatalogBindingStub stub = new ServiceCatalogBindingStub(soapUrl);
    ServiceDescription[] descriptors = stub.getServiceDescriptions();
    return descriptors;
  }

  /**
   * Creates resource folders.
   * @param context iteration context
   * @return resource folders (empty on a web-service failure, which is reported to the context)
   */
  private ResourceFolders createResourceFolders(IterationContext context) {
    try {
      ServiceDescription[] descriptors = readServiceDescriptions();
      return new ResourceFolders(context, factory, descriptors);
    } catch (ArcGISWebServiceException ex) {
      context.onIterationException(ex);
      return new ResourceFolders(context, factory, new ServiceDescription[]{});
    }
  }

  /**
   * ArcGIS folders.
   */
  private class ResourceFolders implements Iterable<IServiceInfoProvider> {
    /** iteration context */
    private IterationContext context;
    /** service handler factory */
    private ServiceHandlerFactory factory;
    /** service descriptors */
    private ServiceDescription[] descriptors;
    /** normalized target SOAP URL */
    private String normalizedTargetSoapUrl;
    /** indicator to match everything or only selected service */
    private boolean matchAll;
    /** indicator to check folder */
    private boolean checkFolder;
    /** maps each child service description to its parent (if any) */
    private HashMap<ServiceDescription,ServiceDescription> childToParent = new HashMap<ServiceDescription, ServiceDescription>();
    /** maps each service description to the service info created for it */
    private HashMap<ServiceDescription,ServiceInfo> sdToSi = new HashMap<ServiceDescription, ServiceInfo>();

    /**
     * Creates instance of the folders.
     * @param context iteration context
     * @param factory service handler factory
     * @param descriptors service descriptors
     */
    public ResourceFolders(IterationContext context, ServiceHandlerFactory factory, ServiceDescription[] descriptors) {
      if (context==null) throw new IllegalArgumentException("No context provided.");
      if (factory==null) throw new IllegalArgumentException("No factory provided.");
      if (descriptors==null) throw new IllegalArgumentException("No descriptors provided.");
      this.context = context;
      this.factory = factory;
      this.descriptors = descriptors;
      this.normalizedTargetSoapUrl = normalizeUrl(getTarget().getTargetSoapUrl());
      this.matchAll = normalizedTargetSoapUrl.equalsIgnoreCase(extractRootUrl(getTarget().getSoapUrl()));
      this.checkFolder = !normalizedTargetSoapUrl.endsWith("Server");
      // Build child->parent links: a child's URL is assumed to start with its
      // parent's URL up to and including the parent type token.
      HashMap<String,ServiceDescription> urlToSD = new HashMap<String, ServiceDescription>();
      for (ServiceDescription sd: descriptors) {
        String url = sd.getUrl();
        urlToSD.put(url, sd);
      }
      for (ServiceDescription sd: descriptors) {
        if (sd.getParentType().isEmpty()) continue;
        int index = sd.getUrl().indexOf(sd.getParentType()) + sd.getParentType().length();
        String url = sd.getUrl().substring(0, index);
        ServiceDescription parentSD = urlToSD.get(url);
        childToParent.put(sd, parentSD);
      }
    }

    public Iterator<IServiceInfoProvider> iterator() {
      return new AGSRecordsIterator();
    }

    /**
     * ArcGIS folders iterator.
     */
    private class AGSRecordsIterator extends ReadOnlyIterator<IServiceInfoProvider> {
      /** index of the current folder */
      private int index = -1;
      /** service handler */
      private ServiceHandler handler = null;
      /** service info */
      private ServiceInfo info = null;

      /**
       * Resets current service information.
       */
      private void reset() {
        handler = null;
        info = null;
      }

      public boolean hasNext() {
        if (handler!=null && info!=null) return true;
        if (index+1>=descriptors.length) return false;
        ServiceDescription desc = descriptors[++index];
        String currentSoapUrl = desc.getUrl();
        // Case-insensitively replace the server SOAP root with the REST root;
        // \Q...\E quotes the URL so regex metacharacters in it are literal.
        String currentRestUrl = Pattern.compile("\\Q"+getTarget().getSoapUrl()+"\\E", Pattern.CASE_INSENSITIVE).matcher(currentSoapUrl).replaceFirst(getTarget().getRestUrl());
        boolean matchesTarget = false;
        if (!matchAll) {
          matchesTarget = normalizedTargetSoapUrl.equalsIgnoreCase(currentSoapUrl);
          if (!matchesTarget && checkFolder) {
            matchesTarget = currentSoapUrl.toLowerCase().startsWith(normalizedTargetSoapUrl.toLowerCase()+"/");
          }
        }
        // NOTE(review): the skip path recurses into hasNext(); a very large catalog of
        // non-matching services could deepen the stack — confirm catalog sizes stay small.
        if (!(matchAll || matchesTarget)) return hasNext();
        handler = factory.makeHandler(desc.getType());
        if (handler==null) return hasNext();
        handler.setCredentials(getCredentials());
        // get parent description if available for the current description
        ServiceDescription parentDesc = childToParent.get(desc);
        // get service info for the parent
        ServiceInfo parentInfo = sdToSi.get(parentDesc);
        // create service info for the current service description
        info = handler.createServiceInfo(parentInfo, desc, currentRestUrl, currentSoapUrl);
        // store mapping between service description and service info
        sdToSi.put(desc, info);
        return true;
      }

      @Override
      public IServiceInfoProvider next() {
        final ResourceRecordsFamily family = new ResourceRecordsFamily(context, factory, handler, info, !matchAll && !checkFolder);
        // NOTE(review): reset() nulls 'info' BEFORE it is passed to ServiceInfoProvider
        // below, so the provider is constructed with null — confirm whether
        // ServiceInfoProvider tolerates this or the capture was meant to happen first.
        reset();
        return new ServiceInfoProvider(info) {
          @Override
          public Iterable<Resource> getNodes() {
            return family;
          }
        };
      }
    }
  }

  /**
   * Family of the records. This is a collection of records derived from the same
   * service URL.
   */
  private class ResourceRecordsFamily implements Iterable<Resource> {
    /** iteration context */
    private IterationContext context;
    /** service handler factory */
    private ServiceHandlerFactory factory;
    /** service handler */
    private ServiceHandler handler;
    /** service info */
    private ServiceInfo info;
    /** is native */
    private boolean isNative;

    /**
     * Creates instance of the records family.
     * @param context iteration context
     * @param factory service handler factory
     * @param handler service handler
     * @param info service info
     * @param isNative <code>true</code> to append native record
     */
    public ResourceRecordsFamily(IterationContext context, ServiceHandlerFactory factory, ServiceHandler handler, ServiceInfo info, boolean isNative) {
      if (context==null) throw new IllegalArgumentException("No context provided.");
      if (factory==null) throw new IllegalArgumentException("No factory provided.");
      if (handler==null) throw new IllegalArgumentException("No handler provided.");
      if (info==null) throw new IllegalArgumentException("No info provided.");
      this.context = context;
      this.factory = factory;
      this.handler = handler;
      this.info = info;
      this.isNative = isNative;
    }

    public Iterator<Resource> iterator() {
      ArrayList<Resource> recs = new ArrayList<Resource>();
      try {
        handler.appendRecord(recs, factory, info, isNative);
      } catch (Exception ex) {
        // Best-effort: report and return whatever records were appended so far.
        context.onIterationException(ex);
      }
      return recs.iterator();
    }
  }
}
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.packages; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.assertThrows; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.cmdline.PackageIdentifier; import com.google.devtools.build.lib.events.Event; import com.google.devtools.build.lib.events.StoredEventHandler; import com.google.devtools.build.lib.packages.PackageValidator.InvalidPackageException; import com.google.devtools.build.lib.packages.util.PackageFactoryTestBase; import com.google.devtools.build.lib.syntax.ParserInput; import com.google.devtools.build.lib.syntax.StarlarkFile; import com.google.devtools.build.lib.syntax.StarlarkSemantics; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.RootedPath; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Set; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** * Unit tests for {@code PackageFactory}. 
 */
@RunWith(JUnit4.class)
public class PackageFactoryTest extends PackageFactoryTestBase {

  @Test
  public void testCreatePackage() throws Exception {
    Path buildFile = scratch.file("/pkgname/BUILD", "# empty build file ");
    Package pkg = packages.createPackage("pkgname", RootedPath.toRootedPath(root, buildFile));
    assertThat(pkg.getName()).isEqualTo("pkgname");
    assertThat(Sets.newHashSet(pkg.getTargets(Rule.class))).isEmpty();
  }

  @Test
  public void testBadRuleName() throws Exception {
    events.setFailFast(false);
    Path buildFile = scratch.file("/badrulename/BUILD", "cc_library(name = 3)");
    Package pkg = packages.createPackage("badrulename", RootedPath.toRootedPath(root, buildFile));
    events.assertContainsError("cc_library 'name' attribute must be a string");
    assertThat(pkg.containsErrors()).isTrue();
  }

  @Test
  public void testNoRuleName() throws Exception {
    events.setFailFast(false);
    Path buildFile = scratch.file("/badrulename/BUILD", "cc_library()");
    Package pkg = packages.createPackage("badrulename", RootedPath.toRootedPath(root, buildFile));
    events.assertContainsError("cc_library rule has no 'name' attribute");
    assertThat(pkg.containsErrors()).isTrue();
  }

  @Test
  public void testBadPackageName() throws Exception {
    NoSuchPackageException e =
        assertThrows(
            NoSuchPackageException.class,
            () ->
                packages.createPackage(
                    "not even a legal/.../label",
                    RootedPath.toRootedPath(root, emptyBuildFile("not even a legal/.../label"))));
    assertThat(e)
        .hasMessageThat()
        .contains(
            "no such package 'not even a legal/.../label': "
                + "illegal package name: 'not even a legal/.../label' ");
  }

  @Test
  public void testColonInExportsFilesTargetName() throws Exception {
    events.setFailFast(false);
    Path path =
        scratch.file(
            "/googledata/cafe/BUILD", "exports_files(['houseads/house_ads:ca-aol_parenting_html'])");
    Package pkg = packages.createPackage("googledata/cafe", RootedPath.toRootedPath(root, path));
    events.assertContainsError("target names may not contain ':'");
    assertThat(pkg.getTargets(FileTarget.class).toString())
        .doesNotContain("houseads/house_ads:ca-aol_parenting_html");
    assertThat(pkg.containsErrors()).isTrue();
  }

  @Test
  public void testExportsFilesVisibilityMustBeSequence() throws Exception {
    expectEvalError(
        "in call to exports_files(), parameter 'visibility' got value of type 'depset', want"
            + " 'sequence or NoneType'",
        "exports_files(srcs=[], visibility=depset(['notice']))");
  }

  @Test
  public void testExportsFilesLicensesMustBeSequence() throws Exception {
    expectEvalError(
        "in call to exports_files(), parameter 'licenses' got value of type 'depset', want"
            + " 'sequence or NoneType'",
        "exports_files(srcs=[], licenses=depset(['notice']))");
  }

  @Test
  public void testPackageNameWithPROTECTEDIsOk() throws Exception {
    events.setFailFast(false);
    // One "PROTECTED":
    assertThat(isValidPackageName("foo/PROTECTED/bar")).isTrue();
    // Multiple "PROTECTED"s:
    assertThat(isValidPackageName("foo/PROTECTED/bar/PROTECTED/wiz")).isTrue();
  }

  @Test
  public void testDuplicatedDependencies() throws Exception {
    events.setFailFast(false);
    Path buildFile =
        scratch.file(
            "/has_dupe/BUILD",
            "cc_library(name='dep')",
            "cc_library(name='has_dupe', deps=[':dep', ':dep'])");

    Package pkg = packages.createPackage("has_dupe", RootedPath.toRootedPath(root, buildFile));
    events.assertContainsError(
        "Label '//has_dupe:dep' is duplicated in the 'deps' " + "attribute of rule 'has_dupe'");
    assertThat(pkg.containsErrors()).isTrue();
    assertThat(pkg.getRule("has_dupe")).isNotNull();
    assertThat(pkg.getRule("dep")).isNotNull();
    assertThat(pkg.getRule("has_dupe").containsErrors()).isTrue();
    assertThat(pkg.getRule("dep").containsErrors()).isTrue(); // because all rules in an
                                                              // errant package are
                                                              // themselves errant.
  }

  @Test
  public void testPrefixWithinSameRule1() throws Exception {
    events.setFailFast(false);
    Path buildFile =
        scratch.file(
            "/fruit/orange/BUILD", "genrule(name='orange', srcs=[], outs=['a', 'a/b'], cmd='')");
    packages.createPackage("fruit/orange", RootedPath.toRootedPath(root, buildFile));
    events.assertContainsError("rule 'orange' has conflicting output files 'a/b' and 'a");
  }

  @Test
  public void testPrefixWithinSameRule2() throws Exception {
    events.setFailFast(false);
    Path buildFile =
        scratch.file(
            "/fruit/orange/BUILD", "genrule(name='orange', srcs=[], outs=['a/b', 'a'], cmd='')");
    packages.createPackage("fruit/orange", RootedPath.toRootedPath(root, buildFile));
    events.assertContainsError("rule 'orange' has conflicting output files 'a' and 'a/b");
  }

  @Test
  public void testPrefixBetweenRules1() throws Exception {
    events.setFailFast(false);
    Path buildFile =
        scratch.file(
            "/fruit/kiwi/BUILD",
            "genrule(name='kiwi1', srcs=[], outs=['a'], cmd='')",
            "genrule(name='kiwi2', srcs=[], outs=['a/b'], cmd='')");
    packages.createPackage("fruit/kiwi", RootedPath.toRootedPath(root, buildFile));
    events.assertContainsError(
        "output file 'a/b' of rule 'kiwi2' conflicts " + "with output file 'a' of rule 'kiwi1'");
  }

  @Test
  public void testPrefixBetweenRules2() throws Exception {
    events.setFailFast(false);
    Path buildFile =
        scratch.file(
            "/fruit/kiwi/BUILD",
            "genrule(name='kiwi1', srcs=[], outs=['a/b'], cmd='')",
            "genrule(name='kiwi2', srcs=[], outs=['a'], cmd='')");
    packages.createPackage("fruit/kiwi", RootedPath.toRootedPath(root, buildFile));
    events.assertContainsError(
        "output file 'a' of rule 'kiwi2' conflicts " + "with output file 'a/b' of rule 'kiwi1'");
  }

  @Test
  public void testPackageNameFunction() throws Exception {
    Path buildFile = scratch.file("/pina/BUILD", "cc_library(name=package_name() + '-colada')");

    Package pkg = packages.createPackage("pina", RootedPath.toRootedPath(root, buildFile));
    events.assertNoWarningsOrErrors();
    assertThat(pkg.containsErrors()).isFalse();
    assertThat(pkg.getRule("pina-colada")).isNotNull();
    assertThat(pkg.getRule("pina-colada").containsErrors()).isFalse();
    assertThat(Sets.newHashSet(pkg.getTargets(Rule.class)).size()).isSameInstanceAs(1);
  }

  @Test
  public void testPackageFunctionInExternalRepository() throws Exception {
    Path buildFile =
        scratch.file(
            "/external/a/b/BUILD",
            "genrule(name='c', srcs=[], outs=['o'], cmd=repository_name() + ' ' + package_name())");
    Package pkg =
        packages.createPackage(
            PackageIdentifier.create("@a", PathFragment.create("b")),
            RootedPath.toRootedPath(root, buildFile),
            events.reporter());
    Rule c = pkg.getRule("c");
    assertThat(AggregatingAttributeMapper.of(c).get("cmd", Type.STRING)).isEqualTo("@a b");
  }

  @Test
  public void testDuplicateRuleName() throws Exception {
    events.setFailFast(false);
    Path buildFile =
        scratch.file(
            "/duplicaterulename/BUILD",
            "proto_library(name = 'spellcheck_proto',",
            "         srcs = ['spellcheck.proto'],",
            "         cc_api_version = 2)",
            "cc_library(name = 'spellcheck_proto')",
            // conflict error stops execution
            "x = 1//0"); // not reached
    Package pkg =
        packages.createPackage("duplicaterulename", RootedPath.toRootedPath(root, buildFile));
    events.assertContainsError(
        "cc_library rule 'spellcheck_proto' in package 'duplicaterulename' conflicts with existing"
            + " proto_library rule");
    events.assertDoesNotContainEvent("division by zero");
    assertThat(pkg.containsErrors()).isTrue();
  }

  @Test
  public void testBuildFileTargetExists() throws Exception {
    Path buildFile = scratch.file("/foo/BUILD", "");
    Package pkg = packages.createPackage("foo", RootedPath.toRootedPath(root, buildFile));
    Target target = pkg.getTarget("BUILD");
    assertThat(target.getName()).isEqualTo("BUILD");

    // Test that it's memoized:
    assertThat(pkg.getTarget("BUILD")).isSameInstanceAs(target);
  }

  @Test
  public void testCreationOfInputFiles() throws Exception {
    Path buildFile =
        scratch.file(
            "/foo/BUILD",
            "exports_files(['Z'])",
            "cc_library(name='W', deps=['X', 'Y'])",
            "cc_library(name='X', srcs=['X'])",
            "cc_library(name='Y')");
    Package pkg = packages.createPackage("foo", RootedPath.toRootedPath(root, buildFile));
    assertThat(pkg.containsErrors()).isFalse();

    // X is a rule with a circular self-dependency.
    assertThat(pkg.getTarget("X").getClass()).isSameInstanceAs(Rule.class);

    // Y is a rule
    assertThat(pkg.getTarget("Y").getClass()).isSameInstanceAs(Rule.class);

    // Z is a file
    assertThat(pkg.getTarget("Z").getClass()).isSameInstanceAs(InputFile.class);

    // A is nothing
    NoSuchTargetException e = assertThrows(NoSuchTargetException.class, () -> pkg.getTarget("A"));
    assertThat(e)
        .hasMessageThat()
        .isEqualTo(
            "no such target '//foo:A': "
                + "target 'A' not declared in package 'foo' defined by /foo/BUILD");

    // These are the only input files: BUILD, Z
    Set<String> inputFiles = Sets.newTreeSet();
    for (InputFile inputFile : pkg.getTargets(InputFile.class)) {
      inputFiles.add(inputFile.getName());
    }
    assertThat(Lists.newArrayList(inputFiles)).containsExactly("BUILD", "Z").inOrder();
  }

  @Test
  public void testDuplicateRuleIsNotAddedToPackage() throws Exception {
    events.setFailFast(false);
    Path path =
        scratch.file(
            "/dup/BUILD",
            "proto_library(name = 'dup_proto',",
            "              srcs  = ['dup.proto'],",
            "              cc_api_version = 2)",
            "",
            "cc_library(name = 'dup_proto',",
            "           srcs = ['dup.pb.cc', 'dup.pb.h'])");
    Package pkg = packages.createPackage("dup", RootedPath.toRootedPath(root, path));
    events.assertContainsError(
        "cc_library rule 'dup_proto' in package 'dup' "
            + "conflicts with existing proto_library rule");
    assertThat(pkg.containsErrors()).isTrue();

    Rule dupProto = pkg.getRule("dup_proto");
    // Check that the first rule of the given name "wins", and that each of the
    // "winning" rule's outputs is a member of the package.
    assertThat(dupProto.getRuleClass()).isEqualTo("proto_library");
    for (OutputFile out : dupProto.getOutputFiles()) {
      assertThat(pkg.getTargets(FileTarget.class)).contains(out);
    }
  }

  @Test
  public void testConflictingRuleDoesNotUpdatePackage() throws Exception {
    events.setFailFast(false);

    // In this test, rule2's outputs conflict with rule1, so rule2 is rejected.
    // However, we must check that neither rule2, nor any of its inputs or
    // outputs is a member of the package, and that the conflicting output file
    // "out2" still has rule1 as its getGeneratingRule().
    Path path =
        scratch.file(
            "/conflict/BUILD",
            "genrule(name = 'rule1',",
            "        cmd = '',",
            "        srcs = ['in1', 'in2'],",
            "        outs = ['out1', 'out2'])",
            "genrule(name = 'rule2',",
            "        cmd = '',",
            "        srcs = ['in3', 'in4'],",
            "        outs = ['out3', 'out2'])");
    Package pkg = packages.createPackage("conflict", RootedPath.toRootedPath(root, path));
    events.assertContainsError(
        "generated file 'out2' in rule 'rule2' "
            + "conflicts with existing generated file from rule 'rule1'");
    assertThat(pkg.containsErrors()).isTrue();

    assertThat(pkg.getRule("rule2")).isNull();

    // Ensure that rule2's "out2" didn't overwrite rule1's:
    assertThat(((OutputFile) pkg.getTarget("out2")).getGeneratingRule())
        .isSameInstanceAs(pkg.getRule("rule1"));

    // None of rule2, its inputs, or its outputs should belong to pkg:
    List<Target> found = new ArrayList<>();
    for (String targetName : ImmutableList.of("rule2", "in3", "in4", "out3")) {
      try {
        found.add(pkg.getTarget(targetName));
        // No fail() here: if there's no exception, we add the name to a list
        // and we check below that it's empty.
      } catch (NoSuchTargetException e) {
        /* good! */
      }
    }
    assertThat(found).isEmpty();
  }

  // Was: Regression test for bug "Rules declared after an error in
  // a package should be considered 'in error'".
  // Then: Regression test for bug "Why aren't ERRORS considered
  // fatal?*"
  // Now: Regression test for: execution should stop at the first EvalException;
  // all rules created prior to the exception error are marked in error.
  @Test
  public void testAllRulesInErrantPackageAreInError() throws Exception {
    events.setFailFast(false);
    Path path =
        scratch.file(
            "/error/BUILD",
            "genrule(name = 'rule1',",
            "        cmd = ':',",
            "        outs = ['out.1'])",
            "list = ['bad']",
            "x = 1//0", // dynamic error
            "genrule(name = 'rule2',",
            "        cmd = ':',",
            "        outs = list)");
    Package pkg = packages.createPackage("error", RootedPath.toRootedPath(root, path));
    events.assertContainsError("division by zero");

    assertThat(pkg.containsErrors()).isTrue();

    // rule1 would be fine but is still marked as in error:
    assertThat(pkg.getRule("rule1").containsErrors()).isTrue();

    // rule2's genrule is never executed.
    Rule rule2 = pkg.getRule("rule2");
    assertThat(rule2).isNull();
  }

  @Test
  public void testHelpfulErrorForMissingExportsFiles() throws Exception {
    Path path = scratch.file("/x/BUILD", "cc_library(name='x', srcs=['x.cc'])");
    scratch.file("/x/x.cc");
    scratch.file("/x/y.cc");
    scratch.file("/x/dir/dummy");

    Package pkg = packages.createPackage("x", RootedPath.toRootedPath(root, path));

    assertThat(pkg.getTarget("x.cc")).isNotNull(); // existing and mentioned.

    NoSuchTargetException e =
        assertThrows(NoSuchTargetException.class, () -> pkg.getTarget("y.cc"));
    assertThat(e)
        .hasMessageThat()
        .isEqualTo(
            "no such target '//x:y.cc': "
                + "target 'y.cc' not declared in package 'x'; "
                + "however, a source file of this name exists.  "
                + "(Perhaps add 'exports_files([\"y.cc\"])' to x/BUILD?) "
                + "defined by /x/BUILD");

    e = assertThrows(NoSuchTargetException.class, () -> pkg.getTarget("z.cc"));
    assertThat(e)
        .hasMessageThat()
        .isEqualTo(
            "no such target '//x:z.cc': "
                + "target 'z.cc' not declared in package 'x' (did you mean 'x.cc'?) "
                + "defined by /x/BUILD");

    e = assertThrows(NoSuchTargetException.class, () -> pkg.getTarget("dir"));
    assertThat(e)
        .hasMessageThat()
        .isEqualTo(
            "no such target '//x:dir': target 'dir' not declared in package 'x'; "
                + "however, a source directory of this name exists.  "
                + "(Perhaps add 'exports_files([\"dir\"])' to x/BUILD, "
                + "or define a filegroup?) defined by /x/BUILD");
  }

  @Test
  public void testTestSuitesImplicitlyDependOnAllRulesInPackage() throws Exception {
    Path path =
        scratch.file(
            "/x/BUILD",
            "java_test(name='j')",
            "test_suite(name='t1')",
            "test_suite(name='t2', tests=['//foo'])",
            "test_suite(name='t3', tests=['//foo'])",
            "cc_test(name='c')");
    Package pkg = packages.createPackage("x", RootedPath.toRootedPath(root, path));

    // Things to note:
    // - the t1 refers to both :j and :c, even though :c is a forward reference.
    // - $implicit_tests is empty unless tests=[]

    assertThat(attributes(pkg.getRule("t1")).get("$implicit_tests", BuildType.LABEL_LIST))
        .containsExactlyElementsIn(
            Sets.newHashSet(
                Label.parseAbsolute("//x:c", ImmutableMap.of()),
                Label.parseAbsolute("//x:j", ImmutableMap.of())));
    assertThat(attributes(pkg.getRule("t2")).get("$implicit_tests", BuildType.LABEL_LIST))
        .isEmpty();
    assertThat(attributes(pkg.getRule("t3")).get("$implicit_tests", BuildType.LABEL_LIST))
        .isEmpty();
  }

  @Test
  public void testPackageValidationFailureRegisteredAfterLoading() throws Exception {
    Path path = scratch.file("/x/BUILD", "sh_library(name='y')");

    dummyPackageValidator.setImpl(
        (pkg, eventHandler) -> {
          if (pkg.getName().equals("x")) {
            eventHandler.handle(Event.warn("warning event"));
            throw new InvalidPackageException(pkg.getPackageIdentifier(), "nope");
          }
        });

    Package pkg = packages.createPackage("x", RootedPath.toRootedPath(root, path));
    assertThat(pkg.containsErrors()).isFalse();

    StoredEventHandler eventHandler = new StoredEventHandler();
    InvalidPackageException expected =
        assertThrows(
            InvalidPackageException.class,
            () ->
                packages
                    .factory()
                    .afterDoneLoadingPackage(
pkg, StarlarkSemantics.DEFAULT_SEMANTICS, /*loadTimeNanos=*/ 0, eventHandler)); assertThat(expected).hasMessageThat().contains("no such package 'x': nope"); assertThat(eventHandler.getEvents()).containsExactly(Event.warn("warning event")); } @Test public void testGlobDirectoryExclusion() throws Exception { emptyFile("/fruit/data/apple"); emptyFile("/fruit/data/pear"); emptyFile("/fruit/data/berry/black"); emptyFile("/fruit/data/berry/blue"); Path file = scratch.file( "/fruit/BUILD", "cc_library(name = 'yes', srcs = glob(['data/*']))", "cc_library(name = 'no', srcs = glob(['data/*'], exclude_directories=0))"); Package pkg = packages.eval("fruit", RootedPath.toRootedPath(root, file)); events.assertNoWarningsOrErrors(); List<Label> yesFiles = attributes(pkg.getRule("yes")).get("srcs", BuildType.LABEL_LIST); List<Label> noFiles = attributes(pkg.getRule("no")).get("srcs", BuildType.LABEL_LIST); assertThat(yesFiles) .containsExactly( Label.parseAbsolute("@//fruit:data/apple", ImmutableMap.of()), Label.parseAbsolute("@//fruit:data/pear", ImmutableMap.of())); assertThat(noFiles) .containsExactly( Label.parseAbsolute("@//fruit:data/apple", ImmutableMap.of()), Label.parseAbsolute("@//fruit:data/pear", ImmutableMap.of()), Label.parseAbsolute("@//fruit:data/berry", ImmutableMap.of())); } // TODO(bazel-team): This is really a test for GlobCache. 
@Test public void testRecursiveGlob() throws Exception { emptyFile("/rg/a.cc"); emptyFile("/rg/foo/bar.cc"); emptyFile("/rg/foo/foo.cc"); emptyFile("/rg/foo/wiz/bam.cc"); emptyFile("/rg/foo/wiz/bum.cc"); emptyFile("/rg/foo/wiz/quid/gav.cc"); Path file = scratch.file( "/rg/BUILD", "cc_library(name = 'ri', srcs = glob(['**/*.cc']))", "cc_library(name = 're', srcs = glob(['*.cc'], exclude=['**/*.c']))"); Package pkg = packages.eval("rg", RootedPath.toRootedPath(root, file)); events.assertNoWarningsOrErrors(); assertEvaluates( pkg, ImmutableList.of( "BUILD", "a.cc", "foo", "foo/bar.cc", "foo/foo.cc", "foo/wiz", "foo/wiz/bam.cc", "foo/wiz/bum.cc", "foo/wiz/quid", "foo/wiz/quid/gav.cc"), "**"); assertEvaluates( pkg, ImmutableList.of( "a.cc", "foo/bar.cc", "foo/foo.cc", "foo/wiz/bam.cc", "foo/wiz/bum.cc", "foo/wiz/quid/gav.cc"), "**/*.cc"); assertEvaluates( pkg, ImmutableList.of("foo/bar.cc", "foo/wiz/bam.cc", "foo/wiz/bum.cc"), "**/b*.cc"); assertEvaluates( pkg, ImmutableList.of( "foo/bar.cc", "foo/foo.cc", "foo/wiz/bam.cc", "foo/wiz/bum.cc", "foo/wiz/quid/gav.cc"), "**/*/*.cc"); assertEvaluates(pkg, ImmutableList.of("foo/wiz/quid/gav.cc"), "foo/**/quid/*.cc"); assertEvaluates( pkg, Collections.<String>emptyList(), ImmutableList.of("*.cc", "*/*.cc", "*/*/*.cc"), ImmutableList.of("**/*.cc")); assertEvaluates( pkg, Collections.<String>emptyList(), ImmutableList.of("**/*.cc"), ImmutableList.of("**/*.cc")); assertEvaluates( pkg, Collections.<String>emptyList(), ImmutableList.of("**/*.cc"), ImmutableList.of("*.cc", "*/*.cc", "*/*/*.cc", "*/*/*/*.cc")); assertEvaluates( pkg, Collections.<String>emptyList(), ImmutableList.of("**"), ImmutableList.of("*", "*/*", "*/*/*", "*/*/*/*")); assertEvaluates( pkg, ImmutableList.of( "foo/bar.cc", "foo/foo.cc", "foo/wiz/bam.cc", "foo/wiz/bum.cc", "foo/wiz/quid/gav.cc"), ImmutableList.of("**/*.cc"), ImmutableList.of("*.cc")); assertEvaluates( pkg, ImmutableList.of("a.cc", "foo/wiz/bam.cc", "foo/wiz/bum.cc", "foo/wiz/quid/gav.cc"), 
ImmutableList.of("**/*.cc"), ImmutableList.of("*/*.cc")); assertEvaluates( pkg, ImmutableList.of("a.cc", "foo/bar.cc", "foo/foo.cc", "foo/wiz/quid/gav.cc"), ImmutableList.of("**/*.cc"), ImmutableList.of("**/wiz/*.cc")); } @Test public void testTooManyArgumentsGlobErrors() throws Exception { events.setFailFast(false); assertGlobFails( "glob(['incl'],['excl'],3,True,'extraarg')", "glob() accepts no more than 4 positional arguments but got 5"); } @Test public void testGlobEnforcesListArgument() throws Exception { events.setFailFast(false); assertGlobFails( "glob(1, exclude=2)", "in call to glob(), parameter 'include' got value of type 'int', want 'sequence'"); } @Test public void testGlobEnforcesListOfStringsArguments() throws Exception { events.setFailFast(false); assertGlobFails( "glob(['a', 'b'], exclude=['c', 42])", "expected value of type 'string' for element 1 of 'glob' argument, but got 42 (int)"); } @Test public void testGlobNegativeTest() throws Exception { // Negative test that assertGlob does throw an error when asserting against the wrong values. 
IllegalArgumentException e = assertThrows( IllegalArgumentException.class, () -> assertGlobMatches( /*result=*/ ImmutableList.of("Wombat1.java", "This_file_doesn_t_exist.java"), /*includes=*/ ImmutableList.of("W*", "subdir"), /*excludes=*/ ImmutableList.<String>of(), /* excludeDirs= */ true)); assertThat(e).hasMessageThat().isEqualTo("ERROR /globs/BUILD:2:77: incorrect glob result"); } @Test public void testGlobExcludeDirectories() throws Exception { assertGlobMatches( /*result=*/ ImmutableList.of("Wombat1.java", "Wombat2.java"), /*includes=*/ ImmutableList.of("W*", "subdir"), /*excludes=*/ ImmutableList.<String>of(), /* excludeDirs= */ true); } @Test public void testGlobDoesNotExcludeDirectories() throws Exception { assertGlobMatches( /*result=*/ ImmutableList.of("Wombat1.java", "Wombat2.java", "subdir"), /*includes=*/ ImmutableList.of("W*", "subdir"), /*excludes=*/ ImmutableList.<String>of(), /* excludeDirs= */ false); } @Test public void testGlobWithEmptyExcludedList() throws Exception { assertGlobMatches( /*result=*/ ImmutableList.of("Wombat1.java", "Wombat2.java"), /*includes=*/ ImmutableList.of("W*"), /*excludes=*/ Collections.<String>emptyList(), /* excludeDirs= */ false); } @Test public void testGlobWithQuestionMarkProducesError() throws Exception { assertGlobProducesError("Wombat?.java", true); } @Test public void testGlobWithoutQuestionMarkDoesntProduceError() throws Exception { assertGlobProducesError("Wombat*.java", false); } @Test public void testGlobWithNonMatchingExcludedList() throws Exception { assertGlobMatches( /*result=*/ ImmutableList.of("Wombat1.java"), /*includes=*/ ImmutableList.of("W*"), /*excludes=*/ ImmutableList.of("*2*"), /* excludeDirs= */ false); } @Test public void testGlobWithTwoMatchingGlobExpressionsAndNonmatchingExclusion() throws Exception { assertGlobMatches( /*result=*/ ImmutableList.of("Wombat1.java", "subdir/Wombat3.java"), /*includes=*/ ImmutableList.of("W*", "subdir/W*"), /*excludes=*/ ImmutableList.of("*2*"), /* 
excludeDirs= */ false); } @Test public void testGlobWithSubdirMatchAndExclusion() throws Exception { assertGlobMatches( /*result=*/ ImmutableList.of("subdir/Wombat3.java"), /*includes=*/ ImmutableList.of("W*", "subdir/W*"), /*excludes=*/ ImmutableList.of("Wombat*.java"), /* excludeDirs= */ false); } @Test public void testBadCharacterInGlob() throws Exception { events.setFailFast(false); assertGlobFails("glob(['?'])", "glob pattern '?' contains forbidden '?' wildcard"); } @Test public void testBadExcludePattern() throws Exception { events.setFailFast(false); // The 'exclude' check is currently only reached if the pattern is "complex". // This seems like a bug: // assertGlobFails("glob(['BUILD'], ['/'])", "pattern cannot be absolute"); assertGlobFails("glob(['BUILD'], ['/*/*'])", "pattern cannot be absolute"); } @Test public void testGlobEscapesAt() throws Exception { // See lib.skyframe.PackageFunctionTest.globEscapesAt and // https://github.com/bazelbuild/bazel/issues/10606. scratch.file("/p/@f.txt"); Path file = scratch.file("/p/BUILD", "print(glob(['*.txt'])[0])"); events.setFailFast(false); // we need this to use print (!) packages.eval("p", RootedPath.toRootedPath(root, file)); events.assertNoWarningsOrErrors(); events.assertContainsDebug(":@f.txt"); // observe prepended colon } /** * Tests that a glob evaluation that encounters an I/O error throws instead of constructing a * package. 
*/ @Test public void testGlobWithIOErrors() throws Exception { events.setFailFast(false); scratch.dir("/pkg"); scratch.dir("/pkg/globs"); Path unreadableSubdir = scratch.resolve("/pkg/globs/unreadable_subdir"); unreadableSubdir.createDirectory(); unreadableSubdir.setReadable(false); Path file = scratch.file("/pkg/BUILD", "cc_library(name = 'c', srcs = glob(['globs/**']))"); assertThrows( NoSuchPackageException.class, () -> packages.eval("pkg", RootedPath.toRootedPath(root, file))); events.assertContainsError("Directory is not readable"); } @Test public void testNativeModuleIsDisabled() throws Exception { events.setFailFast(false); Path buildFile = scratch.file("/pkg/BUILD", "native.cc_library(name='bar')"); Package pkg = packages.createPackage("pkg", RootedPath.toRootedPath(root, buildFile)); assertThat(pkg.containsErrors()).isTrue(); } @Test public void testPackageGroupSpecMinimal() throws Exception { expectEvalSuccess("package_group(name='skin', packages=[])"); } @Test public void testPackageGroupSpecSimple() throws Exception { expectEvalSuccess("package_group(name='skin', packages=['//group/abelian'])"); } @Test public void testPackageGroupSpecEmpty() throws Exception { expectEvalSuccess("package_group(name='seed')"); } @Test public void testPackageGroupSpecIncludes() throws Exception { expectEvalSuccess( "package_group(name='wine',", " includes=['//wine:cabernet_sauvignon',", " '//wine:pinot_noir'])"); } @Test public void testPackageGroupSpecBad() throws Exception { expectEvalError("invalid package name", "package_group(name='skin', packages=['--25:17--'])"); } @Test public void testPackageGroupsWithSameName() throws Exception { expectEvalError( "conflicts with existing package group", "package_group(name='skin', packages=[])", "package_group(name='skin', packages=[])"); } @Test public void testPackageGroupNamedArguments() throws Exception { expectEvalError( "package_group() got unexpected positional argument", "package_group('skin', name = 'x')"); } @Test 
public void testPackageSpecMinimal() throws Exception { Package pkg = expectEvalSuccess("package(default_visibility=[])"); assertThat(pkg.getDefaultVisibility()).isNotNull(); } @Test public void testPackageSpecSimple() throws Exception { expectEvalSuccess("package(default_visibility=['//group:lie'])"); } @Test public void testPackageSpecBad() throws Exception { expectEvalError("invalid target name", "package(default_visibility=[':::'])"); } @Test public void testDoublePackageSpecification() throws Exception { expectEvalError( "can only be used once", "package(default_visibility=[])", "package(default_visibility=[])"); } @Test public void testEmptyPackageSpecification() throws Exception { expectEvalError("at least one argument must be given to the 'package' function", "package()"); } @Test public void testDefaultTestonly() throws Exception { Package pkg = expectEvalSuccess("package(default_testonly = 1)"); assertThat(pkg.getDefaultTestOnly()).isTrue(); } @Test public void testDefaultDeprecation() throws Exception { String testMessage = "OMG PONIES!"; Package pkg = expectEvalSuccess("package(default_deprecation = \"" + testMessage + "\")"); assertThat(pkg.getDefaultDeprecation()).isEqualTo(testMessage); } @Test public void testExportsBuildFile() throws Exception { Package pkg = expectEvalSuccess("exports_files(['BUILD'], visibility=['//visibility:private'])"); assertThat(pkg.getTarget("BUILD")).isEqualTo(pkg.getBuildFile()); } @Test public void testDefaultDeprecationPropagation() throws Exception { String msg = "I am completely operational, and all my circuits are functioning perfectly."; Path file = scratch.file( "/foo/BUILD", "package(default_deprecation = \"" + msg + "\")", "sh_library(name = 'bar', srcs=['b'])"); Package pkg = packages.eval("foo", RootedPath.toRootedPath(root, file)); Rule fooRule = (Rule) pkg.getTarget("bar"); String deprAttr = attributes(fooRule).get("deprecation", com.google.devtools.build.lib.packages.Type.STRING); 
assertThat(deprAttr).isEqualTo(msg); } @Test public void testDefaultTestonlyPropagation() throws Exception { Path file = scratch.file( "/foo/BUILD", "package(default_testonly = 1)", "sh_library(name = 'foo', srcs=['b'])", "sh_library(name = 'bar', srcs=['b'], testonly = 0)"); Package pkg = packages.eval("foo", RootedPath.toRootedPath(root, file)); Rule fooRule = (Rule) pkg.getTarget("foo"); assertThat( attributes(fooRule) .get("testonly", com.google.devtools.build.lib.packages.Type.BOOLEAN)) .isTrue(); Rule barRule = (Rule) pkg.getTarget("bar"); assertThat( attributes(barRule) .get("testonly", com.google.devtools.build.lib.packages.Type.BOOLEAN)) .isFalse(); } @Test public void testDefaultDeprecationOverriding() throws Exception { String msg = "I am completely operational, and all my circuits are functioning perfectly."; String deceive = "OMG PONIES!"; Path file = scratch.file( "/foo/BUILD", "package(default_deprecation = \"" + deceive + "\")", "sh_library(name = 'bar', srcs=['b'], deprecation = \"" + msg + "\")"); Package pkg = packages.eval("foo", RootedPath.toRootedPath(root, file)); Rule fooRule = (Rule) pkg.getTarget("bar"); String deprAttr = attributes(fooRule).get("deprecation", com.google.devtools.build.lib.packages.Type.STRING); assertThat(deprAttr).isEqualTo(msg); } @Test public void testPackageFeatures() throws Exception { Path file = scratch.file( "/a/BUILD", "sh_library(name='before')", "package(features=['b', 'c'])", "sh_library(name='after')"); Package pkg = packages.eval("a", RootedPath.toRootedPath(root, file)); assertThat(pkg.getFeatures()).containsExactly("b", "c"); } @Test public void testTransientErrorsInGlobbing() throws Exception { events.setFailFast(false); Path buildFile = scratch.file("/e/BUILD", "sh_library(name = 'e', data = glob(['*.txt']))"); Path parentDir = buildFile.getParentDirectory(); scratch.file("/e/data.txt"); throwOnReaddir = parentDir; assertThrows( NoSuchPackageException.class, () -> packages.createPackage("e", 
RootedPath.toRootedPath(root, buildFile))); events.setFailFast(true); throwOnReaddir = null; Package pkg = packages.createPackage("e", RootedPath.toRootedPath(root, buildFile)); assertThat(pkg.containsErrors()).isFalse(); assertThat(pkg.getRule("e")).isNotNull(); List<?> globList = (List) pkg.getRule("e").getAttributeContainer().getAttr("data"); assertThat(globList).containsExactly(Label.parseAbsolute("//e:data.txt", ImmutableMap.of())); } @Test public void testExportTwicePublicOK() throws Exception { // In theory, this could be an error, but too many existing files rely on it // and it is okay. expectEvalSuccess( "exports_files([\"a.cc\"],", " visibility = [ \"//visibility:public\" ])", "exports_files([\"a.cc\"],", " visibility = [ \"//visibility:public\" ])"); } @Test public void testExportTwicePublicOK2() throws Exception { expectEvalSuccess( "exports_files([\"a.cc\"],", " visibility = [ \"//visibility:private\" ])", "exports_files([\"a.cc\"],", " visibility = [ \"//visibility:private\" ])"); } @Test public void testExportTwiceFail() throws Exception { expectEvalError( "visibility for exported file 'a.cc' declared twice", "exports_files([\"a.cc\"],", " visibility = [ \"//visibility:private\" ])", "exports_files([\"a.cc\"],", " visibility = [ \"//visibility:public\" ])"); } @Test public void testExportTwiceFail2() throws Exception { expectEvalError( "visibility for exported file 'a.cc' declared twice", "exports_files([\"a.cc\"],", " visibility = [ \"//visibility:public\" ])", "exports_files([\"a.cc\"],", " visibility = [ \"//visibility:private\" ])"); } @Test public void testExportLicenseTwice() throws Exception { expectEvalError( "licenses for exported file 'a.cc' declared twice", "exports_files([\"a.cc\"], licenses = [\"notice\"])", "exports_files([\"a.cc\"], licenses = [\"notice\"])"); } @Test public void testExportGenruleConflict() throws Exception { expectEvalError( "generated file 'a.cc' in rule 'foo' conflicts with existing source file", 
"exports_files([\"a.cc\"],", " visibility = [ \"//visibility:public\" ])", "genrule(name = 'foo',", " outs = ['a.cc'],", " cmd = '')"); } @Test public void testGenruleExportConflict() throws Exception { expectEvalError( "generated label '//pkg:a.cc' conflicts with existing generated file", "genrule(name = 'foo',", " outs = ['a.cc'],", " cmd = '')", "exports_files([\"a.cc\"],", " visibility = [ \"//visibility:public\" ])"); } @Test public void testValidEnvironmentGroup() throws Exception { expectEvalSuccess( "environment(name = 'foo')", "environment_group(name='group', environments = [':foo'], defaults = [':foo'])"); } @Test public void testIncompleteEnvironmentGroup() throws Exception { expectEvalError( "environment_group() missing 1 required named argument: defaults", "environment(name = 'foo')", "environment_group(name='group', environments = [':foo'])"); } @Test public void testEnvironmentGroupMissingTarget() throws Exception { expectEvalError( "environment //pkg:foo does not exist", "environment_group(name='group', environments = [':foo'], defaults = [':foo'])"); } @Test public void testEnvironmentGroupWrongTargetType() throws Exception { expectEvalError( "//pkg:foo is not a valid environment", "cc_library(name = 'foo')", "environment_group(name='group', environments = [':foo'], defaults = [':foo'])"); } @Test public void testEnvironmentGroupWrongPackage() throws Exception { expectEvalError( "//foo:foo is not in the same package as group //pkg:group", "environment_group(name='group', environments = ['//foo'], defaults = ['//foo'])"); } @Test public void testEnvironmentGroupInvalidDefault() throws Exception { expectEvalError( "default //pkg:bar is not a declared environment for group //pkg:group", "environment(name = 'foo')", "environment(name = 'bar')", "environment_group(name='group', environments = [':foo'], defaults = [':bar'])"); } @Test public void testEnvironmentGroupDuplicateEnvironments() throws Exception { expectEvalError( "label '//pkg:foo' is 
duplicated in the 'environments' list of 'group'", "environment(name = 'foo')", "environment_group(name='group', environments = [':foo', ':foo'], defaults = [':foo'])"); } @Test public void testEnvironmentGroupDuplicateDefaults() throws Exception { expectEvalError( "label '//pkg:foo' is duplicated in the 'defaults' list of 'group'", "environment(name = 'foo')", "environment_group(name='group', environments = [':foo'], defaults = [':foo', ':foo'])"); } @Test public void testMultipleEnvironmentGroupsValidMembership() throws Exception { expectEvalSuccess( "environment(name = 'foo')", "environment(name = 'bar')", "environment_group(name='foo_group', environments = [':foo'], defaults = [':foo'])", "environment_group(name='bar_group', environments = [':bar'], defaults = [':bar'])"); } @Test public void testMultipleEnvironmentGroupsConflictingMembership() throws Exception { expectEvalError( "environment //pkg:foo belongs to both //pkg:bar_group and //pkg:foo_group", "environment(name = 'foo')", "environment(name = 'bar')", "environment_group(name='foo_group', environments = [':foo'], defaults = [':foo'])", "environment_group(name='bar_group', environments = [':foo'], defaults = [':foo'])"); } @Test public void testFulfillsReferencesWrongTargetType() throws Exception { expectEvalError( "in \"fulfills\" attribute of //pkg:foo: //pkg:bar is not a valid environment", "environment(name = 'foo', fulfills = [':bar'])", "cc_library(name = 'bar')", "environment_group(name='foo_group', environments = [':foo'], defaults = [])"); } @Test public void testFulfillsNotInEnvironmentGroup() throws Exception { expectEvalError( "in \"fulfills\" attribute of //pkg:foo: //pkg:bar is not a member of this group", "environment(name = 'foo', fulfills = [':bar'])", "environment(name = 'bar')", "environment_group(name='foo_group', environments = [':foo'], defaults = [])"); } @Test public void testPackageDefaultEnvironments() throws Exception { Package pkg = expectEvalSuccess( "package(", " 
default_compatible_with=['//foo'],", " default_restricted_to=['//bar'],", ")"); assertThat(pkg.getDefaultCompatibleWith()) .containsExactly(Label.parseAbsolute("//foo", ImmutableMap.of())); assertThat(pkg.getDefaultRestrictedTo()) .containsExactly(Label.parseAbsolute("//bar", ImmutableMap.of())); } @Test public void testPackageDefaultCompatibilityDuplicates() throws Exception { expectEvalError( "'//foo:foo' is duplicated in the 'default_compatible_with' list", "package(default_compatible_with=['//foo', '//bar', '//foo'])"); } @Test public void testPackageDefaultRestrictionDuplicates() throws Exception { expectEvalError( "'//foo:foo' is duplicated in the 'default_restricted_to' list", "package(default_restricted_to=['//foo', '//bar', '//foo'])"); } @Test public void testGlobPatternExtractor() { StarlarkFile file = StarlarkFile.parse( ParserInput.fromLines( "pattern = '*'", "some_variable = glob([", " '**/*',", " 'a' + 'b',", " pattern,", "])", "other_variable = glob(include = ['a'], exclude = ['b'])", "third_variable = glob(['c'], exclude_directories = 0)")); List<String> globs = new ArrayList<>(); List<String> globsWithDirs = new ArrayList<>(); PackageFactory.checkBuildSyntax( file, globs, globsWithDirs, new HashMap<>(), /*eventHandler=*/ null); assertThat(globs).containsExactly("ab", "a", "**/*"); assertThat(globsWithDirs).containsExactly("c"); } // Tests of BUILD file dialect checks: @Test public void testDefInBuild() throws Exception { checkBuildDialectError( "def func(): pass", // "function definitions are not allowed in BUILD files"); } @Test public void testForStatementForbiddenInBuild() throws Exception { checkBuildDialectError( "for _ in []: pass", // "for loops are not allowed"); } @Test public void testIfStatementForbiddenInBuild() throws Exception { checkBuildDialectError( "if False: pass", // "if statements are not allowed"); } @Test public void testKwargsForbiddenInBuild() throws Exception { checkBuildDialectError( "print(**dict)", // "**kwargs 
arguments are not allowed in BUILD files"); checkBuildDialectError( "len(dict(**{'a': 1}))", // "**kwargs arguments are not allowed in BUILD files"); } @Test public void testArgsForbiddenInBuild() throws Exception { checkBuildDialectError( "print(*['a'])", // "*args arguments are not allowed in BUILD files"); } // Asserts that evaluation of the specified BUILD file produces the expected error. // Modifies: scratch, events, packages; be careful when calling more than once per @Test! private void checkBuildDialectError(String content, String expectedError) throws IOException, InterruptedException, NoSuchPackageException { events.clear(); events.setFailFast(false); Path file = scratch.overwriteFile("/p/BUILD", content); Package pkg = packages.eval("p", RootedPath.toRootedPath(root, file)); assertThat(pkg.containsErrors()).isTrue(); events.assertContainsError(expectedError); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.sdk.io.kinesis;

import com.amazonaws.services.kinesis.model.Shard;
import com.amazonaws.services.kinesis.model.ShardIteratorType;
import java.io.Serializable;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Sets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This class is responsible for establishing the initial set of shards that existed at the given
 * starting point.
 */
class StartingPointShardsFinder implements Serializable {
  private static final Logger LOG = LoggerFactory.getLogger(StartingPointShardsFinder.class);

  /**
   * Finds all the shards at the given startingPoint. This method starts by gathering the oldest
   * shards in the stream and considers them as initial shards set. Then it validates the shards by
   * getting an iterator at the given starting point and trying to read some records. If shard
   * passes the validation then it is added to the result shards set. If not then it is regarded as
   * expired and its successors are taken into consideration. This step is repeated until all valid
   * shards are found.
   *
   * <p>The following diagram depicts sample split and merge operations on a stream with 3 initial
   * shards. Let's consider what happens when T1, T2, T3 or T4 timestamps are passed as the
   * startingPoint.
   *
   * <ul>
   *   <li>T1 timestamp (or TRIM_HORIZON marker) - 0000, 0001 and 0002 shards are the oldest so
   *       they are gathered as initial shards set. All of them are valid at T1 timestamp so they
   *       are all returned from the method.
   *   <li>T2 timestamp - 0000, 0001 and 0002 shards form the initial shards set.
   *       <ul>
   *         <li>0000 passes the validation at T2 timestamp so it is added to the result set
   *         <li>0001 does not pass the validation as it is already closed at T2 timestamp so its
   *             successors 0003 and 0004 are considered. Both are valid at T2 timestamp so they
   *             are added to the resulting set.
   *         <li>0002 also does not pass the validation so its successors 0005 and 0006 are
   *             considered and both are valid.
   *       </ul>
   *       Finally the resulting set contains 0000, 0003, 0004, 0005 and 0006 shards.
   *   <li>T3 timestamp - the beginning is the same as in T2 case.
   *       <ul>
   *         <li>0000 is valid
   *         <li>0001 is already closed at T2 timestamp so its successors 0003 and 0004 are next.
   *             0003 is valid but 0004 is already closed at T3 timestamp. It has one successor
   *             0007 which is the result of merging 0004 and 0005 shards. 0007 has two parent
   *             shards then stored in {@link Shard#parentShardId} and {@link
   *             Shard#adjacentParentShardId} fields. Only one of them should follow the relation
   *             to its successor so it is always the shard stored in parentShardId field. Let's
   *             assume that it was 0004 shard and it's the one that considers 0007 its successor.
   *             0007 is valid at T3 timestamp and it's added to the result set.
   *         <li>0002 is closed at T3 timestamp so its successors 0005 and 0006 are next. 0005 is
   *             also closed because it was merged with 0004 shard. Their successor is 0007 and it
   *             was already considered by 0004 shard so no action here is needed. Shard 0006 is
   *             valid.
   *       </ul>
   *   <li>T4 timestamp (or LATEST marker) - following the same reasoning as in previous cases it
   *       end's up with 0000, 0003, 0008 and 0010 shards.
   * </ul>
   *
   * <pre>
   *      T1                T2            T3                  T4
   *      |                 |             |                   |
   * 0000-------------------------------------------------------------
   *
   *       0003-------------------------------------------------------
   *      /
   * 0001+
   *      \
   *       0004-----------+                 0008----------------------
   *                       \               /
   *                        0007----------+
   *                       /               \
   *       0005-----------+                 0009-+
   *      /                                       \
   * 0002+                                         0010---------------
   *      \                                       /
   *       0006----------------------------------+
   * </pre>
   */
  Set<Shard> findShardsAtStartingPoint(
      SimplifiedKinesisClient kinesis, String streamName, StartingPoint startingPoint)
      throws TransientKinesisException {
    List<Shard> allShards = kinesis.listShards(streamName);
    // Start from the oldest generation of shards (those with no parent in the listing).
    Set<Shard> initialShards = findInitialShardsWithoutParents(streamName, allShards);

    Set<Shard> startingPointShards = new HashSet<>();
    Set<Shard> expiredShards;
    do {
      // Shards readable at the starting point go straight into the result.
      Set<Shard> validShards = validateShards(kinesis, initialShards, streamName, startingPoint);
      startingPointShards.addAll(validShards);
      // NOTE: Sets.difference returns a view over initialShards/validShards; it is only
      // read before initialShards is reassigned below, so the view stays consistent here.
      expiredShards = Sets.difference(initialShards, validShards);
      if (!expiredShards.isEmpty()) {
        LOG.info(
            "Following shards expired for {} stream at '{}' starting point: {}",
            streamName,
            startingPoint,
            expiredShards);
      }
      // Replace the expired shards with their successors and repeat until
      // every branch of the shard lineage yields a valid shard.
      initialShards = findNextShards(allShards, expiredShards);
    } while (!expiredShards.isEmpty());
    return startingPointShards;
  }

  // Resolves the direct successors of every expired shard. A successor reached via
  // adjacentParentShardId is intentionally NOT added: after a merge only the shard
  // referenced by parentShardId follows the relation to the merged child (see the
  // class-level javadoc), which prevents the merged shard from being added twice.
  private Set<Shard> findNextShards(List<Shard> allShards, Set<Shard> expiredShards) {
    Set<Shard> nextShards = new HashSet<>();
    for (Shard expiredShard : expiredShards) {
      boolean successorFound = false;
      for (Shard shard : allShards) {
        if (Objects.equals(expiredShard.getShardId(), shard.getParentShardId())) {
          nextShards.add(shard);
          successorFound = true;
        } else if (Objects.equals(expiredShard.getShardId(), shard.getAdjacentParentShardId())) {
          successorFound = true;
        }
      }
      if (!successorFound) {
        // This can potentially happen during split/merge operation. Newly created shards might be
        // not listed in the allShards list and their predecessor is already considered expired.
        // Retrying should solve the issue.
        throw new IllegalStateException("No successors were found for shard: " + expiredShard);
      }
    }
    return nextShards;
  }

  /**
   * Finds the initial set of shards (the oldest ones). These shards do not have their parents in
   * the shard list.
   */
  private Set<Shard> findInitialShardsWithoutParents(String streamName, List<Shard> allShards) {
    Set<String> shardIds = new HashSet<>();
    for (Shard shard : allShards) {
      shardIds.add(shard.getShardId());
    }
    LOG.info("Stream {} has following shards: {}", streamName, shardIds);
    Set<Shard> shardsWithoutParents = new HashSet<>();
    for (Shard shard : allShards) {
      // A parent id referring to a shard absent from the listing means the parent
      // has aged out, i.e. this shard is part of the oldest visible generation.
      if (!shardIds.contains(shard.getParentShardId())) {
        shardsWithoutParents.add(shard);
      }
    }
    return shardsWithoutParents;
  }

  /**
   * Validates the shards at the given startingPoint. Validity is checked by getting an iterator at
   * the startingPoint and then trying to read some records. This action does not affect the
   * records at all. If the shard is valid then it will get read from exactly the same point and
   * these records will be read again.
   */
  private Set<Shard> validateShards(
      SimplifiedKinesisClient kinesis,
      Iterable<Shard> rootShards,
      String streamName,
      StartingPoint startingPoint)
      throws TransientKinesisException {
    Set<Shard> validShards = new HashSet<>();
    ShardIteratorType shardIteratorType =
        ShardIteratorType.fromValue(startingPoint.getPositionName());
    for (Shard shard : rootShards) {
      String shardIterator =
          kinesis.getShardIterator(
              streamName,
              shard.getShardId(),
              shardIteratorType,
              null,
              startingPoint.getTimestamp());
      GetKinesisRecordsResult records =
          kinesis.getRecords(shardIterator, streamName, shard.getShardId());
      // A shard is considered live at the starting point if reading yields either
      // records or a next iterator (a closed/expired shard yields neither).
      if (records.getNextShardIterator() != null || !records.getRecords().isEmpty()) {
        validShards.add(shard);
      }
    }
    return validShards;
  }
}
/* Copyright (c) 2016 lib4j
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * You should have received a copy of The MIT License (MIT) along with this
 * program. If not, see <http://opensource.org/licenses/MIT/>.
 */

package org.lib4j.logging;

import org.slf4j.Logger;
import org.slf4j.Marker;
import org.slf4j.event.Level;

/**
 * Utility methods that dispatch SLF4J logging calls based on a runtime
 * {@link Level} value, which the {@link Logger} interface does not support
 * directly.
 */
public final class LoggerUtil {
  /**
   * Check if a message of the given level would actually be logged
   * by this logger.
   *
   * @param logger the logger (may be null, in which case false is returned)
   * @param level a message logging level (may be null, in which case false is returned)
   * @return true if the given message level is currently being logged.
   */
  public static boolean isLoggable(final Logger logger, final Level level) {
    if (logger == null || level == null)
      return false;

    switch (level) {
      case INFO:
        return logger.isInfoEnabled();
      case DEBUG:
        return logger.isDebugEnabled();
      case TRACE:
        return logger.isTraceEnabled();
      case WARN:
        return logger.isWarnEnabled();
      case ERROR:
        return logger.isErrorEnabled();
      default:
        return false;
    }
  }

  /**
   * Check if a message of the given level would actually be logged
   * by this logger, taking the given marker into consideration.
   *
   * @param logger the logger (may be null, in which case false is returned)
   * @param level a message logging level (may be null, in which case false is returned)
   * @param marker the marker specific to this log statement
   * @return true if the given message level is currently being logged.
   */
  public static boolean isLoggable(final Logger logger, final Level level, final Marker marker) {
    if (logger == null || level == null)
      return false;

    switch (level) {
      case INFO:
        return logger.isInfoEnabled(marker);
      case DEBUG:
        return logger.isDebugEnabled(marker);
      case TRACE:
        return logger.isTraceEnabled(marker);
      case WARN:
        return logger.isWarnEnabled(marker);
      case ERROR:
        return logger.isErrorEnabled(marker);
      default:
        return false;
    }
  }

  /**
   * Asserts that the given level is non-null, failing with the same
   * "Unsupported level" error that the dispatch methods throw for an unknown
   * level, so callers observe identical behavior for null and unknown levels.
   *
   * @param level the logging level to check
   * @return the given (non-null) level
   * @throws UnsupportedOperationException if level is null
   */
  private static Level checkLevel(final Level level) {
    if (level == null)
      throw new UnsupportedOperationException("Unsupported level: null");

    return level;
  }

  /**
   * Log a message using <code>logger</code> at <code>level</code>.
   *
   * @param logger the logger
   * @param level the logging level
   * @param msg the message string to be logged
   * @throws UnsupportedOperationException if level is not a supported level
   */
  public static void log(final Logger logger, final Level level, final String msg) {
    switch (checkLevel(level)) {
      case INFO:
        logger.info(msg);
        break;
      case DEBUG:
        logger.debug(msg);
        break;
      case TRACE:
        logger.trace(msg);
        break;
      case WARN:
        logger.warn(msg);
        break;
      case ERROR:
        logger.error(msg);
        break;
      default:
        throw new UnsupportedOperationException("Unsupported level: " + level);
    }
  }

  /**
   * Log a message using the <code>logger</code> at <code>level</code> according to the specified
   * format and argument.
   *
   * <p>This form avoids superfluous object creation when the logger
   * is disabled for the <code>level</code> level.</p>
   *
   * @param logger the logger
   * @param level the logging level
   * @param format the format string
   * @param arg the argument
   * @throws UnsupportedOperationException if level is not a supported level
   */
  public static void log(final Logger logger, final Level level, final String format, final Object arg) {
    switch (checkLevel(level)) {
      case INFO:
        logger.info(format, arg);
        break;
      case DEBUG:
        logger.debug(format, arg);
        break;
      case TRACE:
        logger.trace(format, arg);
        break;
      case WARN:
        logger.warn(format, arg);
        break;
      case ERROR:
        logger.error(format, arg);
        break;
      default:
        throw new UnsupportedOperationException("Unsupported level: " + level);
    }
  }

  /**
   * Log a message using the <code>logger</code> at <code>level</code> according to the specified
   * format and arguments.
   *
   * <p>This form avoids superfluous object creation when the logger
   * is disabled for the <code>level</code> level.</p>
   *
   * @param logger the logger
   * @param level the logging level
   * @param format the format string
   * @param arg1 the first argument
   * @param arg2 the second argument
   * @throws UnsupportedOperationException if level is not a supported level
   */
  public static void log(final Logger logger, final Level level, final String format, final Object arg1, final Object arg2) {
    switch (checkLevel(level)) {
      case INFO:
        logger.info(format, arg1, arg2);
        break;
      case DEBUG:
        logger.debug(format, arg1, arg2);
        break;
      case TRACE:
        logger.trace(format, arg1, arg2);
        break;
      case WARN:
        logger.warn(format, arg1, arg2);
        break;
      case ERROR:
        logger.error(format, arg1, arg2);
        break;
      default:
        throw new UnsupportedOperationException("Unsupported level: " + level);
    }
  }

  /**
   * Log a message using the <code>logger</code> at <code>level</code> according to the specified
   * format and arguments.
   *
   * <p>This form avoids superfluous string concatenation when the logger
   * is disabled for the <code>level</code> level. However, this variant incurs the hidden
   * (and relatively small) cost of creating an <code>Object[]</code> before invoking the method,
   * even if this logger is disabled for <code>level</code>. The variants taking
   * {@link #log(Logger,Level,String,Object) one} and {@link #log(Logger,Level,String,Object,Object) two}
   * arguments exist solely in order to avoid this hidden cost.</p>
   *
   * @param logger the logger
   * @param level the logging level
   * @param format the format string
   * @param arguments a list of 3 or more arguments
   * @throws UnsupportedOperationException if level is not a supported level
   */
  public static void log(final Logger logger, final Level level, final String format, final Object... arguments) {
    switch (checkLevel(level)) {
      case INFO:
        logger.info(format, arguments);
        break;
      case DEBUG:
        logger.debug(format, arguments);
        break;
      case TRACE:
        logger.trace(format, arguments);
        break;
      case WARN:
        logger.warn(format, arguments);
        break;
      case ERROR:
        logger.error(format, arguments);
        break;
      default:
        throw new UnsupportedOperationException("Unsupported level: " + level);
    }
  }

  /**
   * Log an exception (throwable) using the <code>logger</code> at <code>level</code> with an
   * accompanying message.
   *
   * @param logger the logger
   * @param level the logging level
   * @param msg the message accompanying the exception
   * @param t the exception (throwable) to log
   * @throws UnsupportedOperationException if level is not a supported level
   */
  public static void log(final Logger logger, final Level level, final String msg, final Throwable t) {
    switch (checkLevel(level)) {
      case INFO:
        logger.info(msg, t);
        break;
      case DEBUG:
        logger.debug(msg, t);
        break;
      case TRACE:
        logger.trace(msg, t);
        break;
      case WARN:
        logger.warn(msg, t);
        break;
      case ERROR:
        logger.error(msg, t);
        break;
      default:
        throw new UnsupportedOperationException("Unsupported level: " + level);
    }
  }

  /**
   * Log a message using <code>logger</code> at <code>level</code> with the specific Marker.
   *
   * @param logger the logger
   * @param level the logging level
   * @param marker the marker specific to this log statement
   * @param msg the message string to be logged
   * @throws UnsupportedOperationException if level is not a supported level
   */
  public static void log(final Logger logger, final Level level, final Marker marker, final String msg) {
    switch (checkLevel(level)) {
      case INFO:
        logger.info(marker, msg);
        break;
      case DEBUG:
        logger.debug(marker, msg);
        break;
      case TRACE:
        logger.trace(marker, msg);
        break;
      case WARN:
        logger.warn(marker, msg);
        break;
      case ERROR:
        logger.error(marker, msg);
        break;
      default:
        throw new UnsupportedOperationException("Unsupported level: " + level);
    }
  }

  /**
   * This method is similar to {@link #log(Logger,Level,String,Object)} method except that the
   * marker data is also taken into consideration.
   *
   * @param logger the logger
   * @param level the logging level
   * @param marker the marker data specific to this log statement
   * @param format the format string
   * @param arg the argument
   * @throws UnsupportedOperationException if level is not a supported level
   */
  public static void log(final Logger logger, final Level level, final Marker marker, final String format, final Object arg) {
    switch (checkLevel(level)) {
      case INFO:
        logger.info(marker, format, arg);
        break;
      case DEBUG:
        logger.debug(marker, format, arg);
        break;
      case TRACE:
        logger.trace(marker, format, arg);
        break;
      case WARN:
        logger.warn(marker, format, arg);
        break;
      case ERROR:
        logger.error(marker, format, arg);
        break;
      default:
        throw new UnsupportedOperationException("Unsupported level: " + level);
    }
  }

  /**
   * This method is similar to {@link #log(Logger,Level,String,Object,Object)}
   * method except that the marker data is also taken into
   * consideration.
   *
   * @param logger the logger
   * @param level the logging level
   * @param marker the marker data specific to this log statement
   * @param format the format string
   * @param arg1 the first argument
   * @param arg2 the second argument
   * @throws UnsupportedOperationException if level is not a supported level
   */
  public static void log(final Logger logger, final Level level, final Marker marker, final String format, final Object arg1, final Object arg2) {
    switch (checkLevel(level)) {
      case INFO:
        logger.info(marker, format, arg1, arg2);
        break;
      case DEBUG:
        logger.debug(marker, format, arg1, arg2);
        break;
      case TRACE:
        logger.trace(marker, format, arg1, arg2);
        break;
      case WARN:
        logger.warn(marker, format, arg1, arg2);
        break;
      case ERROR:
        logger.error(marker, format, arg1, arg2);
        break;
      default:
        throw new UnsupportedOperationException("Unsupported level: " + level);
    }
  }

  /**
   * This method is similar to {@link #log(Logger,Level,String,Object...)}
   * method except that the marker data is also taken into
   * consideration.
   *
   * @param logger the logger
   * @param level the logging level
   * @param marker the marker data specific to this log statement
   * @param format the format string
   * @param arguments a list of 3 or more arguments
   * @throws UnsupportedOperationException if level is not a supported level
   */
  public static void log(final Logger logger, final Level level, final Marker marker, final String format, final Object... arguments) {
    switch (checkLevel(level)) {
      case INFO:
        logger.info(marker, format, arguments);
        break;
      case DEBUG:
        logger.debug(marker, format, arguments);
        break;
      case TRACE:
        logger.trace(marker, format, arguments);
        break;
      case WARN:
        logger.warn(marker, format, arguments);
        break;
      case ERROR:
        logger.error(marker, format, arguments);
        break;
      default:
        throw new UnsupportedOperationException("Unsupported level: " + level);
    }
  }

  /**
   * This method is similar to {@link #log(Logger,Level,String,Throwable)} method
   * except that the marker data is also taken into consideration.
   *
   * @param logger the logger
   * @param level the logging level
   * @param marker the marker data for this log statement
   * @param msg the message accompanying the exception
   * @param t the exception (throwable) to log
   * @throws UnsupportedOperationException if level is not a supported level
   */
  public static void log(final Logger logger, final Level level, final Marker marker, final String msg, final Throwable t) {
    switch (checkLevel(level)) {
      case INFO:
        logger.info(marker, msg, t);
        break;
      case DEBUG:
        logger.debug(marker, msg, t);
        break;
      case TRACE:
        logger.trace(marker, msg, t);
        break;
      case WARN:
        logger.warn(marker, msg, t);
        break;
      case ERROR:
        logger.error(marker, msg, t);
        break;
      default:
        throw new UnsupportedOperationException("Unsupported level: " + level);
    }
  }

  // Utility class; not instantiable.
  private LoggerUtil() {
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.cep.nfa; import org.apache.flink.api.common.typeutils.CompatibilityResult; import org.apache.flink.api.common.typeutils.CompatibilityUtil; import org.apache.flink.api.common.typeutils.CompositeTypeSerializerConfigSnapshot; import org.apache.flink.api.common.typeutils.TypeDeserializerAdapter; import org.apache.flink.api.common.typeutils.TypeSerializer; import org.apache.flink.api.common.typeutils.TypeSerializerConfigSnapshot; import org.apache.flink.api.common.typeutils.TypeSerializerSnapshot; import org.apache.flink.api.common.typeutils.UnloadableDummyTypeSerializer; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.cep.nfa.compiler.NFAStateNameHandler; import org.apache.flink.cep.nfa.sharedbuffer.EventId; import org.apache.flink.cep.nfa.sharedbuffer.Lockable; import org.apache.flink.cep.nfa.sharedbuffer.NodeId; import org.apache.flink.cep.nfa.sharedbuffer.SharedBufferEdge; import org.apache.flink.cep.nfa.sharedbuffer.SharedBufferNode; import org.apache.flink.core.memory.DataInputView; import org.apache.flink.core.memory.DataOutputView; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import 
java.util.Map; import java.util.Objects; import java.util.stream.Collectors; /** * @deprecated everything in this class is deprecated. Those are only migration procedures from older versions. */ @Deprecated public class SharedBuffer<V> { private final Map<Tuple2<String, ValueTimeWrapper<V>>, NodeId> mappingContext; /** Run number (first block in DeweyNumber) -> EventId. */ private Map<Integer, EventId> starters; private final Map<EventId, Lockable<V>> eventsBuffer; private final Map<NodeId, Lockable<SharedBufferNode>> pages; public Map<EventId, Lockable<V>> getEventsBuffer() { return eventsBuffer; } public Map<NodeId, Lockable<SharedBufferNode>> getPages() { return pages; } public SharedBuffer( Map<EventId, Lockable<V>> eventsBuffer, Map<NodeId, Lockable<SharedBufferNode>> pages, Map<Tuple2<String, ValueTimeWrapper<V>>, NodeId> mappingContext, Map<Integer, EventId> starters) { this.eventsBuffer = eventsBuffer; this.pages = pages; this.mappingContext = mappingContext; this.starters = starters; } public NodeId getNodeId(String prevState, long timestamp, int counter, V event) { return mappingContext.get(Tuple2.of(NFAStateNameHandler.getOriginalNameFromInternal(prevState), new ValueTimeWrapper<>(event, timestamp, counter))); } public EventId getStartEventId(int run) { return starters.get(run); } /** * Wrapper for a value-timestamp pair. * * @param <V> Type of the value */ private static class ValueTimeWrapper<V> { private final V value; private final long timestamp; private final int counter; ValueTimeWrapper(final V value, final long timestamp, final int counter) { this.value = value; this.timestamp = timestamp; this.counter = counter; } /** * Returns a counter used to disambiguate between different accepted * elements with the same value and timestamp that refer to the same * looping state. 
*/ public int getCounter() { return counter; } public V getValue() { return value; } public long getTimestamp() { return timestamp; } @Override public String toString() { return "ValueTimeWrapper(" + value + ", " + timestamp + ", " + counter + ")"; } @Override public boolean equals(Object obj) { if (!(obj instanceof ValueTimeWrapper)) { return false; } @SuppressWarnings("unchecked") ValueTimeWrapper<V> other = (ValueTimeWrapper<V>) obj; return timestamp == other.getTimestamp() && Objects.equals(value, other.getValue()) && counter == other.getCounter(); } @Override public int hashCode() { return (int) (31 * (31 * (timestamp ^ timestamp >>> 32) + value.hashCode()) + counter); } public static <V> ValueTimeWrapper<V> deserialize( final TypeSerializer<V> valueSerializer, final DataInputView source) throws IOException { final V value = valueSerializer.deserialize(source); final long timestamp = source.readLong(); final int counter = source.readInt(); return new ValueTimeWrapper<>(value, timestamp, counter); } } /** * The {@link TypeSerializerConfigSnapshot} serializer configuration to be stored with the managed state. */ public static final class SharedBufferSerializerConfigSnapshot<K, V> extends CompositeTypeSerializerConfigSnapshot<SharedBuffer<V>> { private static final int VERSION = 1; /** This empty constructor is required for deserializing the configuration. */ public SharedBufferSerializerConfigSnapshot() { } public SharedBufferSerializerConfigSnapshot( final TypeSerializer<K> keySerializer, final TypeSerializer<V> valueSerializer, final TypeSerializer<DeweyNumber> versionSerializer) { super(keySerializer, valueSerializer, versionSerializer); } @Override public int getVersion() { return VERSION; } } /** * A {@link TypeSerializer} for the {@link SharedBuffer}. 
*/ public static class SharedBufferSerializer<K, V> extends TypeSerializer<SharedBuffer<V>> { private static final long serialVersionUID = -3254176794680331560L; private final TypeSerializer<K> keySerializer; private final TypeSerializer<V> valueSerializer; private final TypeSerializer<DeweyNumber> versionSerializer; public SharedBufferSerializer( final TypeSerializer<K> keySerializer, final TypeSerializer<V> valueSerializer) { this(keySerializer, valueSerializer, DeweyNumber.DeweyNumberSerializer.INSTANCE); } public SharedBufferSerializer( final TypeSerializer<K> keySerializer, final TypeSerializer<V> valueSerializer, final TypeSerializer<DeweyNumber> versionSerializer) { this.keySerializer = keySerializer; this.valueSerializer = valueSerializer; this.versionSerializer = versionSerializer; } public TypeSerializer<DeweyNumber> getVersionSerializer() { return versionSerializer; } public TypeSerializer<K> getKeySerializer() { return keySerializer; } public TypeSerializer<V> getValueSerializer() { return valueSerializer; } @Override public boolean isImmutableType() { return false; } @Override public SharedBufferSerializer<K, V> duplicate() { return new SharedBufferSerializer<>(keySerializer.duplicate(), valueSerializer.duplicate()); } @Override public SharedBuffer<V> createInstance() { throw new UnsupportedOperationException(); } @Override public SharedBuffer<V> copy(SharedBuffer<V> from) { throw new UnsupportedOperationException(); } @Override public SharedBuffer<V> copy(SharedBuffer<V> from, SharedBuffer<V> reuse) { throw new UnsupportedOperationException(); } @Override public int getLength() { return -1; } @Override public void serialize(SharedBuffer<V> record, DataOutputView target) throws IOException { throw new UnsupportedOperationException(); } @Override public SharedBuffer<V> deserialize(DataInputView source) throws IOException { List<Tuple2<NodeId, Lockable<SharedBufferNode>>> entries = new ArrayList<>(); Map<ValueTimeWrapper<V>, EventId> values = new 
HashMap<>(); Map<EventId, Lockable<V>> valuesWithIds = new HashMap<>(); Map<Tuple2<String, ValueTimeWrapper<V>>, NodeId> mappingContext = new HashMap<>(); Map<Long, Integer> totalEventsPerTimestamp = new HashMap<>(); int totalPages = source.readInt(); for (int i = 0; i < totalPages; i++) { // key of the page K stateName = keySerializer.deserialize(source); int numberEntries = source.readInt(); for (int j = 0; j < numberEntries; j++) { ValueTimeWrapper<V> wrapper = ValueTimeWrapper.deserialize(valueSerializer, source); EventId eventId = values.get(wrapper); if (eventId == null) { int id = totalEventsPerTimestamp.computeIfAbsent(wrapper.timestamp, k -> 0); eventId = new EventId(id, wrapper.timestamp); values.put(wrapper, eventId); valuesWithIds.put(eventId, new Lockable<>(wrapper.value, 1)); totalEventsPerTimestamp.computeIfPresent(wrapper.timestamp, (k, v) -> v + 1); } else { Lockable<V> eventWrapper = valuesWithIds.get(eventId); eventWrapper.lock(); } NodeId nodeId = new NodeId(eventId, (String) stateName); int refCount = source.readInt(); entries.add(Tuple2.of(nodeId, new Lockable<>(new SharedBufferNode(), refCount))); mappingContext.put(Tuple2.of((String) stateName, wrapper), nodeId); } } // read the edges of the shared buffer entries int totalEdges = source.readInt(); Map<Integer, EventId> starters = new HashMap<>(); for (int j = 0; j < totalEdges; j++) { int sourceIdx = source.readInt(); int targetIdx = source.readInt(); DeweyNumber version = versionSerializer.deserialize(source); // We've already deserialized the shared buffer entry. Simply read its ID and // retrieve the buffer entry from the list of entries Tuple2<NodeId, Lockable<SharedBufferNode>> sourceEntry = entries.get(sourceIdx); Tuple2<NodeId, Lockable<SharedBufferNode>> targetEntry = targetIdx < 0 ? 
Tuple2.of(null, null) : entries.get(targetIdx); sourceEntry.f1.getElement().addEdge(new SharedBufferEdge(targetEntry.f0, version)); if (version.length() == 1) { starters.put(version.getRun(), sourceEntry.f0.getEventId()); } } Map<NodeId, Lockable<SharedBufferNode>> entriesMap = entries.stream().collect(Collectors.toMap(e -> e.f0, e -> e.f1)); return new SharedBuffer<>(valuesWithIds, entriesMap, mappingContext, starters); } @Override public SharedBuffer<V> deserialize(SharedBuffer<V> reuse, DataInputView source) throws IOException { return deserialize(source); } @Override public void copy(DataInputView source, DataOutputView target) throws IOException { throw new UnsupportedOperationException(); } @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (obj == null || !Objects.equals(obj.getClass(), getClass())) { return false; } SharedBufferSerializer other = (SharedBufferSerializer) obj; return Objects.equals(keySerializer, other.getKeySerializer()) && Objects.equals(valueSerializer, other.getValueSerializer()) && Objects.equals(versionSerializer, other.getVersionSerializer()); } @Override public boolean canEqual(Object obj) { return true; } @Override public int hashCode() { return 37 * keySerializer.hashCode() + valueSerializer.hashCode(); } @Override public TypeSerializerConfigSnapshot<SharedBuffer<V>> snapshotConfiguration() { return new SharedBufferSerializerConfigSnapshot<>( keySerializer, valueSerializer, versionSerializer); } @Override public CompatibilityResult<SharedBuffer<V>> ensureCompatibility(TypeSerializerConfigSnapshot configSnapshot) { if (configSnapshot instanceof SharedBufferSerializerConfigSnapshot) { List<Tuple2<TypeSerializer<?>, TypeSerializerSnapshot<?>>> serializerConfigSnapshots = ((SharedBufferSerializerConfigSnapshot<?, ?>) configSnapshot).getNestedSerializersAndConfigs(); CompatibilityResult<K> keyCompatResult = CompatibilityUtil.resolveCompatibilityResult( serializerConfigSnapshots.get(0).f0, 
UnloadableDummyTypeSerializer.class, serializerConfigSnapshots.get(0).f1, keySerializer); CompatibilityResult<V> valueCompatResult = CompatibilityUtil.resolveCompatibilityResult( serializerConfigSnapshots.get(1).f0, UnloadableDummyTypeSerializer.class, serializerConfigSnapshots.get(1).f1, valueSerializer); CompatibilityResult<DeweyNumber> versionCompatResult = CompatibilityUtil.resolveCompatibilityResult( serializerConfigSnapshots.get(2).f0, UnloadableDummyTypeSerializer.class, serializerConfigSnapshots.get(2).f1, versionSerializer); if (!keyCompatResult.isRequiresMigration() && !valueCompatResult.isRequiresMigration() && !versionCompatResult.isRequiresMigration()) { return CompatibilityResult.compatible(); } else { if (keyCompatResult.getConvertDeserializer() != null && valueCompatResult.getConvertDeserializer() != null && versionCompatResult.getConvertDeserializer() != null) { return CompatibilityResult.requiresMigration( new SharedBufferSerializer<>( new TypeDeserializerAdapter<>(keyCompatResult.getConvertDeserializer()), new TypeDeserializerAdapter<>(valueCompatResult.getConvertDeserializer()), new TypeDeserializerAdapter<>(versionCompatResult.getConvertDeserializer()) )); } } } return CompatibilityResult.requiresMigration(); } } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.util;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Random;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.fs.Path;
import org.junit.Test;

import static org.junit.Assert.assertEquals;

/**
 * A JUnit test to test {@link SysInfoLinux}
 * Create the fake /proc/ information and verify the parsing and calculation
 */
public class TestSysInfoLinux {
  /**
   * LinuxResourceCalculatorPlugin with a fake timer
   */
  static class FakeLinuxResourceCalculatorPlugin extends SysInfoLinux {
    // Virtual clock, advanced manually by advanceTime() so CPU-usage
    // sampling intervals are deterministic in tests.
    long currentTime = 0;

    public FakeLinuxResourceCalculatorPlugin(String procfsMemFile,
                                             String procfsCpuFile,
                                             String procfsStatFile,
                                             String procfsNetFile,
                                             long jiffyLengthInMillis) {
      super(procfsMemFile, procfsCpuFile, procfsStatFile, procfsNetFile,
            jiffyLengthInMillis);
    }

    // Returns the fake clock instead of the real system time.
    @Override
    long getCurrentTime() {
      return currentTime;
    }

    // Advances the fake clock by 'adv' jiffies (converted to millis).
    public void advanceTime(long adv) {
      currentTime += adv * this.getJiffyLengthInMillis();
    }
  }

  // Shared plugin instance wired to the fake /proc files created below.
  private static final FakeLinuxResourceCalculatorPlugin plugin;
  // Scratch directory for the fake /proc files; '+' replaces spaces so the
  // paths stay simple.
  private static String TEST_ROOT_DIR = new Path(System.getProperty(
      "test.build.data", "/tmp")).toString().replace(' ', '+');
  private static final String FAKE_MEMFILE;
  private static final String FAKE_CPUFILE;
  private static final String FAKE_STATFILE;
  private static final String FAKE_NETFILE;
  private static final long FAKE_JIFFY_LENGTH = 10L;

  static {
    // Randomize the file names so concurrent test runs do not collide.
    int randomNum = (new Random()).nextInt(1000000000);
    FAKE_MEMFILE = TEST_ROOT_DIR + File.separator + "MEMINFO_" + randomNum;
    FAKE_CPUFILE = TEST_ROOT_DIR + File.separator + "CPUINFO_" + randomNum;
    FAKE_STATFILE = TEST_ROOT_DIR + File.separator + "STATINFO_" + randomNum;
    FAKE_NETFILE = TEST_ROOT_DIR + File.separator + "NETINFO_" + randomNum;
    plugin = new FakeLinuxResourceCalculatorPlugin(FAKE_MEMFILE, FAKE_CPUFILE,
                                                   FAKE_STATFILE, FAKE_NETFILE,
                                                   FAKE_JIFFY_LENGTH);
  }

  // Template mimicking /proc/meminfo; %d slots: MemTotal, MemFree,
  // Inactive, SwapTotal, SwapFree.
  static final String MEMINFO_FORMAT =
      "MemTotal: %d kB\n" +
      "MemFree: %d kB\n" +
      "Buffers: 138244 kB\n" +
      "Cached: 947780 kB\n" +
      "SwapCached: 142880 kB\n" +
      "Active: 3229888 kB\n" +
      "Inactive: %d kB\n" +
      "SwapTotal: %d kB\n" +
      "SwapFree: %d kB\n" +
      "Dirty: 122012 kB\n" +
      "Writeback: 0 kB\n" +
      "AnonPages: 2710792 kB\n" +
      "Mapped: 24740 kB\n" +
      "Slab: 132528 kB\n" +
      "SReclaimable: 105096 kB\n" +
      "SUnreclaim: 27432 kB\n" +
      "PageTables: 11448 kB\n" +
      "NFS_Unstable: 0 kB\n" +
      "Bounce: 0 kB\n" +
      "CommitLimit: 4125904 kB\n" +
      "Committed_AS: 4143556 kB\n" +
      "VmallocTotal: 34359738367 kB\n" +
      "VmallocUsed: 1632 kB\n" +
      "VmallocChunk: 34359736375 kB\n" +
      "HugePages_Total: 0\n" +
      "HugePages_Free: 0\n" +
      "HugePages_Rsvd: 0\n" +
      "Hugepagesize: 2048 kB";

  // Template mimicking one processor entry of /proc/cpuinfo; %s/%f slots:
  // processor id, cpu MHz, physical id, core id.
  static final String CPUINFO_FORMAT =
      "processor : %s\n" +
      "vendor_id : AuthenticAMD\n" +
      "cpu family : 15\n" +
      "model : 33\n" +
      "model name : Dual Core AMD Opteron(tm) Processor 280\n" +
      "stepping : 2\n" +
      "cpu MHz : %f\n" +
      "cache size : 1024 KB\n" +
      "physical id : %s\n" +
      "siblings : 2\n" +
      "core id : %s\n" +
      "cpu cores : 2\n" +
      "fpu : yes\n" +
      "fpu_exception : yes\n" +
      "cpuid level : 1\n" +
      "wp : yes\n" +
      "flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov " +
      "pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt lm " +
      "3dnowext 3dnow pni lahf_lm cmp_legacy\n" +
      "bogomips : 4792.41\n" +
      "TLB size : 1024 4K pages\n" +
      "clflush size : 64\n" +
      "cache_alignment : 64\n" +
      "address sizes : 40 bits physical, 48 bits virtual\n" +
      "power management: ts fid vid ttp";

  // Template mimicking /proc/stat; %d slots on the aggregate "cpu" line:
  // user, nice, system jiffies.
  static final String STAT_FILE_FORMAT =
      "cpu %d %d %d 1646495089 831319 48713 164346 0\n" +
      "cpu0 15096055 30805 3823005 411456015 206027 13 14269 0\n" +
      "cpu1 14760561 89890 6432036 408707910 456857 48074 130857 0\n" +
      "cpu2 12761169 20842 3758639 413976772 98028 411 10288 0\n" +
      "cpu3 12355207 47322 5789691 412354390 70406 213 8931 0\n" +
      "intr 114648668 20010764 2 0 945665 2 0 0 0 0 0 0 0 4 0 0 0 0 0 0\n" +
      "ctxt 242017731764\n" +
      "btime 1257808753\n" +
      "processes 26414943\n" +
      "procs_running 1\n" +
      "procs_blocked 0\n";

  // Template mimicking /proc/net/dev; %d slots: bytes read/written on
  // eth0 and eth1 (the 'lo' interface is expected to be skipped).
  static final String NETINFO_FORMAT =
      "Inter-| Receive | Transmit\n" +
      "face |bytes packets errs drop fifo frame compressed multicast|bytes packets" +
      "errs drop fifo colls carrier compressed\n" +
      " lo: 42236310 563003 0 0 0 0 0 0 42236310 563003 " +
      "0 0 0 0 0 0\n" +
      " eth0: %d 3452527 0 0 0 0 0 299787 %d 1866280 0 0 " +
      "0 0 0 0\n" +
      " eth1: %d 3152521 0 0 0 0 0 219781 %d 1866290 0 0 " +
      "0 0 0 0\n";

  /**
   * Test parsing /proc/stat and /proc/cpuinfo
   * @throws IOException
   */
  @Test
  public void parsingProcStatAndCpuFile() throws IOException {
    // Write fake /proc/cpuinfo file.
    long numProcessors = 8;
    long cpuFrequencyKHz = 2392781;
    String fileContent = "";
    for (int i = 0; i < numProcessors; i++) {
      fileContent +=
          String.format(CPUINFO_FORMAT, i, cpuFrequencyKHz / 1000D, 0, 0)
          + "\n";
    }
    File tempFile = new File(FAKE_CPUFILE);
    tempFile.deleteOnExit();
    FileWriter fWriter = new FileWriter(FAKE_CPUFILE);
    fWriter.write(fileContent);
    fWriter.close();
    assertEquals(plugin.getNumProcessors(), numProcessors);
    assertEquals(plugin.getCpuFrequency(), cpuFrequencyKHz);

    // Write fake /proc/stat file.
    long uTime = 54972994;
    long nTime = 188860;
    long sTime = 19803373;
    tempFile = new File(FAKE_STATFILE);
    tempFile.deleteOnExit();
    updateStatFile(uTime, nTime, sTime);
    assertEquals(plugin.getCumulativeCpuTime(),
                 FAKE_JIFFY_LENGTH * (uTime + nTime + sTime));
    // First sample has no previous reading, so usage is UNAVAILABLE.
    assertEquals(plugin.getCpuUsage(), (float)(CpuTimeTracker.UNAVAILABLE),0.0);

    // Advance the time and sample again to test the CPU usage calculation
    uTime += 100L;
    plugin.advanceTime(200L);
    updateStatFile(uTime, nTime, sTime);
    assertEquals(plugin.getCumulativeCpuTime(),
                 FAKE_JIFFY_LENGTH * (uTime + nTime + sTime));
    // 100 jiffies of CPU over 200 jiffies of wall time on 8 CPUs = 6.25%.
    assertEquals(plugin.getCpuUsage(), 6.25F, 0.0);

    // Advance the time and sample again. This time, we call getCpuUsage() only.
    uTime += 600L;
    plugin.advanceTime(300L);
    updateStatFile(uTime, nTime, sTime);
    assertEquals(plugin.getCpuUsage(), 25F, 0.0);

    // Advance very short period of time (one jiffy length).
    // In this case, CPU usage should not be updated.
    uTime += 1L;
    plugin.advanceTime(1L);
    updateStatFile(uTime, nTime, sTime);
    assertEquals(plugin.getCumulativeCpuTime(),
                 FAKE_JIFFY_LENGTH * (uTime + nTime + sTime));
    assertEquals(plugin.getCpuUsage(), 25F, 0.0); // CPU usage is not updated.
  }

  /**
   * Write information to fake /proc/stat file
   */
  private void updateStatFile(long uTime, long nTime, long sTime)
      throws IOException {
    FileWriter fWriter = new FileWriter(FAKE_STATFILE);
    fWriter.write(String.format(STAT_FILE_FORMAT, uTime, nTime, sTime));
    fWriter.close();
  }

  /**
   * Test parsing /proc/meminfo
   * @throws IOException
   */
  @Test
  public void parsingProcMemFile() throws IOException {
    long memTotal = 4058864L;
    long memFree = 99632L;
    long inactive = 567732L;
    long swapTotal = 2096472L;
    long swapFree = 1818480L;
    File tempFile = new File(FAKE_MEMFILE);
    tempFile.deleteOnExit();
    FileWriter fWriter = new FileWriter(FAKE_MEMFILE);
    fWriter.write(String.format(MEMINFO_FORMAT,
        memTotal, memFree, inactive, swapTotal, swapFree));
    fWriter.close();
    // /proc values are in kB; the plugin reports bytes, hence the 1024L factor.
    assertEquals(plugin.getAvailablePhysicalMemorySize(),
                 1024L * (memFree + inactive));
    assertEquals(plugin.getAvailableVirtualMemorySize(),
                 1024L * (memFree + inactive + swapFree));
    assertEquals(plugin.getPhysicalMemorySize(), 1024L * memTotal);
    assertEquals(plugin.getVirtualMemorySize(), 1024L * (memTotal + swapTotal));
  }

  // Verifies core counting for several socket/core/hyper-threading layouts
  // by varying the "physical id" and "core id" fields of the fake cpuinfo.
  @Test
  public void testCoreCounts() throws IOException {

    String fileContent = "";
    // single core, hyper threading
    long numProcessors = 2;
    long cpuFrequencyKHz = 2392781;
    for (int i = 0; i < numProcessors; i++) {
      fileContent =
          fileContent.concat(String.format(CPUINFO_FORMAT, i,
            cpuFrequencyKHz / 1000D, 0, 0));
      fileContent = fileContent.concat("\n");
    }
    writeFakeCPUInfoFile(fileContent);
    plugin.setReadCpuInfoFile(false);
    assertEquals(numProcessors, plugin.getNumProcessors());
    assertEquals(1, plugin.getNumCores());

    // single socket quad core, no hyper threading
    fileContent = "";
    numProcessors = 4;
    for (int i = 0; i < numProcessors; i++) {
      fileContent =
          fileContent.concat(String.format(CPUINFO_FORMAT, i,
            cpuFrequencyKHz / 1000D, 0, i));
      fileContent = fileContent.concat("\n");
    }
    writeFakeCPUInfoFile(fileContent);
    plugin.setReadCpuInfoFile(false);
    assertEquals(numProcessors, plugin.getNumProcessors());
    assertEquals(4, plugin.getNumCores());

    // dual socket single core, hyper threading
    fileContent = "";
    numProcessors = 4;
    for (int i = 0; i < numProcessors; i++) {
      fileContent =
          fileContent.concat(String.format(CPUINFO_FORMAT, i,
            cpuFrequencyKHz / 1000D, i / 2, 0));
      fileContent = fileContent.concat("\n");
    }
    writeFakeCPUInfoFile(fileContent);
    plugin.setReadCpuInfoFile(false);
    assertEquals(numProcessors, plugin.getNumProcessors());
    assertEquals(2, plugin.getNumCores());

    // dual socket, dual core, no hyper threading
    fileContent = "";
    numProcessors = 4;
    for (int i = 0; i < numProcessors; i++) {
      fileContent =
          fileContent.concat(String.format(CPUINFO_FORMAT, i,
            cpuFrequencyKHz / 1000D, i / 2, i % 2));
      fileContent = fileContent.concat("\n");
    }
    writeFakeCPUInfoFile(fileContent);
    plugin.setReadCpuInfoFile(false);
    assertEquals(numProcessors, plugin.getNumProcessors());
    assertEquals(4, plugin.getNumCores());

    // dual socket, dual core, hyper threading
    fileContent = "";
    numProcessors = 8;
    for (int i = 0; i < numProcessors; i++) {
      fileContent =
          fileContent.concat(String.format(CPUINFO_FORMAT, i,
            cpuFrequencyKHz / 1000D, i / 4, (i % 4) / 2));
      fileContent = fileContent.concat("\n");
    }
    writeFakeCPUInfoFile(fileContent);
    plugin.setReadCpuInfoFile(false);
    assertEquals(numProcessors, plugin.getNumProcessors());
    assertEquals(4, plugin.getNumCores());
  }

  // Writes the given content to the fake cpuinfo file, closing the writer
  // even if the write fails.
  private void writeFakeCPUInfoFile(String content) throws IOException {
    File tempFile = new File(FAKE_CPUFILE);
    FileWriter fWriter = new FileWriter(FAKE_CPUFILE);
    tempFile.deleteOnExit();
    try {
      fWriter.write(content);
    } finally {
      IOUtils.closeQuietly(fWriter);
    }
  }

  /**
   * Test parsing /proc/net/dev
   * @throws IOException
   */
  @Test
  public void parsingProcNetFile() throws IOException {
    long numBytesReadIntf1 = 2097172468L;
    long numBytesWrittenIntf1 = 1355620114L;
    long numBytesReadIntf2 = 1097172460L;
    long numBytesWrittenIntf2 = 1055620110L;
    File tempFile = new File(FAKE_NETFILE);
    tempFile.deleteOnExit();
    FileWriter fWriter = new FileWriter(FAKE_NETFILE);
    fWriter.write(String.format(NETINFO_FORMAT,
                            numBytesReadIntf1, numBytesWrittenIntf1,
                            numBytesReadIntf2, numBytesWrittenIntf2));
    fWriter.close();
    // Totals should sum the two ethernet interfaces (loopback excluded).
    assertEquals(plugin.getNetworkBytesRead(),
                 numBytesReadIntf1 + numBytesReadIntf2);
    assertEquals(plugin.getNetworkBytesWritten(),
                 numBytesWrittenIntf1 + numBytesWrittenIntf2);
  }
}
package cd.go.plugin.config.yaml;

import com.google.gson.*;
import com.thoughtworks.go.plugin.api.GoApplicationAccessor;
import com.thoughtworks.go.plugin.api.exceptions.UnhandledRequestTypeException;
import com.thoughtworks.go.plugin.api.request.DefaultGoPluginApiRequest;
import com.thoughtworks.go.plugin.api.request.GoApiRequest;
import com.thoughtworks.go.plugin.api.response.DefaultGoApiResponse;
import com.thoughtworks.go.plugin.api.response.DefaultGoPluginApiResponse;
import com.thoughtworks.go.plugin.api.response.GoApiResponse;
import com.thoughtworks.go.plugin.api.response.GoPluginApiResponse;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.hamcrest.core.Is;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

import java.io.File;
import java.io.IOException;
import java.io.StringWriter;
import java.util.Base64;
import java.util.Collections;

import static cd.go.plugin.config.yaml.ConfigRepoMessages.REQ_PLUGIN_SETTINGS_CHANGED;
import static cd.go.plugin.config.yaml.PluginSettings.DEFAULT_FILE_PATTERN;
import static cd.go.plugin.config.yaml.TestUtils.getResourceAsStream;
import static cd.go.plugin.config.yaml.TestUtils.readJsonObject;
import static com.thoughtworks.go.plugin.api.response.DefaultGoPluginApiResponse.SUCCESS_RESPONSE_CODE;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.*;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.*;

/**
 * Integration tests for {@code YamlConfigPlugin}: each test builds a GoCD
 * plugin API request (parse-content, parse-directory, settings, icon, ...),
 * feeds it through {@code plugin.handle(...)} and checks the JSON response.
 * The GoApplicationAccessor is mocked, so no real GoCD server is involved.
 */
public class YamlConfigPluginIntegrationTest {
    @Rule
    public TemporaryFolder tempDir = new TemporaryFolder();

    private YamlConfigPlugin plugin;
    private GoApplicationAccessor goAccessor;
    private JsonParser parser;

    /**
     * Creates a fresh plugin wired to a mocked accessor that answers every
     * settings request with an empty JSON object.
     */
    @Before
    public void setUp() {
        plugin = new YamlConfigPlugin();
        goAccessor = mock(GoApplicationAccessor.class);
        plugin.initializeGoApplicationAccessor(goAccessor);
        GoApiResponse settingsResponse = DefaultGoApiResponse.success("{}");
        when(goAccessor.submit(any(GoApiRequest.class))).thenReturn(settingsResponse);
        parser = new JsonParser();
    }

    /** parse-content: a single in-memory YAML file yields one parsed pipeline. */
    @Test
    public void respondsToParseContentRequest() throws Exception {
        final Gson gson = new Gson();
        DefaultGoPluginApiRequest request = new DefaultGoPluginApiRequest("configrepo", "2.0", ConfigRepoMessages.REQ_PARSE_CONTENT);

        StringWriter w = new StringWriter();
        IOUtils.copy(getResourceAsStream("examples/simple.gocd.yaml"), w);
        request.setRequestBody(gson.toJson(
                Collections.singletonMap("contents",
                        Collections.singletonMap("simple.gocd.yaml", w.toString())
                )
        ));

        GoPluginApiResponse response = plugin.handle(request);
        assertEquals(SUCCESS_RESPONSE_CODE, response.responseCode());

        JsonObject responseJsonObject = getJsonObjectFromResponse(response);
        assertNoError(responseJsonObject);
        JsonArray pipelines = responseJsonObject.get("pipelines").getAsJsonArray();
        assertThat(pipelines.size(), is(1));
        JsonObject expected = (JsonObject) readJsonObject("examples.out/simple.gocd.json");
        assertThat(responseJsonObject, is(new JsonObjectMatcher(expected)));
    }

    /** config-files: every *.gocd.yaml in the directory is listed, valid or not. */
    @Test
    public void respondsToGetConfigFiles() throws Exception {
        final Gson gson = new Gson();
        DefaultGoPluginApiRequest request = new DefaultGoPluginApiRequest("configrepo", "3.0", ConfigRepoMessages.REQ_CONFIG_FILES);

        FileUtils.copyInputStreamToFile(
                getResourceAsStream("/examples/simple.gocd.yaml"), tempDir.newFile("valid.gocd.yaml")
        );
        FileUtils.copyInputStreamToFile(
                getResourceAsStream("/examples/simple-invalid.gocd.yaml"), tempDir.newFile("invalid.gocd.yaml")
        );

        request.setRequestBody(gson.toJson(
                Collections.singletonMap("directory", tempDir.getRoot().toString())
        ));

        GoPluginApiResponse response = plugin.handle(request);
        assertEquals(SUCCESS_RESPONSE_CODE, response.responseCode());
        JsonArray files = getJsonObjectFromResponse(response).get("files").getAsJsonArray();
        assertThat(files.size(), is(2));
        assertTrue(files.contains(new JsonPrimitive("valid.gocd.yaml")));
        assertTrue(files.contains(new JsonPrimitive("invalid.gocd.yaml")));
    }

    /** get-configuration responds with success. */
    @Test
    public void shouldRespondSuccessToGetConfigurationRequest() throws UnhandledRequestTypeException {
        DefaultGoPluginApiRequest getConfigRequest = new DefaultGoPluginApiRequest("configrepo", "1.0", "go.plugin-settings.get-configuration");
        GoPluginApiResponse response = plugin.handle(getConfigRequest);
        assertThat(response.responseCode(), is(SUCCESS_RESPONSE_CODE));
    }

    /** get-configuration exposes the file_pattern setting with its metadata. */
    @Test
    public void shouldContainFilePatternInResponseToGetConfigurationRequest() throws UnhandledRequestTypeException {
        DefaultGoPluginApiRequest getConfigRequest = new DefaultGoPluginApiRequest("configrepo", "1.0", "go.plugin-settings.get-configuration");
        GoPluginApiResponse response = plugin.handle(getConfigRequest);
        assertThat(response.responseCode(), is(SUCCESS_RESPONSE_CODE));
        JsonObject responseJsonObject = getJsonObjectFromResponse(response);
        JsonElement pattern = responseJsonObject.get("file_pattern");
        assertNotNull(pattern);
        JsonObject patternAsJsonObject = pattern.getAsJsonObject();
        assertThat(patternAsJsonObject.get("display-name").getAsString(), is("Go YAML files pattern"));
        assertThat(patternAsJsonObject.get("default-value").getAsString(), is("**/*.gocd.yaml,**/*.gocd.yml"));
        assertThat(patternAsJsonObject.get("required").getAsBoolean(), is(false));
        assertThat(patternAsJsonObject.get("secure").getAsBoolean(), is(false));
        assertThat(patternAsJsonObject.get("display-order").getAsInt(), is(0));
    }

    /** get-view responds with success. */
    @Test
    public void shouldRespondSuccessToGetViewRequest() throws UnhandledRequestTypeException {
        DefaultGoPluginApiRequest getConfigRequest = new DefaultGoPluginApiRequest("configrepo", "1.0", "go.plugin-settings.get-view");
        GoPluginApiResponse response = plugin.handle(getConfigRequest);
        assertThat(response.responseCode(), is(SUCCESS_RESPONSE_CODE));
    }

    /** validate-configuration responds with success. */
    @Test
    public void shouldRespondSuccessToValidateConfigRequest() throws UnhandledRequestTypeException {
        DefaultGoPluginApiRequest validateRequest = new DefaultGoPluginApiRequest("configrepo", "1.0", "go.plugin-settings.validate-configuration");
        GoPluginApiResponse response = plugin.handle(validateRequest);
        assertThat(response.responseCode(), is(SUCCESS_RESPONSE_CODE));
    }

    /** parse-directory on an empty directory succeeds with no errors. */
    @Test
    public void shouldRespondSuccessToParseDirectoryRequestWhenEmpty() throws UnhandledRequestTypeException {
        GoPluginApiResponse response = parseAndGetResponseForDir(tempDir.getRoot());
        assertThat(response.responseCode(), is(SUCCESS_RESPONSE_CODE));
        JsonObject responseJsonObject = getJsonObjectFromResponse(response);
        assertNoError(responseJsonObject);
    }

    /** parse-directory on the "simple" example matches the golden JSON output. */
    @Test
    public void shouldRespondSuccessToParseDirectoryRequestWhenSimpleCaseFile() throws UnhandledRequestTypeException, IOException {
        GoPluginApiResponse response = parseAndGetResponseForDir(setupCase("simple"));
        assertThat(response.responseCode(), is(SUCCESS_RESPONSE_CODE));
        JsonObject responseJsonObject = getJsonObjectFromResponse(response);
        assertNoError(responseJsonObject);
        JsonArray pipelines = responseJsonObject.get("pipelines").getAsJsonArray();
        assertThat(pipelines.size(), is(1));
        JsonObject expected = (JsonObject) readJsonObject("examples.out/simple.gocd.json");
        assertThat(responseJsonObject, is(new JsonObjectMatcher(expected)));
    }

    /** parse-directory on the "rich" example matches the golden JSON output. */
    @Test
    public void shouldRespondSuccessToParseDirectoryRequestWhenRichCaseFile() throws UnhandledRequestTypeException, IOException {
        GoPluginApiResponse response = parseAndGetResponseForDir(setupCase("rich"));
        assertThat(response.responseCode(), is(SUCCESS_RESPONSE_CODE));
        JsonObject responseJsonObject = getJsonObjectFromResponse(response);
        assertNoError(responseJsonObject);
        JsonArray pipelines = responseJsonObject.get("pipelines").getAsJsonArray();
        assertThat(pipelines.size(), is(1));
        JsonObject expected = (JsonObject) readJsonObject("examples.out/rich.gocd.json");
        assertThat(responseJsonObject, is(new JsonObjectMatcher(expected)));
    }

    /** parse-directory on a format_version 9 repo with whitelist/includes. */
    @Test
    public void shouldRespondSuccessToParseDirectoryRequestWhenFormat9WithWhitelistAndIncludes() throws UnhandledRequestTypeException, IOException {
        GoPluginApiResponse response = parseAndGetResponseForDir(setupCase("format-version-9"));
        assertThat(response.responseCode(), is(SUCCESS_RESPONSE_CODE));
        JsonObject responseJsonObject = getJsonObjectFromResponse(response);
        assertNoError(responseJsonObject);
        JsonArray pipelines = responseJsonObject.get("pipelines").getAsJsonArray();
        assertThat(pipelines.size(), is(2));
        JsonObject expected = (JsonObject) readJsonObject("examples.out/format-version-9.gocd.json");
        assertThat(responseJsonObject, is(new JsonObjectMatcher(expected)));
    }

    /** parse-directory on a format_version 10 repo with whitelist/includes. */
    @Test
    public void shouldRespondSuccessToParseDirectoryRequestWhenFormat10WithWhitelistAndIncludes() throws UnhandledRequestTypeException, IOException {
        GoPluginApiResponse response = parseAndGetResponseForDir(setupCase("format-version-10"));
        assertThat(response.responseCode(), is(SUCCESS_RESPONSE_CODE));
        JsonObject responseJsonObject = getJsonObjectFromResponse(response);
        assertNoError(responseJsonObject);
        JsonArray pipelines = responseJsonObject.get("pipelines").getAsJsonArray();
        assertThat(pipelines.size(), is(2));
        JsonObject expected = (JsonObject) readJsonObject("examples.out/format-version-10.gocd.json");
        assertThat(responseJsonObject, is(new JsonObjectMatcher(expected)));
    }

    /** An invalid pipeline still yields HTTP-success but reports a parse error. */
    @Test
    public void shouldRespondSuccessWithErrorMessagesToParseDirectoryRequestWhenSimpleInvalidCaseFile() throws UnhandledRequestTypeException, IOException {
        GoPluginApiResponse response = parseAndGetResponseForDir(setupCase("simple-invalid"));
        assertThat(response.responseCode(), is(SUCCESS_RESPONSE_CODE));
        JsonObject responseJsonObject = getJsonObjectFromResponse(response);
        JsonArray pipelines = responseJsonObject.get("pipelines").getAsJsonArray();
        assertThat(pipelines.size(), is(0));
        assertFirstError(responseJsonObject, "Failed to parse pipeline pipe1; expected a hash of pipeline materials", "simple-invalid.gocd.yaml");
    }

    /** Duplicate YAML keys are reported with line/column position. */
    @Test
    public void shouldRespondSuccessWithErrorMessagesToParseDirectoryRequestWhenDuplicateKeysCaseFile() throws UnhandledRequestTypeException, IOException {
        GoPluginApiResponse response = parseAndGetResponseForDir(setupCase("duplicate-materials"));
        assertThat(response.responseCode(), is(SUCCESS_RESPONSE_CODE));
        JsonObject responseJsonObject = getJsonObjectFromResponse(response);
        JsonArray pipelines = responseJsonObject.get("pipelines").getAsJsonArray();
        assertThat(pipelines.size(), is(0));
        assertFirstError(responseJsonObject, "Line 9, column 20: Duplicate key found 'upstream'", "duplicate-materials.gocd.yaml");
    }

    /** Malformed YAML surfaces the underlying parser error message. */
    @Test
    public void shouldRespondSuccessWithErrorMessagesToParseDirectoryRequestWhenParsingErrorCaseFile() throws UnhandledRequestTypeException, IOException {
        GoPluginApiResponse response = parseAndGetResponseForDir(setupCase("invalid-materials"));
        assertThat(response.responseCode(), is(SUCCESS_RESPONSE_CODE));
        JsonObject responseJsonObject = getJsonObjectFromResponse(response);
        JsonArray pipelines = responseJsonObject.get("pipelines").getAsJsonArray();
        assertThat(pipelines.size(), is(0));
        assertFirstError(responseJsonObject, "Error parsing YAML. : Line 21, column 0: Expected a 'block end' but found: scalar : ", "invalid-materials.gocd.yaml");
    }

    /** Missing "directory" field is a bad request. */
    @Test
    public void shouldRespondBadRequestToParseDirectoryRequestWhenDirectoryIsNotSpecified() throws UnhandledRequestTypeException {
        DefaultGoPluginApiRequest parseDirectoryRequest = new DefaultGoPluginApiRequest("configrepo", "1.0", "parse-directory");
        String requestBody = "{\n" +
                " \"configurations\":[]\n" +
                "}";
        parseDirectoryRequest.setRequestBody(requestBody);

        GoPluginApiResponse response = plugin.handle(parseDirectoryRequest);
        assertThat(response.responseCode(), is(DefaultGoPluginApiResponse.BAD_REQUEST));
    }

    /** A custom file_pattern in the request configurations narrows parsing. */
    @Test
    public void shouldParseDirectoryWithCustomPatternWhenInConfigurations() throws UnhandledRequestTypeException, IOException {
        File simpleCaseDir = setupCase("simple", "go.yml");
        DefaultGoPluginApiRequest parseDirectoryRequest = new DefaultGoPluginApiRequest("configrepo", "1.0", "parse-directory");
        String requestBody = "{\n" +
                " \"directory\":\"" + simpleCaseDir + "\",\n" +
                " \"configurations\":[" +
                "{" +
                "\"key\" : \"file_pattern\"," +
                "\"value\" : \"simple.go.yml\" " +
                "}" +
                "]\n" +
                "}";
        parseDirectoryRequest.setRequestBody(requestBody);

        GoPluginApiResponse response = plugin.handle(parseDirectoryRequest);
        assertThat(response.responseCode(), is(SUCCESS_RESPONSE_CODE));
        JsonObject responseJsonObject = getJsonObjectFromResponse(response);
        assertNoError(responseJsonObject);
        JsonArray pipelines = responseJsonObject.get("pipelines").getAsJsonArray();
        assertThat(pipelines.size(), is(1));
    }

    /** Null request body is a bad request. */
    @Test
    public void shouldRespondBadRequestToParseDirectoryRequestWhenRequestBodyIsNull() throws UnhandledRequestTypeException {
        DefaultGoPluginApiRequest parseDirectoryRequest = new DefaultGoPluginApiRequest("configrepo", "1.0", "parse-directory");
        String requestBody = null;
        parseDirectoryRequest.setRequestBody(requestBody);

        GoPluginApiResponse response = plugin.handle(parseDirectoryRequest);
        assertThat(response.responseCode(), is(DefaultGoPluginApiResponse.BAD_REQUEST));
    }

    /** Empty JSON body (no directory) is a bad request. */
    @Test
    public void shouldRespondBadRequestToParseDirectoryRequestWhenRequestBodyIsEmpty() throws UnhandledRequestTypeException {
        DefaultGoPluginApiRequest parseDirectoryRequest = new DefaultGoPluginApiRequest("configrepo", "1.0", "parse-directory");
        parseDirectoryRequest.setRequestBody("{}");

        GoPluginApiResponse response = plugin.handle(parseDirectoryRequest);
        assertThat(response.responseCode(), is(DefaultGoPluginApiResponse.BAD_REQUEST));
    }

    /** Syntactically invalid JSON body is a bad request. */
    @Test
    public void shouldRespondBadRequestToParseDirectoryRequestWhenRequestBodyIsNotJson() throws UnhandledRequestTypeException {
        DefaultGoPluginApiRequest parseDirectoryRequest = new DefaultGoPluginApiRequest("configrepo", "1.0", "parse-directory");
        parseDirectoryRequest.setRequestBody("{bla");

        GoPluginApiResponse response = plugin.handle(parseDirectoryRequest);
        assertThat(response.responseCode(), is(DefaultGoPluginApiResponse.BAD_REQUEST));
    }

    /** plugin-settings-changed updates the in-memory file pattern. */
    @Test
    public void shouldConsumePluginSettingsOnConfigChangeRequest() throws UnhandledRequestTypeException {
        DefaultGoPluginApiRequest request = new DefaultGoPluginApiRequest("configrepo", "2.0", REQ_PLUGIN_SETTINGS_CHANGED);
        request.setRequestBody("{\"file_pattern\": \"*.foo.gocd.yaml\"}");
        assertEquals(DEFAULT_FILE_PATTERN, plugin.getFilePattern());

        GoPluginApiResponse response = plugin.handle(request);

        assertEquals("*.foo.gocd.yaml", plugin.getFilePattern());
        assertThat(response.responseCode(), is(SUCCESS_RESPONSE_CODE));
    }

    /** parse-directory fetches plugin settings exactly once via the accessor. */
    @Test
    public void shouldRespondSuccessToParseDirectoryRequestWhenPluginHasConfiguration() throws UnhandledRequestTypeException {
        GoApiResponse settingsResponse = DefaultGoApiResponse.success("{}");
        when(goAccessor.submit(any(GoApiRequest.class))).thenReturn(settingsResponse);

        GoPluginApiResponse response = parseAndGetResponseForDir(tempDir.getRoot());

        verify(goAccessor, times(1)).submit(any(GoApiRequest.class));
        assertThat(response.responseCode(), is(SUCCESS_RESPONSE_CODE));
    }

    /** The parse-directory response JSON carries the four expected top-level fields. */
    @Test
    public void shouldContainValidFieldsInResponseMessage() throws UnhandledRequestTypeException {
        GoApiResponse settingsResponse = DefaultGoApiResponse.success("{}");
        when(goAccessor.submit(any(GoApiRequest.class))).thenReturn(settingsResponse);

        GoPluginApiResponse response = parseAndGetResponseForDir(tempDir.getRoot());

        assertThat(response.responseCode(), is(SUCCESS_RESPONSE_CODE));
        final JsonParser parser = new JsonParser();
        JsonElement responseObj = parser.parse(response.responseBody());
        assertTrue(responseObj.isJsonObject());
        JsonObject obj = responseObj.getAsJsonObject();
        assertTrue(obj.has("errors"));
        assertTrue(obj.has("pipelines"));
        assertTrue(obj.has("environments"));
        assertTrue(obj.has("target_version"));
    }

    /** YAML anchors/aliases are resolved; output matches the golden JSON. */
    @Test
    public void shouldRespondSuccessToParseDirectoryRequestWhenAliasesCaseFile() throws UnhandledRequestTypeException, IOException {
        GoPluginApiResponse response = parseAndGetResponseForDir(setupCase("aliases"));
        assertThat(response.responseCode(), is(SUCCESS_RESPONSE_CODE));
        JsonObject responseJsonObject = getJsonObjectFromResponse(response);
        assertNoError(responseJsonObject);
        JsonArray pipelines = responseJsonObject.get("pipelines").getAsJsonArray();
        assertThat(pipelines.size(), is(1));
        JsonObject expected = (JsonObject) readJsonObject("examples.out/aliases.gocd.json");
        assertThat(responseJsonObject, is(new JsonObjectMatcher(expected)));
    }

    /** Identical format_version across files is accepted. */
    @Test
    public void shouldUpdateTargetVersionWhenItIsTheSameAcrossAllFiles() throws Exception {
        FileUtils.copyInputStreamToFile(getResourceAsStream("/parts/roots/version_2.yaml"), tempDir.newFile("v2_1.gocd.yaml"));
        FileUtils.copyInputStreamToFile(getResourceAsStream("/parts/roots/version_2.yaml"), tempDir.newFile("v2_2.gocd.yaml"));

        GoPluginApiResponse response = parseAndGetResponseForDir(tempDir.getRoot());
        assertNoError(getJsonObjectFromResponse(response));
    }

    /** Version 1 and files with no version (implicit default) are compatible. */
    @Test
    public void shouldUpdateTargetVersionWhenItIsTheDefaultOrMissingAcrossAllPipelinesAndEnvironments() throws Exception {
        FileUtils.copyInputStreamToFile(getResourceAsStream("/parts/roots/version_1.yaml"), tempDir.newFile("v1_1.gocd.yaml"));
        FileUtils.copyInputStreamToFile(getResourceAsStream("/parts/roots/version_not_present.yaml"), tempDir.newFile("v1_not_present.gocd.yaml"));
        FileUtils.copyInputStreamToFile(getResourceAsStream("/parts/roots/version_1.yaml"), tempDir.newFile("v1_2.gocd.yaml"));

        GoPluginApiResponse response = parseAndGetResponseForDir(tempDir.getRoot());
        assertNoError(getJsonObjectFromResponse(response));
    }

    /** Mixed format_versions across files must be rejected with an error. */
    @Test
    public void shouldFailToUpdateTargetVersionWhenItIs_NOT_TheSameAcrossAllFiles() throws Exception {
        FileUtils.copyInputStreamToFile(getResourceAsStream("/parts/roots/version_1.yaml"), tempDir.newFile("v1_1.gocd.yaml"));
        FileUtils.copyInputStreamToFile(getResourceAsStream("/parts/roots/version_1.yaml"), tempDir.newFile("v1_2.gocd.yaml"));
        FileUtils.copyInputStreamToFile(getResourceAsStream("/parts/roots/version_2.yaml"), tempDir.newFile("v2_1.gocd.yaml"));

        GoPluginApiResponse response = parseAndGetResponseForDir(tempDir.getRoot());

        String expectedFailureMessage = "java.lang.RuntimeException: Versions across files are not unique. Found" +
                " versions: [1, 2]. There can only be one version across the whole repository.";
        assertFirstError(getJsonObjectFromResponse(response), expectedFailureMessage, "YAML config plugin");
    }

    /** get-capabilities returns the serialized Capabilities object verbatim. */
    @Test
    public void shouldRespondWithCapabilities() throws UnhandledRequestTypeException {
        String expected = new Gson().toJson(new Capabilities());
        DefaultGoPluginApiRequest request = new DefaultGoPluginApiRequest("configrepo", "2.0", "get-capabilities");

        GoPluginApiResponse response = plugin.handle(request);

        assertThat(response.responseCode(), is(SUCCESS_RESPONSE_CODE));
        assertThat(response.responseBody(), is(expected));
    }

    /** get-icon returns the bundled SVG as base64 with its content type. */
    @Test
    public void shouldRespondWithGetIcon() throws UnhandledRequestTypeException, IOException {
        DefaultGoPluginApiRequest request = new DefaultGoPluginApiRequest("configrepo", "2.0", "get-icon");

        GoPluginApiResponse response = plugin.handle(request);
        assertThat(response.responseCode(), is(SUCCESS_RESPONSE_CODE));
        JsonObject jsonObject = getJsonObjectFromResponse(response);
        assertEquals(jsonObject.entrySet().size(), 2);
        assertEquals(jsonObject.get("content_type").getAsString(), "image/svg+xml");
        byte[] actualData = Base64.getDecoder().decode(jsonObject.get("data").getAsString());
        byte[] expectedData = IOUtils.toByteArray(getClass().getResourceAsStream("/yaml.svg"));
        assertArrayEquals(expectedData, actualData);
    }

    // Copies examples/<caseName>.gocd.yaml into the temp dir under the
    // default extension and returns the directory.
    private File setupCase(String caseName) throws IOException {
        return setupCase(caseName, "gocd.yaml");
    }

    // Same as above but lets the test choose the destination extension.
    private File setupCase(String caseName, String extension) throws IOException {
        File simpleFile = tempDir.newFile(caseName + "." + extension);
        FileUtils.copyInputStreamToFile(getResourceAsStream("examples/" + caseName + ".gocd.yaml"), simpleFile);
        return tempDir.getRoot();
    }

    // Builds a parse-directory request for the given directory (empty
    // configurations) and returns the plugin's response.
    private GoPluginApiResponse parseAndGetResponseForDir(File directory) throws UnhandledRequestTypeException {
        DefaultGoPluginApiRequest parseDirectoryRequest = new DefaultGoPluginApiRequest("configrepo", "1.0", "parse-directory");
        String requestBody = "{\n" +
                " \"directory\":\"" + directory + "\",\n" +
                " \"configurations\":[]\n" +
                "}";
        parseDirectoryRequest.setRequestBody(requestBody);

        return plugin.handle(parseDirectoryRequest);
    }

    // Asserts that the response's "errors" array is empty.
    private void assertNoError(JsonObject responseJsonObject) {
        assertThat(responseJsonObject.get("errors"), Is.<JsonElement>is(new JsonArray()));
    }

    // Asserts message and location of the first entry in the "errors" array.
    private void assertFirstError(JsonObject responseJsonObject, String expectedMessage, String expectedLocation) {
        JsonArray errors = (JsonArray) responseJsonObject.get("errors");
        assertThat(errors.get(0).getAsJsonObject().getAsJsonPrimitive("message").getAsString(), is(expectedMessage));
        assertThat(errors.get(0).getAsJsonObject().getAsJsonPrimitive("location").getAsString(), is(expectedLocation));
    }

    // Parses the response body into a JsonObject.
    private JsonObject getJsonObjectFromResponse(GoPluginApiResponse response) {
        String responseBody = response.responseBody();
        return parser.parse(responseBody).getAsJsonObject();
    }
}
/*
 * BSD 3-Clause License
 *
 * Copyright (c) 2015-2020, Jianping Zeng.
 * All rights reserved.
 *
 * Please refer the LICENSE for detail.
 */

package backend.mc;

import backend.target.TID;

/**
 * Describes a single target machine instruction: its opcode, operand counts,
 * scheduling class, encoding size, behavioral flags, and the registers it
 * implicitly reads or writes. Instances are immutable in practice (fields are
 * populated once by generated target description tables).
 *
 * @author Jianping Zeng
 * @version 0.4
 */
public class MCInstrDesc {
  /**
   * The opcode of this instruction specified with target machine.
   */
  public int opCode;
  /**
   * Assembly language mnemonic for the opcode.
   */
  public String name;
  /**
   * Number of args; -1 if variable #args
   */
  public int numOperands;
  /**
   * Number of args that are definitions.
   */
  public int numDefs;
  /**
   * enum identifying instr sched class.
   */
  public int schedClass;
  /**
   * How many bytes of this instruction encoding.
   */
  public int size;
  /**
   * flags identifying machine instr class; bit positions are the
   * {@link TID} constants tested by the predicates below.
   */
  public int flags;
  /**
   * Target Specific Flag values
   */
  public long tSFlags;
  /**
   * Registers implicitly read by this instr
   */
  public int[] implicitUses;
  /**
   * Registers implicitly defined by this instr
   */
  public int[] implicitDefs;
  /**
   * Reg classes completely "clobbered".
   */
  public MCRegisterClass[] rcBarriers;
  /**
   * {@link #numOperands} entries about operands.
   */
  public MCOperandInfo[] opInfo;

  /**
   * The constructor that creates an instance of class {@linkplain MCInstrDesc}
   * with the specified several parameters.
   *
   * @param opcode      The opcode.
   * @param numOperands The number of operands are desired.
   * @param numDefs     The number of operand defined by this instruction.
   * @param schedClass  The scheduling class identifier.
   * @param size        The encoding size in bytes.
   * @param name        The instruction mnemonic.
   * @param flags       The flags indicating machine instruction class.
   * @param tSFlags     The target-specified flags.
   * @param implUses    The implicitly used register.
   * @param implDefs    The implicit registers defined by this instruction.
   * @param rcBarriers  Register classes completely clobbered by this instruction.
   * @param opInfo      Per-operand description entries.
   */
  public MCInstrDesc(int opcode, int numOperands, int numDefs, int schedClass,
                     int size, String name, int flags, long tSFlags,
                     int[] implUses, int[] implDefs,
                     MCRegisterClass[] rcBarriers, MCOperandInfo[] opInfo) {
    opCode = opcode;
    this.numOperands = numOperands;
    this.numDefs = numDefs;
    this.schedClass = schedClass;
    this.size = size;
    this.name = name;
    this.flags = flags;
    this.tSFlags = tSFlags;
    implicitUses = implUses;
    implicitDefs = implDefs;
    this.rcBarriers = rcBarriers;
    this.opInfo = opInfo;
  }

  /**
   * Get an operand tied to defined operand. The specified opNum is
   * index to general operand.
   *
   * @param opNum      index of the operand to query
   * @param constraint the constraint kind being queried
   * @return the constraint value, or -1 if the operand has no such constraint
   */
  public int getOperandConstraint(int opNum, int constraint) {
    if (opNum < numOperands &&
        (opInfo[opNum].constraints & (1 << constraint)) != 0) {
      // Constraint values are packed as 4-bit fields starting at bit 16.
      int pos = 16 + constraint * 4;
      return (opInfo[opNum].constraints >> pos) & 0xf;
    }
    return -1;
  }

  public int getOpcode() { return opCode; }

  public int getSize() { return size; }

  public String getName() { return name; }

  public int getNumOperands() { return numOperands; }

  public int getNumDefs() { return numDefs; }

  public boolean isVariadic() { return (flags & (1 << TID.Variadic)) != 0; }

  public boolean hasOptionalDef() { return (flags & (1 << TID.OptionalDef)) != 0; }

  public int[] getImplicitUses() { return implicitUses; }

  public int[] getImplicitDefs() { return implicitDefs; }

  /**
   * Checks whether {@code reg} appears in the implicit-use list.
   */
  public boolean hasImplicitUseOfPhysReg(int reg) {
    if (implicitUses != null)
      for (int use : implicitUses)
        if (use == reg)
          return true;
    return false;
  }

  /**
   * Checks whether {@code reg} appears in the implicit-def list.
   * NOTE: the method name keeps its historical typo ("UDef") so existing
   * callers remain source-compatible.
   */
  public boolean hasImplicitUDefOfPhysReg(int reg) {
    if (implicitDefs != null)
      for (int def : implicitDefs)
        if (def == reg)
          return true;
    return false;
  }

  public MCRegisterClass[] getRegClassBarriers() { return rcBarriers; }

  public int getSchedClass() { return schedClass; }

  public boolean isReturn() { return (flags & (1 << TID.Return)) != 0; }

  public boolean isCall() { return (flags & (1 << TID.Call)) != 0; }

  public boolean isBarrier() { return (flags & (1 << TID.Barrier)) != 0; }

  public boolean isTerminator() { return (flags & (1 << TID.Terminator)) != 0; }

  public boolean isBranch() { return (flags & (1 << TID.Branch)) != 0; }

  public boolean isIndirectBranch() { return (flags & (1 << TID.IndirectBranch)) != 0; }

  /**
   * A conditional branch is a direct branch that does not end the block
   * unconditionally. (Fixed: was accidental non-short-circuit '&'.)
   */
  public boolean isConditionalBranch() {
    return isBranch() && !isBarrier() && !isIndirectBranch();
  }

  /**
   * An unconditional branch is a direct branch that always terminates
   * its block. (Fixed: was accidental non-short-circuit '&'.)
   */
  public boolean isUnconditionalBranch() {
    return isBranch() && isBarrier() && !isIndirectBranch();
  }

  public boolean isPredicable() { return (flags & (1 << TID.Predicable)) != 0; }

  public boolean isNotDuplicable() { return (flags & (1 << TID.NotDuplicable)) != 0; }

  public boolean hasDelaySlot() { return (flags & (1 << TID.DelaySlot)) != 0; }

  public boolean canFoldAsLoad() { return (flags & (1 << TID.FoldAsLoad)) != 0; }

  public boolean mayLoad() { return (flags & (1 << TID.MayLoad)) != 0; }

  public boolean mayStore() { return (flags & (1 << TID.MayStore)) != 0; }

  public boolean hasUnmodeledSideEffects() { return (flags & (1 << TID.SideEffects)) != 0; }

  public boolean isCommutable() { return (flags & (1 << TID.Commutable)) != 0; }

  public boolean isConvertibleTo3Addr() { return (flags & (1 << TID.ConvertibleToThreeAddress)) != 0; }

  public boolean usesCustomInsertionHook() { return (flags & (1 << TID.UsesCustomInserter)) != 0; }

  public boolean isRematerializable() { return (flags & (1 << TID.ReMaterializable)) != 0; }

  public boolean isAsCheapAsAMove() { return (flags & (1 << TID.CheapAsAMove)) != 0; }
}
/*******************************************************************************
 * Code contributed to the webinos project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Copyright 2012 Ziran Sun, Samsung Electronics(UK) Ltd
 ******************************************************************************/
package org.webinos.impl.discovery;

import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Set;
import java.util.ArrayList;

import android.util.Log;
import android.os.Handler;
import android.os.Looper;

import org.meshpoint.anode.AndroidContext;
import org.meshpoint.anode.idl.Dictionary;
import org.meshpoint.anode.module.IModule;
import org.meshpoint.anode.module.IModuleContext;

import org.webinos.api.PendingOperation;
import org.webinos.api.discovery.DiscoveryManager;
import org.webinos.api.discovery.Filter;
import org.webinos.api.discovery.FindCallback;
import org.webinos.api.discovery.Options;
import org.webinos.api.discovery.Service;
import org.webinos.api.discovery.ServiceType;

import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothSocket;
import android.content.Context;

/**
 * Webinos discovery module for a Zephyr HxM Bluetooth heart-rate monitor
 * (HRM). Connects to an already-paired device whose name starts with "HXM",
 * reads the device's framed messages off an RFCOMM socket, and reports
 * decoded readings to callers through {@link FindCallback#onFound}.
 */
public class DiscoveryHRMImpl extends DiscoveryManager implements IModule {

	private Context androidContext;
	private BluetoothAdapter mBluetoothAdapter;
	private static final String TAG = "org.webinos.impl.DiscoveryHRMImpl";
	// Debug logging switch; guards most verbose Log calls below.
	private static final boolean D = true;

	/* hard coded array length */
	ArrayList<BluetoothDevice> devicesAvailable = new ArrayList<BluetoothDevice>(10);
	ArrayList<BluetoothDevice> devicesFound = new ArrayList<BluetoothDevice>(10);
	// Shared service object that carries the latest decoded readings
	// (srv.values) back to listeners via FindCallback.onFound.
	DiscoveryServiceImpl srv = new DiscoveryServiceImpl();

	//DEMO parameters
	private BluetoothSocket mmSocket;
	private String mHxMName = null;
	private String mHxMAddress = null;
	private ConnectedThread mConnectedThread = null;

	/*****************************
	 * DiscoveryManager methods
	 *****************************/

	/**
	 * Starts an asynchronous search for HRM services.
	 *
	 * @param serviceType  required; the method returns null if it is missing
	 * @param findCallback invoked with results from the background thread
	 * @param options      unused by this implementation
	 * @param filter       unused by this implementation
	 * @return a PendingOperation that can cancel the background search,
	 *         or null when no serviceType was supplied
	 */
	@Override
	public synchronized PendingOperation findServices(
			ServiceType serviceType,
			FindCallback findCallback,
			Options options,
			Filter filter) {
		if(D) Log.v(TAG, "DiscoveryHRMImpl: findservices");
		if(serviceType == null) {
			Log.e(TAG, "DiscoveryHRMImpl: Please specify a serviceType");
			return null;
		}
		// The actual Bluetooth work happens on a dedicated thread; the
		// returned PendingOperation wraps the thread for cancellation.
		DiscoveryRunnable bluetoothFindService = new BluetoothFindService(serviceType, findCallback, options, filter);
		Thread t = new Thread(bluetoothFindService);
		t.start();
		Log.v(TAG, "findServices - thread started with id "+(int)t.getId());
		return new DiscoveryPendingOperation(t, bluetoothFindService);
	}

	/** Advertisement is not implemented for Bluetooth HRM discovery. */
	public void advertServices(String serviceType){
		//start advertisement
	}

	/** Not applicable for Bluetooth discovery; always returns null. */
	public String getServiceId(String serviceType){
		// TODO Auto-generated method stub - this probably is not applicable for BT discovery
		return null;
	}

	/** Returns a fresh, empty discovery service object. */
	public Service createService(){
		DiscoveryServiceImpl srv = new DiscoveryServiceImpl();
		return srv;
	}

	/*****************************
	 * IModule methods
	 *****************************/

	/**
	 * Module start-up: caches the Android context and acquires the default
	 * Bluetooth adapter. Returns null (module unusable) when Bluetooth is
	 * unavailable or disabled; returns this module instance on success.
	 */
	@Override
	public Object startModule(IModuleContext ctx) {
		if(D) Log.v(TAG, "DiscoveryHRMImpl: startModule");
		androidContext = ((AndroidContext)ctx).getAndroidContext();
		mBluetoothAdapter = getDefaultBluetoothAdapter();
		if (mBluetoothAdapter == null) {
			if(D) Log.e(TAG, "Bluetooth is not available");
			return null;
		}
		else{
			if (!mBluetoothAdapter.isEnabled()){
				if(D) Log.d(TAG, "Bluetooth is not enabled");
				// TODO start UI activity to enable Bluetooth
				return null;
			}
			else {
				Log.d(TAG, "Found Bluetooth adapter");
				return this;
			}
		}
	}

	/** Module shutdown: stops any running reader thread. */
	@Override
	public void stopModule() {
		/*
		 * perform any module shutdown here ...
		 */
		if (mConnectedThread != null) {
			mConnectedThread.cancel();
			mConnectedThread = null;
		}
		Log.v(TAG, "DiscoveryHRMImpl: stopModule");
	}

	/*****************************
	 * Helpers
	 *****************************/

	/**
	 * Fetches the default BluetoothAdapter. Pre-Jelly-Bean Android required
	 * this call to happen on the main (UI) thread, so when called from any
	 * other thread the lookup is posted to the main looper and the caller
	 * busy-waits (with a 1s wait/notify cycle) until the result arrives.
	 * May return null when the device has no Bluetooth adapter.
	 */
	private static BluetoothAdapter getDefaultBluetoothAdapter() {
		// Check if the calling thread is the main application thread,
		// if it is, do it directly.
		if (Thread.currentThread().equals(Looper.getMainLooper().getThread())) {
			Log.v(TAG, "main thread - get bluetooth");
			return BluetoothAdapter.getDefaultAdapter();
		}

		// If the calling thread, isn't the main application thread,
		// then get the main application thread to return the default adapter.
		final ArrayList<BluetoothAdapter> adapters = new ArrayList<BluetoothAdapter>(1);
		final Object mutex = new Object();

		Handler handler = new Handler(Looper.getMainLooper());
		handler.post(new Runnable() {
			@Override
			public void run() {
				adapters.add(BluetoothAdapter.getDefaultAdapter());
				synchronized (mutex) {
					mutex.notify();
				}
			}
		});

		while (adapters.isEmpty()) {
			Log.d(TAG, "wait for adapter");
			synchronized (mutex) {
				try {
					mutex.wait(1000L);
				} catch (InterruptedException e) {
					Log.e(TAG, "Interrupted while waiting for default bluetooth adapter", e);
				}
			}
		}

		if (adapters.get(0) == null) {
			Log.e(TAG, "No bluetooth adapter found!");
		}
		return adapters.get(0);
	}

	/**
	 * Background task that locates a paired HxM device and opens an RFCOMM
	 * connection to it, then hands the socket to a ConnectedThread reader.
	 */
	class BluetoothFindService implements DiscoveryRunnable {
		private ServiceType serviceType;
		private FindCallback findCallback;
		private Options options;
		private Filter filter;
		// Cooperative cancellation flag (set via stop()); NOTE(review): the
		// run() loop below does not currently check it.
		private boolean stopped;

		private BluetoothFindService(ServiceType srvType, FindCallback findCB, Options opts, Filter fltr) {
			serviceType = srvType;
			findCallback = findCB;
			options = opts;
			filter = fltr;
			stopped = false;
			if(D) Log.v(TAG,"constructed BluetoothFindService");
		}

		public synchronized boolean isStopped() {
			return stopped;
		}

		public synchronized void stop() {
			stopped = true;
		}

		/**
		 * Scans the bonded-device list for a device whose name starts with
		 * "HXM", connects to it over RFCOMM channel 1 (via the hidden
		 * createRfcommSocket method, accessed by reflection), and starts a
		 * ConnectedThread to stream readings. On connection failure the
		 * callback is invoked once with values[0]=1000 as an error marker.
		 * NOTE(review): after a failed connect() the code still falls
		 * through and starts a ConnectedThread on the closed socket —
		 * confirm whether that is intentional best-effort behavior.
		 */
		public void run() {
			//Simply connect to the HRM device
			Log.d(TAG, "Connecting to HRM");
			//get the list of bonded devices
			Set<BluetoothDevice> devicesPaired = mBluetoothAdapter.getBondedDevices();
			if (devicesPaired.isEmpty() && devicesAvailable.isEmpty())
				Log.e(TAG, "No bluetooth device is available");
			//TODO: check if HRM is paired
			else{
				//Assume that HXM device is paired
				if(!devicesPaired.isEmpty()){
					for (BluetoothDevice device : devicesPaired) {
						String deviceName = device.getName();
						if ( deviceName.startsWith("HXM") ) {
							/*
							 * we found an HxM to try to talk to!, let's remember its name and
							 * stop looking for more
							 */
							mHxMAddress = device.getAddress();
							mHxMName = device.getName();
							// NOTE(review): the address is embedded literally in this log
							// string ("++mHxMAddress") instead of being concatenated —
							// looks like a typo in the original message; left untouched.
							Log.d(TAG,"getConnectedHxm() found a device whose name starts with 'HXM', its name is "+mHxMName+" and its address is ++mHxMAddress");

							// start connecting to Hxm
							BluetoothSocket tmp = null;
							// createRfcommSocket(int channel) is not public API;
							// invoked reflectively to bypass SDP and use channel 1.
							try {
								Method m = device.getClass().getMethod("createRfcommSocket", new Class[] {int.class});
								tmp = (BluetoothSocket) m.invoke(device, 1);
							}catch (SecurityException e) {
								Log.e(TAG, "ConnectThread() SecurityException");
								e.printStackTrace();
							} catch (NoSuchMethodException e) {
								Log.e(TAG, "ConnectThread() SecurityException");
								e.printStackTrace();
							} catch (IllegalArgumentException e) {
								Log.e(TAG, "ConnectThread() SecurityException");
								e.printStackTrace();
							} catch (IllegalAccessException e) {
								Log.e(TAG, "ConnectThread() SecurityException");
								e.printStackTrace();
							} catch (InvocationTargetException e) {
								Log.e(TAG, "ConnectThread() SecurityException");
								e.printStackTrace();
							}
							mmSocket = tmp;

							try {
								// This is a blocking call and will only return on a successful connection or an exception
								mmSocket.connect();
							} catch (IOException e) {
								//inform widget that socket is not connected
								// values[0]=1000 acts as the "connection failed" marker.
								long[] values = {1000, 0 , 0, 0, 0, 0};
								srv.values = values;
								findCallback.onFound(srv);
								Log.d(TAG, "END connectionFailed");
								// Close the socket
								try {
									mmSocket.close();
								} catch (IOException e2) {
									Log.e(TAG, "ConnectThread.run(): unable to close() socket during connection failure", e2);
								}
							}
							// srv.api = serviceType.api;

							// Cancel any thread currently running a connection
							if (mConnectedThread != null) {
								mConnectedThread.cancel();
								mConnectedThread = null;
							}
							mConnectedThread = new ConnectedThread(mmSocket, findCallback);
							mConnectedThread.start();
						}
					}
				}
			}
		} //end of run
	} // end of runnable

	/*
	 * This thread runs during a connection with the Hxm.
	 * It handles all incoming data
	 */
	private class ConnectedThread extends Thread {
		private final BluetoothSocket mmSocket;
		private final FindCallback findCallback;
		private final InputStream mmInStream;

		/**
		 * Grabs the socket's input stream; on failure mmInStream stays null
		 * (run() would then NPE — the original code does not guard this).
		 */
		public ConnectedThread(BluetoothSocket socket, FindCallback findCB) {
			Log.d(TAG, "ConnectedThread(): starting");
			mmSocket = socket;
			findCallback = findCB;
			InputStream tmpIn = null;

			// Get the BluetoothSocket input and output streams
			try {
				tmpIn = socket.getInputStream();
			} catch (IOException e) {
				Log.e(TAG, "ConnectedThread(): temp sockets not created", e);
			}
			mmInStream = tmpIn;
			Log.d(TAG, "ConnectedThread(): finished");
		}

		/*
		 * The code below is a basic implementation of a reader specific to the HxM device. It is
		 * intended to illustrate the packet structure and field extraction. Consider if your
		 * implementation should include more robust error detection logic to prevent things like
		 * buffer sizes from causing read overruns, or recomputing the CRC and comparing it to the
		 * contents of the message to detect transmission erros.
		 */
		// HxM frame markers: start-of-text, message id, data length, end-of-text.
		private final int STX = 0x02;
		private final int MSGID = 0x26;
		private final int DLC = 55;
		private final int ETX = 0x03;

		/**
		 * Reads framed HxM messages (STX, MSGID, DLC, payload, CRC, ETX)
		 * from the socket in a loop until an IOException (disconnect), and
		 * reports each frame to the FindCallback via the shared srv object.
		 */
		@Override
		public void run() {
			Log.d(TAG, "ConnectedThread.run(): starting");
			byte[] buffer = new byte[1024];
			int b = 0;
			int bufferIndex = 0;
			int payloadBytesRemaining;

			// Keep listening to the InputStream while connected
			while (true) {
				try {
					bufferIndex = 0;

					// Read bytes from the stream until we encounter the the start of message character
					while (( b = mmInStream.read()) != STX )
						;
					buffer[bufferIndex++] = (byte) b;

					// The next byte must be the message ID, see the basic message format in the document
					if ((b = mmInStream.read()) != MSGID )
						continue;
					buffer[bufferIndex++] = (byte) b;

					// The next byte must be the expected data length code, we don't handle variable length messages, see the doc
					if ((b = mmInStream.read()) != DLC )
						continue;
					buffer[bufferIndex++] = (byte) b;

					// Copy the fixed-size payload (DLC bytes) into the buffer.
					payloadBytesRemaining = b;
					while ( (payloadBytesRemaining--) > 0 ) {
						buffer[bufferIndex++] = (byte) (b = mmInStream.read());
					}

					// The next byte should be a CRC
					buffer[bufferIndex++] = (byte) (b = mmInStream.read());

					// The next byte must be the end of text indicator, or there was sadness, see the basic message format in the document
					if ((b = mmInStream.read()) != ETX )
						continue;
					buffer[bufferIndex++] = (byte) b;

					Log.d(TAG, "mConnectedThread: read "+Integer.toString(bufferIndex)+" bytes");
					// HrmReading's constructor decodes the frame and writes the
					// readings into srv.values as a side effect.
					long[] values = {0, 0 , 0, 0, 0, 0};
					srv.values = values;
					HrmReading hrm = new HrmReading( buffer, srv.values);
					findCallback.onFound(srv);
				} catch (IOException e) {
					Log.e(TAG, "disconnected", e);
					//connectionLost();
					break;
				}
			}
			Log.d(TAG, "ConnectedThread.run(): finished");
		}

		/** Closes the input stream and socket, terminating run()'s read loop. */
		public void cancel() {
			Log.e(TAG, "cancel connection");
			try {
				mmInStream.close();
			} catch (IOException e) {
				Log.e(TAG, "ConnectedThread.cancel(): close() of InputStream failed", e);
			}
			try {
				mmSocket.close();
			} catch (IOException e) {
				Log.e(TAG, "ConnectedThread.cancel(): close() of connect socket failed", e);
			}
		}
	}

	//start of HrmReading
	/**
	 * Decoder for one HxM heart-rate-monitor frame. The constructor parses
	 * the raw byte buffer (little-endian 16-bit fields) and copies selected
	 * readings into the caller-supplied values array:
	 * [0]=heartRate, [1]=heartBeatNumber, [2]=distance, [3]=speed,
	 * [4]=strides. NOTE(review): values[5] is never written.
	 */
	public class HrmReading implements Dictionary {
		public final int STX = 0x02;
		public final int MSGID = 0x26;
		public final int DLC = 55;
		public final int ETX = 0x03;

		private static final String TAG = "HrmReading";

		int serial;
		byte stx;
		byte msgId;
		byte dlc;
		int firmwareId;
		int firmwareVersion;
		int hardWareId;
		int hardwareVersion;
		int batteryIndicator;
		int heartRate;
		int heartBeatNumber;
		long hbTime1;
		long hbTime2;
		long hbTime3;
		long hbTime4;
		long hbTime5;
		long hbTime6;
		long hbTime7;
		long hbTime8;
		long hbTime9;
		long hbTime10;
		long hbTime11;
		long hbTime12;
		long hbTime13;
		long hbTime14;
		long hbTime15;
		long reserved1;
		long reserved2;
		long reserved3;
		long distance;
		long speed;
		byte strides;
		byte reserved4;
		long reserved5;
		byte crc;
		byte etx;

		long[] values = {0, 0 , 0, 0, 0, 0};

		/**
		 * Parses an HxM frame from buffer into the named fields, then
		 * publishes heartRate/heartBeatNumber/distance/speed/strides into
		 * val. Parse errors are caught and only logged; the ETX check after
		 * the try detects truncated/corrupt frames.
		 */
		public HrmReading (byte[] buffer, long[] val) {
			int bufferIndex = 0;
			values = val;
			Log.d ( TAG, "HrmReading being built from byte buffer");
			try {
				stx = buffer[bufferIndex++];
				msgId = buffer[bufferIndex++];
				dlc = buffer[bufferIndex++];
				// 16-bit fields are assembled little-endian: low byte first,
				// high byte shifted left by 8.
				firmwareId = (int)((0x000000FF & (int)buffer[bufferIndex++]) | (int)(0x000000FF & (int)buffer[bufferIndex++])<< 8);
				firmwareVersion = (int)((0x000000FF & (int)buffer[bufferIndex++]) | (int)(0x000000FF & (int)buffer[bufferIndex++])<< 8);
				hardWareId = (int)((0x000000FF & (int)buffer[bufferIndex++]) | (int)(0x000000FF & (int)buffer[bufferIndex++])<< 8);
				hardwareVersion = (int)((0x000000FF & (int)buffer[bufferIndex++]) | (int)(0x000000FF & (int)buffer[bufferIndex++])<< 8);
				batteryIndicator = (int)(0x000000FF & (int)(buffer[bufferIndex++]));
				heartRate = (int)(0x000000FF & (int)(buffer[bufferIndex++]));
				heartBeatNumber = (int)(0x000000FF & (int)(buffer[bufferIndex++]));
				hbTime1 = (long) (int)((0x000000FF & (int)buffer[bufferIndex++]) | (int)(0x000000FF & (int)buffer[bufferIndex++])<< 8);
				hbTime2 = (long) (int)((0x000000FF & (int)buffer[bufferIndex++]) | (int)(0x000000FF & (int)buffer[bufferIndex++])<< 8);
				hbTime3 = (long) (int)((0x000000FF & (int)buffer[bufferIndex++]) | (int)(0x000000FF & (int)buffer[bufferIndex++])<< 8);
				hbTime4 = (long) (int)((0x000000FF & (int)buffer[bufferIndex++]) | (int)(0x000000FF & (int)buffer[bufferIndex++])<< 8);
				hbTime5 = (long) (int)((0x000000FF & (int)buffer[bufferIndex++]) | (int)(0x000000FF & (int)buffer[bufferIndex++])<< 8);
				hbTime6 = (long) (int)((0x000000FF & (int)buffer[bufferIndex++]) | (int)(0x000000FF & (int)buffer[bufferIndex++])<< 8);
				hbTime7 = (long) (int)((0x000000FF & (int)buffer[bufferIndex++]) | (int)(0x000000FF & (int)buffer[bufferIndex++])<< 8);
				hbTime8 = (long) (int)((0x000000FF & (int)buffer[bufferIndex++]) | (int)(0x000000FF & (int)buffer[bufferIndex++])<< 8);
				hbTime9 = (long) (int)((0x000000FF & (int)buffer[bufferIndex++]) | (int)(0x000000FF & (int)buffer[bufferIndex++])<< 8);
				hbTime10 = (long) (int)((0x000000FF & (int)buffer[bufferIndex++]) | (int)(0x000000FF & (int)buffer[bufferIndex++])<< 8);
				hbTime11 = (long) (int)((0x000000FF & (int)buffer[bufferIndex++]) | (int)(0x000000FF & (int)buffer[bufferIndex++])<< 8);
				hbTime12 = (long) (int)((0x000000FF & (int)buffer[bufferIndex++]) | (int)(0x000000FF & (int)buffer[bufferIndex++])<< 8);
				hbTime13 = (long) (int)((0x000000FF & (int)buffer[bufferIndex++]) | (int)(0x000000FF & (int)buffer[bufferIndex++])<< 8);
				hbTime14 = (long) (int)((0x000000FF & (int)buffer[bufferIndex++]) | (int)(0x000000FF & (int)buffer[bufferIndex++])<< 8);
				hbTime15 = (long) (int)((0x000000FF & (int)buffer[bufferIndex++]) | (int)(0x000000FF & (int)buffer[bufferIndex++])<< 8);
				reserved1 = (long) (int)((0x000000FF & (int)buffer[bufferIndex++]) | (int)(0x000000FF & (int)buffer[bufferIndex++])<< 8);
				reserved2 = (long) (int)((0x000000FF & (int)buffer[bufferIndex++]) | (int)(0x000000FF & (int)buffer[bufferIndex++])<< 8);
				reserved3 = (long) (int)((0x000000FF & (int)buffer[bufferIndex++]) | (int)(0x000000FF & (int)buffer[bufferIndex++])<< 8);
				distance = (long) (int)((0x000000FF & (int)buffer[bufferIndex++]) | (int)(0x000000FF & (int)buffer[bufferIndex++])<< 8);
				speed = (long) (int)((0x000000FF & (int)buffer[bufferIndex++]) | (int)(0x000000FF & (int)buffer[bufferIndex++])<< 8);
				strides = buffer[bufferIndex++];
				reserved4 = buffer[bufferIndex++];
				reserved5 = (long)(int)((0x000000FF & (int)buffer[bufferIndex++]) | (int)(0x000000FF & (int)buffer[bufferIndex++])<< 8);
				crc = buffer[bufferIndex++];
				etx = buffer[bufferIndex];
			} catch (Exception e) {
				Log.d(TAG, "Failure building HrmReading from byte buffer, probably an incopmplete or corrupted buffer");
			}
			Log.d(TAG, "Building HrmReading from byte buffer complete, consumed " + bufferIndex + " bytes in the process");
			if ( etx != ETX )
				Log.e(TAG,"...ETX mismatch!  The HxM message was not parsed properly");
			//pass values to srv
			Log.d(TAG,"...heartRate "+ ( heartRate ));
			val[0] = (long)(int)heartRate;
			Log.d(TAG,"...heartBeatNumber "+ ( heartBeatNumber ));
			val[1] = (long)(int)heartBeatNumber;
			Log.d(TAG,"...distance "+ ( distance ));
			val[2] = distance;
			Log.d(TAG,"...speed "+ ( speed ));
			val[3] = speed;
			Log.d(TAG,"...strides "+ ( strides ));
			val[4] = strides;
		}
	}
	// end of HXM reading

	/** Runnable with cooperative stop support, used by PendingOperation. */
	abstract interface DiscoveryRunnable extends Runnable {
		//for supports on PendingOperation
		public abstract void stop();
		public abstract boolean isStopped();
	}

	/**
	 * PendingOperation wrapper over the discovery thread: cancel() interrupts
	 * the thread and sets the runnable's stop flag.
	 */
	class DiscoveryPendingOperation extends PendingOperation {
		private Thread t=null;
		private DiscoveryRunnable r=null;

		public DiscoveryPendingOperation(Thread t, DiscoveryRunnable r) {
			this.t = t;
			this.r = r;
		}

		public void cancel() {
			Log.d(TAG, "DiscoveryPendingOperation cancel");
			if(t!=null) {
				Log.v(TAG, "DiscoveryPendingOperation cancel - send interrupt...");
				//Is this interrupt needed??? - copied from messaging
				t.interrupt();
				if(r!=null)
					r.stop();
			}
		}
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.accumulo.core.clientImpl;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.function.Predicate;

import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.client.admin.CloneConfiguration;
import org.apache.accumulo.core.client.admin.CompactionConfig;
import org.apache.accumulo.core.client.admin.DiskUsage;
import org.apache.accumulo.core.client.admin.Locations;
import org.apache.accumulo.core.client.admin.NewTableConfiguration;
import org.apache.accumulo.core.client.admin.SummaryRetriever;
import org.apache.accumulo.core.client.sample.SamplerConfiguration;
import org.apache.accumulo.core.client.summary.SummarizerConfiguration;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.iterators.IteratorUtil.IteratorScope;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.hadoop.io.Text;
import org.junit.Test;

/**
 * Unit test for the iterator-management logic in TableOperationsHelper
 * (attachIterator / removeIterator / listIterators / getIteratorSetting),
 * exercised against an in-memory stub that only implements the property
 * get/set/remove methods the helper relies on.
 */
public class TableOperationsHelperTest {

  /**
   * Minimal in-memory TableOperationsHelper: per-table properties live in
   * the {@code settings} map; every other abstract operation is a no-op
   * stub, since the helper's iterator logic only touches properties.
   */
  static class Tester extends TableOperationsHelper {
    // tableName -> (property -> value); TreeMap keeps properties sorted so
    // comparisons against expected maps are deterministic.
    Map<String,Map<String,String>> settings = new HashMap<>();

    // ---- stubbed operations (unused by the iterator logic under test) ----
    @Override
    public SortedSet<String> list() {
      return null;
    }

    @Override
    public boolean exists(String tableName) {
      return true;
    }

    @Override
    public void create(String tableName) {}

    @Override
    public void create(String tableName, NewTableConfiguration ntc) {}

    @Override
    public void addSplits(String tableName, SortedSet<Text> partitionKeys) {}

    @Override
    public Collection<Text> listSplits(String tableName) {
      return null;
    }

    @Override
    public Collection<Text> listSplits(String tableName, int maxSplits) {
      return null;
    }

    @Override
    public Text getMaxRow(String tableName, Authorizations auths, Text startRow,
        boolean startInclusive, Text endRow, boolean endInclusive) {
      return null;
    }

    @Override
    public void merge(String tableName, Text start, Text end) {}

    @Override
    public void deleteRows(String tableName, Text start, Text end) {}

    @Override
    public void compact(String tableName, Text start, Text end, boolean flush, boolean wait) {}

    @Override
    public void compact(String tableName, Text start, Text end, List<IteratorSetting> iterators,
        boolean flush, boolean wait) {}

    @Override
    public void compact(String tableName, CompactionConfig config) {}

    @Override
    public void delete(String tableName) {}

    @Override
    public void clone(String srcTableName, String newTableName, boolean flush,
        Map<String,String> propertiesToSet, Set<String> propertiesToExclude) {}

    @Override
    public void clone(String srcTableName, String newTableName, CloneConfiguration config) {}

    @Override
    public void rename(String oldTableName, String newTableName) {}

    @Override
    public void flush(String tableName) {}

    @Override
    public void flush(String tableName, Text start, Text end, boolean wait) {}

    // ---- real in-memory property implementations used by the helper ----

    /** Stores a property, lazily creating the table's property map. */
    @Override
    public void setProperty(String tableName, String property, String value) {
      if (!settings.containsKey(tableName))
        settings.put(tableName, new TreeMap<>());
      settings.get(tableName).put(property, value);
    }

    /** Removes a property; silently ignores unknown tables. */
    @Override
    public void removeProperty(String tableName, String property) {
      if (!settings.containsKey(tableName))
        return;
      settings.get(tableName).remove(property);
    }

    /** Returns the table's live property map (empty map for unknown tables). */
    @Override
    public Map<String,String> getConfiguration(String tableName) {
      Map<String,String> empty = Collections.emptyMap();
      if (!settings.containsKey(tableName))
        return empty;
      return settings.get(tableName);
    }

    // ---- remaining stubs ----
    @Override
    public void setLocalityGroups(String tableName, Map<String,Set<Text>> groups) {}

    @Override
    public Map<String,Set<Text>> getLocalityGroups(String tableName) {
      return null;
    }

    @Override
    public Set<Range> splitRangeByTablets(String tableName, Range range, int maxSplits) {
      return null;
    }

    @Override
    @Deprecated(since = "2.0.0")
    public void importDirectory(String tableName, String dir, String failureDir, boolean setTime) {}

    @Override
    public void offline(String tableName) {}

    @Override
    public boolean isOnline(String tableName) {
      return true;
    }

    @Override
    public void online(String tableName) {}

    @Override
    public void offline(String tableName, boolean wait) {}

    @Override
    public void online(String tableName, boolean wait) {}

    @Override
    public void clearLocatorCache(String tableName) {}

    @Override
    public Map<String,String> tableIdMap() {
      return null;
    }

    @Override
    public List<DiskUsage> getDiskUsage(Set<String> tables) {
      return null;
    }

    @Override
    public void importTable(String tableName, Set<String> exportDir) {}

    @Override
    public void exportTable(String tableName, String exportDir) {}

    @Override
    public void cancelCompaction(String tableName) {}

    @Override
    public boolean testClassLoad(String tableName, String className, String asTypeName) {
      return false;
    }

    @Override
    public void setSamplerConfiguration(String tableName,
        SamplerConfiguration samplerConfiguration) {
      throw new UnsupportedOperationException();
    }

    @Override
    public void clearSamplerConfiguration(String tableName) {
      throw new UnsupportedOperationException();
    }

    @Override
    public SamplerConfiguration getSamplerConfiguration(String tableName) {
      throw new UnsupportedOperationException();
    }

    @Override
    public Locations locate(String tableName, Collection<Range> ranges) {
      throw new UnsupportedOperationException();
    }

    @Override
    public SummaryRetriever summaries(String tableName) {
      throw new UnsupportedOperationException();
    }

    @Override
    public void addSummarizers(String tableName, SummarizerConfiguration... summarizerConf) {
      throw new UnsupportedOperationException();
    }

    @Override
    public void removeSummarizers(String tableName, Predicate<SummarizerConfiguration> predicate) {
      throw new UnsupportedOperationException();
    }

    @Override
    public List<SummarizerConfiguration> listSummarizers(String tableName) {
      throw new UnsupportedOperationException();
    }
  }

  /** Factory hook so subclasses can test other helper implementations. */
  protected TableOperationsHelper getHelper() {
    return new Tester();
  }

  /**
   * Asserts the table's full configuration equals the given
   * "key=value" pairs (value may itself contain '=' — split limit 2).
   */
  void check(TableOperationsHelper t, String tablename, String[] values) throws Exception {
    Map<String,String> expected = new TreeMap<>();
    for (String value : values) {
      String[] parts = value.split("=", 2);
      expected.put(parts[0], parts[1]);
    }
    Map<String,String> actual = Map.copyOf(t.getConfiguration(tablename));
    assertEquals(expected, actual);
  }

  /**
   * End-to-end exercise of iterator attach/remove/list/get: verifies the
   * property encoding per scope, option handling, scope merging in
   * listIterators, and that attachIterator rejects name/priority/option
   * conflicts with existing iterator properties.
   */
  @Test
  public void testAttachIterator() throws Exception {
    TableOperationsHelper t = getHelper();
    Map<String,String> empty = Collections.emptyMap();
    // Attach in one scope, then remove: config must round-trip to empty.
    t.attachIterator("table", new IteratorSetting(10, "someName", "foo.bar", empty),
        EnumSet.of(IteratorScope.scan));
    check(t, "table", new String[] {"table.iterator.scan.someName=10,foo.bar",});
    t.removeIterator("table", "someName", EnumSet.of(IteratorScope.scan));
    check(t, "table", new String[] {});

    // Same iterator name in two scopes, with options only in majc.
    IteratorSetting setting = new IteratorSetting(10, "someName", "foo.bar");
    setting.addOptions(Collections.singletonMap("key", "value"));
    t.attachIterator("table", setting, EnumSet.of(IteratorScope.majc));
    setting = new IteratorSetting(10, "someName", "foo.bar");
    t.attachIterator("table", setting, EnumSet.of(IteratorScope.scan));
    check(t, "table", new String[] {"table.iterator.majc.someName=10,foo.bar",
        "table.iterator.majc.someName.opt.key=value", "table.iterator.scan.someName=10,foo.bar",});
    t.removeIterator("table", "someName", EnumSet.of(IteratorScope.scan));

    setting = new IteratorSetting(20, "otherName", "some.classname");
    setting.addOptions(Collections.singletonMap("key", "value"));
    t.attachIterator("table", setting, EnumSet.of(IteratorScope.majc));
    setting = new IteratorSetting(20, "otherName", "some.classname");
    t.attachIterator("table", setting, EnumSet.of(IteratorScope.scan));

    // listIterators must merge scopes per iterator name.
    Map<String,EnumSet<IteratorScope>> two = t.listIterators("table");
    assertEquals(2, two.size());
    assertTrue(two.containsKey("otherName"));
    assertEquals(2, two.get("otherName").size());
    assertTrue(two.get("otherName").contains(IteratorScope.majc));
    assertTrue(two.get("otherName").contains(IteratorScope.scan));
    assertTrue(two.containsKey("someName"));
    assertEquals(1, two.get("someName").size());
    assertTrue(two.get("someName").contains(IteratorScope.majc));

    t.removeIterator("table", "someName", EnumSet.allOf(IteratorScope.class));
    check(t, "table", new String[] {"table.iterator.majc.otherName=20,some.classname",
        "table.iterator.majc.otherName.opt.key=value",
        "table.iterator.scan.otherName=20,some.classname",});

    // getIteratorSetting is per-scope: scan has no options, majc does.
    setting = t.getIteratorSetting("table", "otherName", IteratorScope.scan);
    assertEquals(20, setting.getPriority());
    assertEquals("some.classname", setting.getIteratorClass());
    assertTrue(setting.getOptions().isEmpty());
    setting = t.getIteratorSetting("table", "otherName", IteratorScope.majc);
    assertEquals(20, setting.getPriority());
    assertEquals("some.classname", setting.getIteratorClass());
    assertFalse(setting.getOptions().isEmpty());
    assertEquals(Collections.singletonMap("key", "value"), setting.getOptions());

    t.attachIterator("table", setting, EnumSet.of(IteratorScope.minc));
    check(t, "table",
        new String[] {"table.iterator.majc.otherName=20,some.classname",
            "table.iterator.majc.otherName.opt.key=value",
            "table.iterator.minc.otherName=20,some.classname",
            "table.iterator.minc.otherName.opt.key=value",
            "table.iterator.scan.otherName=20,some.classname",});

    // Conflict cases: re-attaching an existing name must fail ...
    try {
      t.attachIterator("table", setting);
      fail();
    } catch (AccumuloException e) {
      // expected, ignore
    }
    // ... as must a new name with a conflicting priority ...
    setting.setName("thirdName");
    try {
      t.attachIterator("table", setting);
      fail();
    } catch (AccumuloException e) {}
    // ... and a name whose option property already exists.
    setting.setPriority(10);
    t.setProperty("table", "table.iterator.minc.thirdName.opt.key", "value");
    try {
      t.attachIterator("table", setting);
      fail();
    } catch (AccumuloException e) {}
    // Once the stale option property is removed, attach succeeds.
    t.removeProperty("table", "table.iterator.minc.thirdName.opt.key");
    t.attachIterator("table", setting);
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. The ASF licenses this file to You * under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. For additional information regarding * copyright in this work, please see the NOTICE file in the top level * directory of this distribution. */ package org.apache.roller.ui.authoring.struts.actions; import java.io.IOException; import java.text.DateFormat; import java.text.ParseException; import java.util.Date; import java.util.List; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.apache.struts.actions.DispatchAction; import org.apache.roller.RollerException; import org.apache.roller.model.RollerFactory; import org.apache.roller.model.WeblogManager; import org.apache.roller.pojos.WeblogCategoryData; import org.apache.roller.pojos.WeblogEntryData; import org.apache.roller.pojos.WebsiteData; import org.apache.roller.ui.core.BasePageModel; import org.apache.roller.ui.core.RollerRequest; import org.apache.roller.ui.core.RollerSession; import org.apache.roller.ui.authoring.struts.formbeans.WeblogEntryManagementForm; import org.apache.roller.util.DateUtil; ///////////////////////////////////////////////////////////////////////////// /** * Query weblog entries and display the results in 
tabular form.
 *
 * @struts.action path="/roller-ui/authoring/weblogEntryManagement" name="weblogEntryManagementForm"
 *     scope="request" parameter="method"
 *
 * @struts.action-forward name="weblogEntryManagement.page" path=".WeblogEntryManagement"
 *
 * @author Dave Johnson
 */
public final class WeblogEntryManagementAction extends DispatchAction {

    //-----------------------------------------------------------------------
    /**
     * Run the weblog entry query described by the form bean and place a
     * {@link PageModel} with the results into the request for rendering.
     * Access is denied unless the request names a website and the session
     * user is authorized to work in it.
     */
    public ActionForward query(
            ActionMapping       mapping,
            ActionForm          actionForm,
            HttpServletRequest  request,
            HttpServletResponse response)
            throws IOException, ServletException, RollerException {

        WeblogEntryManagementForm form = (WeblogEntryManagementForm)actionForm;
        RollerRequest rreq = RollerRequest.getRollerRequest(request);
        RollerSession rses = RollerSession.getRollerSession(request);

        // ensure that weblog is specified and user has permission to work there.
        // NOTE(fix): removed two unused locals the original computed here (a
        // WeblogManager that was never used, and a "status" string recomputed
        // by PageModel anyway -- the latter could also NPE on a null status).
        if (rreq.getWebsite() != null && rses.isUserAuthorized(rreq.getWebsite())) {
            request.setAttribute("model", new PageModel(
                    request, response, mapping, rreq.getWebsite(), form));
        } else {
            return mapping.findForward("access-denied");
        }
        return mapping.findForward("weblogEntryManagement.page");
    }

    /**
     * Page model that executes the entry query described by the form bean and
     * exposes the result list plus paging links to the presentation page.
     */
    public class PageModel extends BasePageModel {
        private List entries = null;
        private RollerRequest rollerRequest = null;
        private HttpServletRequest request = null;
        private WebsiteData website = null;
        private String category = null;
        private Date startDate = null;
        private Date endDate = null;
        private String status = WeblogEntryData.PUBLISHED;
        private Integer maxEntries = null;
        private boolean more = false;
        private WeblogEntryManagementForm queryForm = null;

        /**
         * Builds the model by parsing the form bean's filter fields and
         * running the entry query against the WeblogManager.
         *
         * @throws RollerException if a submitted start/end date cannot be
         *         parsed, or the underlying query fails
         */
        public PageModel(
                HttpServletRequest request,
                HttpServletResponse response,
                ActionMapping mapping,
                WebsiteData website,
                WeblogEntryManagementForm queryForm) throws RollerException {
            super("weblogEntryQuery.title", request, response, mapping);
            rollerRequest = RollerRequest.getRollerRequest(request);
            this.request = request;
            this.queryForm = queryForm;
            this.website = website;

            // resolve the submitted category id (if any) to its path
            if (null != queryForm.getCategoryId()
                    && !queryForm.getCategoryId().equals("")) {
                WeblogManager wmgr = RollerFactory.getRoller().getWeblogManager();
                WeblogCategoryData cd =
                        wmgr.getWeblogCategory(queryForm.getCategoryId());
                category = cd.getPath();
            }

            // parse optional start/end dates using the request locale
            final DateFormat df =
                    DateFormat.getDateInstance(DateFormat.SHORT, request.getLocale());
            String start = queryForm.getStartDateString();
            if (null != start && start.trim().length() > 0) {
                try {
                    startDate = DateUtil.getStartOfDay(df.parse(start));
                } catch (ParseException e) {
                    throw new RollerException("ERROR parsing start date.");
                }
            }
            String end = queryForm.getEndDateString();
            if (null != end && end.trim().length() > 0) {
                try {
                    endDate = DateUtil.getEndOfDay(df.parse(end));
                } catch (ParseException e) {
                    throw new RollerException("ERROR parsing end date.");
                }
            }

            // "ALL" means no status filter at all
            this.status = "ALL".equals(queryForm.getStatus()) ?
                null : queryForm.getStatus();
            // NOTE(fix): removed the original no-op self-assignment
            // "this.maxEntries = maxEntries;" which left the field null.

            // fetch one entry more than requested so we can tell whether a
            // "next" page exists; trim the extra entry before exposing the list
            entries = RollerFactory.getRoller().getWeblogManager().getWeblogEntries(
                    website,
                    null,
                    startDate,
                    endDate,
                    category,
                    status,
                    queryForm.getSortby(),
                    null,
                    queryForm.getOffset(),
                    queryForm.getCount() + 1);
            if (entries.size() > queryForm.getCount()) {
                more = true;
                entries.remove(entries.size() - 1);
            }
        }

        public String getBaseURL() {
            return getRequest().getContextPath();
        }

        /**
         * Get recent weblog entries using request parameters to determine
         * username, date, and category name parameters.
         * @return List of WeblogEntryData objects.
         * @throws RollerException
         */
        public List getRecentWeblogEntries() throws RollerException {
            return entries;
        }

        /** Number of entries on the current page. */
        public int getWeblogEntryCount() {
            return entries.size();
        }

        /** All categories of the current weblog, for the filter drop-down. */
        public List getCategories() throws Exception {
            RollerRequest rreq = RollerRequest.getRollerRequest(request);
            List categories = RollerFactory.getRoller().getWeblogManager()
                    .getWeblogCategories(rreq.getWebsite());
            return categories;
        }

        /**
         * Publication time of the oldest entry on this page (entries are
         * sorted newest-first, so it is the last list element), or null if
         * the page is empty.
         */
        public Date getEarliestDate() {
            Date date = null;
            if (entries.size() > 0) {
                WeblogEntryData earliest =
                        (WeblogEntryData)entries.get(entries.size() - 1);
                date = earliest.getPubTime();
            }
            return date;
        }

        /**
         * Publication time of the newest entry on this page (first list
         * element), or null if the page is empty.
         */
        public Date getLatestDate() {
            Date date = null;
            if (entries.size() > 0) {
                WeblogEntryData latest = (WeblogEntryData)entries.get(0);
                date = latest.getPubTime();
            }
            return date;
        }

        /** Link to the next page of results, or null when on the last page. */
        public String getNextLink() {
            if (more) {
                int offset = queryForm.getOffset() + queryForm.getCount();
                offset = (offset < 0) ? 0 : offset;
                return getQueryLink() + "&offset=" + offset;
            } else {
                return null;
            }
        }

        /** Link to the previous page of results, or null when on the first page. */
        public String getPrevLink() {
            if (queryForm.getOffset() > 0) {
                int offset = queryForm.getOffset() - queryForm.getCount();
                offset = (offset < 0) ? 0 : offset;
                return getQueryLink() + "&offset=" + offset;
            } else {
                return null;
            }
        }

        /** Base query URL carrying the weblog handle and page size. */
        private String getQueryLink() {
            StringBuilder sb = new StringBuilder();
            sb.append(request.getContextPath());
            sb.append("/roller-ui/authoring/weblogEntryManagement.do"); // TODO: get path from Struts
            sb.append("?method=query");
            sb.append("&weblog=");
            sb.append(getWebsite().getHandle());
            sb.append("&count=");
            sb.append(queryForm.getCount());
            return sb.toString();
        }
    }
}
/*
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

/*
 * Copyright (C) 2006 University of Waikato
 */

package weka.core;

import java.lang.reflect.Method;
import java.util.Enumeration;
import java.util.Properties;
import java.util.StringTokenizer;
import java.util.Vector;

import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;

import weka.gui.GenericPropertiesCreator;

/**
 * Tests OptionHandlers. Run from the command line with:
 * <p/>
 * java weka.core.OptionHandlersTest
 *
 * @author FracPete (fracpete at waikato dot ac dot nz)
 * @version $Revision: 10160 $
 */
public class OptionHandlersTest extends TestCase {

  /**
   * tests a specific OptionHandler
   */
  public static class OptionHandlerTest extends TestCase {

    /** the class to test */
    protected String m_Classname;

    /** the OptionHandler tester */
    protected CheckOptionHandler m_OptionTester;

    /**
     * Constructs the <code>OptionHandlerTest</code>.
     *
     * @param name the name of the test class
     * @param classname the actual classname
     */
    public OptionHandlerTest(String name, String classname) {
      super(name);

      m_Classname = classname;
    }

    /**
     * returns the classname this test is for
     *
     * @return the classname
     */
    public String getClassname() {
      return m_Classname;
    }

    /**
     * configures the optionhandler
     *
     * @return the configured optionhandler, null in case of an error
     */
    protected OptionHandler getOptionHandler() {
      OptionHandler result;

      try {
        result = (OptionHandler) Class.forName(m_Classname).newInstance();
      } catch (Exception e) {
        result = null;
      }

      return result;
    }

    /**
     * Called by JUnit before each test method.
     *
     * @throws Exception if an error occurs
     */
    @Override
    protected void setUp() throws Exception {
      super.setUp();

      m_OptionTester = new CheckOptionHandler();
      m_OptionTester.setOptionHandler(getOptionHandler());
      m_OptionTester.setUserOptions(new String[0]);
      m_OptionTester.setSilent(true);
    }

    /**
     * Called by JUnit after each test method
     *
     * @throws Exception if an error occurs
     */
    @Override
    protected void tearDown() throws Exception {
      super.tearDown();

      m_OptionTester = null;
    }

    /**
     * tests the listing of the options
     *
     * @throws Exception if test fails
     */
    public void testListOptions() throws Exception {
      if (m_OptionTester.getOptionHandler() != null) {
        if (!m_OptionTester.checkListOptions()) {
          fail(getClassname() + ": " + "Options cannot be listed via listOptions.");
        }
      }
    }

    /**
     * tests the setting of the options
     *
     * @throws Exception if test fails
     */
    public void testSetOptions() throws Exception {
      if (m_OptionTester.getOptionHandler() != null) {
        if (!m_OptionTester.checkSetOptions()) {
          fail(getClassname() + ": " + "setOptions method failed.");
        }
      }
    }

    /**
     * tests whether there are any remaining options
     *
     * @throws Exception if test fails
     */
    public void testRemainingOptions() throws Exception {
      if (m_OptionTester.getOptionHandler() != null) {
        if (!m_OptionTester.checkRemainingOptions()) {
          fail(getClassname() + ": " + "There were 'left-over' options.");
        }
      }
    }

    /**
     * tests the whether the user-supplied options stay the same after setting.
     * getting, and re-setting again.
     *
     * @see #m_OptionTester
     * @throws Exception if test fails
     */
    public void testCanonicalUserOptions() throws Exception {
      if (m_OptionTester.getOptionHandler() != null) {
        if (!m_OptionTester.checkCanonicalUserOptions()) {
          // NOTE(fix): the original failure message said "setOptions method
          // failed", copy-pasted from testSetOptions and misleading here.
          fail(getClassname() + ": " + "Canonical user options differ after set/get round-trip.");
        }
      }
    }

    /**
     * tests the resetting of the options to the default ones
     *
     * @throws Exception if test fails
     */
    public void testResettingOptions() throws Exception {
      if (m_OptionTester.getOptionHandler() != null) {
        // NOTE(fix): the original called checkSetOptions() here, duplicating
        // testSetOptions and never exercising the reset behavior that this
        // test (and its failure message) is clearly meant to cover.
        if (!m_OptionTester.checkResettingOptions()) {
          fail(getClassname() + ": " + "Resetting of options failed");
        }
      }
    }
  }

  /**
   * Constructs the <code>OptionHandlersTest</code>.
   *
   * @param name the name of the test class
   */
  public OptionHandlersTest(String name) {
    super(name);
  }

  /**
   * dummy for JUnit, does nothing, only to prevent JUnit from complaining about
   * "no tests"
   *
   * @throws Exception never happens
   */
  public void testDummy() throws Exception {
    // does nothing, only to prevent JUnit from complaining about "no tests"
  }

  /**
   * generate all tests
   *
   * @return all the tests
   */
  public static Test suite() {
    TestSuite suite = new TestSuite();

    try {
      // determine all test methods in the OptionHandlerTest class
      Vector<String> testMethods = new Vector<String>();
      Method[] methods = OptionHandlerTest.class.getDeclaredMethods();
      for (Method method : methods) {
        if (method.getName().startsWith("test")) {
          testMethods.add(method.getName());
        }
      }

      // get all classes that are accessible through the GUI
      GenericPropertiesCreator creator = new GenericPropertiesCreator();
      creator.execute(false);
      Properties props = creator.getOutputProperties();

      // traverse all super-classes
      Enumeration<?> names = props.propertyNames();
      while (names.hasMoreElements()) {
        String name = names.nextElement().toString();

        // add tests for all listed classes
        StringTokenizer tok = new StringTokenizer(props.getProperty(name, ""), ",");
        while (tok.hasMoreTokens()) {
          String classname = tok.nextToken();

          // does class implement OptionHandler?
          try {
            Class<?> cls = Class.forName(classname);
            if (!ClassDiscovery.hasInterface(OptionHandler.class, cls)) {
              continue;
            }
          } catch (Exception e) {
            // some other problem, skip this class
            continue;
          }

          // add tests for this class
          for (int i = 0; i < testMethods.size(); i++) {
            suite.addTest(new OptionHandlerTest(testMethods.get(i), classname));
          }
        }
      }
    } catch (Exception e) {
      e.printStackTrace();
    }

    return suite;
  }

  /**
   * for running the tests from commandline
   *
   * @param args the commandline arguments - ignored
   */
  public static void main(String[] args) {
    junit.textui.TestRunner.run(suite());
  }
}
/* * Copyright (C) 2017-2019 Dremio Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.dremio.exec.planner.fragment; import java.math.RoundingMode; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import com.dremio.common.exceptions.ExecutionSetupException; import com.dremio.common.nodes.EndpointHelper; import com.dremio.common.util.DremioStringUtils; import com.dremio.exec.ExecConstants; import com.dremio.exec.expr.fn.FunctionLookupContext; import com.dremio.exec.maestro.MaestroObserver; import com.dremio.exec.ops.QueryContext; import com.dremio.exec.physical.PhysicalOperatorSetupException; import com.dremio.exec.physical.base.AbstractPhysicalVisitor; import com.dremio.exec.physical.base.Exchange.ParallelizationDependency; import com.dremio.exec.physical.base.FragmentRoot; import com.dremio.exec.physical.base.PhysicalOperator; import com.dremio.exec.physical.base.Receiver; import com.dremio.exec.planner.PhysicalPlanReader; import com.dremio.exec.planner.fragment.Fragment.ExchangeFragmentPair; import com.dremio.exec.planner.fragment.Materializer.IndexedFragmentNode; import com.dremio.exec.proto.CoordExecRPC; import com.dremio.exec.proto.CoordExecRPC.Collector; import com.dremio.exec.proto.CoordExecRPC.FragmentAssignment; import 
com.dremio.exec.proto.CoordExecRPC.FragmentCodec; import com.dremio.exec.proto.CoordExecRPC.MajorFragmentAssignment; import com.dremio.exec.proto.CoordExecRPC.MinorAttr; import com.dremio.exec.proto.CoordExecRPC.PlanFragmentMajor; import com.dremio.exec.proto.CoordExecRPC.PlanFragmentMinor; import com.dremio.exec.proto.CoordExecRPC.QueryContextInformation; import com.dremio.exec.proto.CoordinationProtos.NodeEndpoint; import com.dremio.exec.proto.ExecProtos.FragmentHandle; import com.dremio.exec.proto.UserBitShared.QueryId; import com.dremio.exec.work.QueryWorkUnit; import com.dremio.exec.work.foreman.ForemanSetupException; import com.dremio.options.OptionList; import com.dremio.options.OptionManager; import com.dremio.resource.GroupResourceInformation; import com.dremio.resource.ResourceSchedulingDecisionInfo; import com.dremio.resource.SelectedExecutorsResourceInformation; import com.dremio.sabot.op.aggregate.vectorized.VectorizedHashAggOperator; import com.dremio.sabot.op.sort.external.ExternalSortOperator; import com.dremio.sabot.rpc.user.UserSession; import com.dremio.service.Pointer; import com.dremio.service.execselector.ExecutorSelectionContext; import com.dremio.service.execselector.ExecutorSelectionHandle; import com.dremio.service.execselector.ExecutorSelectionService; import com.dremio.service.execselector.ExecutorSelectionUtils; import com.fasterxml.jackson.core.JsonProcessingException; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.base.Stopwatch; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.common.math.IntMath; import com.google.common.primitives.Ints; import com.google.protobuf.ByteString; /** * The simple parallelizer determines the level of parallelization of a plan based on the cost of the underlying * operations. 
It doesn't take into account system load or other factors. Based on the cost of the query, the * parallelization for each major fragment will be determined. Once the amount of parallelization is done, assignment * is done based on round robin assignment ordered by operator affinity (locality) to available execution SabotNodes. */ public class SimpleParallelizer implements ParallelizationParameters { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SimpleParallelizer.class); private final long parallelizationThreshold; private int maxWidthPerNode; private final int maxGlobalWidth; private final double affinityFactor; private final boolean useNewAssignmentCreator; private final double assignmentCreatorBalanceFactor; private final MaestroObserver observer; private final ExecutionNodeMap executionMap; private final FragmentCodec fragmentCodec; private final QueryContext queryContext; private final ResourceSchedulingDecisionInfo resourceSchedulingDecisionInfo; private ExecutorSelectionService executorSelectionService; // NB: re-assigned in unit tests, hence not final private final int targetNumFragsPerNode; private final boolean shouldIgnoreLeafAffinity; public SimpleParallelizer(QueryContext context, MaestroObserver observer, ExecutorSelectionService executorSelectionService) { this(context, observer, executorSelectionService, null, context.getGroupResourceInformation()); } public SimpleParallelizer(QueryContext context, MaestroObserver observer, ExecutorSelectionService executorSelectionService, ResourceSchedulingDecisionInfo resourceSchedulingDecisionInfo, GroupResourceInformation groupResourceInformation) { this.queryContext = context; this.resourceSchedulingDecisionInfo = resourceSchedulingDecisionInfo; OptionManager optionManager = context.getOptions(); long sliceTarget = context.getPlannerSettings().getSliceTarget(); this.parallelizationThreshold = sliceTarget > 0 ? 
sliceTarget : 1; this.maxGlobalWidth = (int) optionManager.getOption(ExecConstants.MAX_WIDTH_GLOBAL); this.affinityFactor = optionManager.getOption(ExecConstants.AFFINITY_FACTOR); this.useNewAssignmentCreator = !optionManager.getOption(ExecConstants.OLD_ASSIGNMENT_CREATOR); this.assignmentCreatorBalanceFactor = optionManager.getOption(ExecConstants.ASSIGNMENT_CREATOR_BALANCE_FACTOR); this.observer = observer; this.fragmentCodec = FragmentCodec.valueOf(optionManager.getOption(ExecConstants.FRAGMENT_CODEC).toUpperCase()); this.executorSelectionService = executorSelectionService; this.targetNumFragsPerNode = Ints.saturatedCast(optionManager.getOption(ExecutorSelectionService.TARGET_NUM_FRAGS_PER_NODE)); this.shouldIgnoreLeafAffinity = optionManager.getOption(ExecConstants.SHOULD_IGNORE_LEAF_AFFINITY); final ExecutorSelectionHandle handle = executorSelectionService.getAllActiveExecutors(new ExecutorSelectionContext(resourceSchedulingDecisionInfo)); this.executionMap = new ExecutionNodeMap(handle.getExecutors()); computeMaxWidthPerNode(groupResourceInformation); } private void computeMaxWidthPerNode(GroupResourceInformation groupResourceInformation) { OptionManager optionManager = queryContext.getOptions(); final long configuredMaxWidthPerNode = groupResourceInformation.getAverageExecutorCores(optionManager); if (configuredMaxWidthPerNode == 0) { ExecutorSelectionUtils.throwEngineOffline(resourceSchedulingDecisionInfo.getQueueTag()); } final double maxWidthFactor = queryContext.getWorkStatsProvider().get().getMaxWidthFactor(groupResourceInformation); maxWidthPerNode = (int) Math.max(1, configuredMaxWidthPerNode * maxWidthFactor); if (logger.isDebugEnabled() && maxWidthFactor < 1) { final float clusterLoad = queryContext.getWorkStatsProvider().get().getClusterLoad(); logger.debug("Cluster load {} exceeded cutoff, max_width_factor = {}. 
current max_width = {}", clusterLoad, maxWidthFactor, maxWidthPerNode); } } @VisibleForTesting public SimpleParallelizer(long parallelizationThreshold, int maxWidthPerNode, int maxGlobalWidth, double affinityFactor, MaestroObserver observer, boolean useNewAssignmentCreator, double assignmentCreatorBalanceFactor, boolean shouldIgnoreLeafAffinity) { this.executionMap = new ExecutionNodeMap(Collections.<NodeEndpoint>emptyList()); this.parallelizationThreshold = parallelizationThreshold; this.maxWidthPerNode = maxWidthPerNode; this.maxGlobalWidth = maxGlobalWidth; this.affinityFactor = affinityFactor; this.observer = observer; this.useNewAssignmentCreator = useNewAssignmentCreator; this.assignmentCreatorBalanceFactor = assignmentCreatorBalanceFactor; this.fragmentCodec = FragmentCodec.NONE; this.queryContext = null; this.targetNumFragsPerNode = 1; this.resourceSchedulingDecisionInfo = null; this.shouldIgnoreLeafAffinity = shouldIgnoreLeafAffinity; } @Override public long getSliceTarget() { return parallelizationThreshold; } @Override public int getMaxWidthPerNode() { return maxWidthPerNode; } @Override public int getMaxGlobalWidth() { return maxGlobalWidth; } @Override public double getAffinityFactor() { return affinityFactor; } @Override public boolean useNewAssignmentCreator() { return useNewAssignmentCreator; } @Override public double getAssignmentCreatorBalanceFactor(){ return assignmentCreatorBalanceFactor; } @Override public boolean shouldIgnoreLeafAffinity() { return shouldIgnoreLeafAffinity; } /** * Generate a set of assigned fragments based on the provided fragment tree. Do not allow parallelization stages * to go beyond the global max width. * * @param options Option list * @param foremanNode The driving/foreman node for this query. (this node) * @param queryId The queryId for this query. * @param reader Tool used to read JSON plans * @param rootFragment The root node of the PhysicalPlan that we will be parallelizing. 
* @param session UserSession of user who launched this query. * @param queryContextInfo Info related to the context when query has started. * @return The list of generated PlanFragment protobuf objects to be assigned out to the individual nodes. * @throws ExecutionSetupException */ @Deprecated // ("only used in test") public List<PlanFragmentFull> getFragments( OptionList options, NodeEndpoint foremanNode, QueryId queryId, PhysicalPlanReader reader, Fragment rootFragment, PlanFragmentsIndex.Builder indexBuilder, UserSession session, QueryContextInformation queryContextInfo, FunctionLookupContext functionLookupContext) throws ExecutionSetupException { observer.planParallelStart(); final Stopwatch stopwatch = Stopwatch.createStarted(); // NB: OK to close resources in unit tests only try (final ExecutionPlanningResources resources = getExecutionPlanningResources(queryContext, observer, executorSelectionService, resourceSchedulingDecisionInfo, rootFragment)) { observer.planParallelized(resources.getPlanningSet()); stopwatch.stop(); observer.planAssignmentTime(stopwatch.elapsed(TimeUnit.MILLISECONDS)); stopwatch.start(); List<PlanFragmentFull> fragments = generateWorkUnit(options, foremanNode, queryId, reader, rootFragment, resources.getPlanningSet(), indexBuilder, session, queryContextInfo, functionLookupContext); stopwatch.stop(); observer.planGenerationTime(stopwatch.elapsed(TimeUnit.MILLISECONDS)); observer.plansDistributionComplete(new QueryWorkUnit(fragments)); return fragments; } catch (Exception e) { // Test-only code. 
Wrap in a runtime exception, then re-throw if (e instanceof RuntimeException) { throw (RuntimeException)e; } throw new RuntimeException(e); } } /** * Generate set of assigned fragments based on predefined PlanningSet * versus doing parallelization in place * QueryContext has to be not null from construction of Parallelizer * @param options * @param planningSet * @param reader * @param rootFragment * @return * @throws ExecutionSetupException */ public List<PlanFragmentFull> getFragments( OptionList options, PlanningSet planningSet, PhysicalPlanReader reader, Fragment rootFragment, PlanFragmentsIndex.Builder indexBuilder) throws ExecutionSetupException { Preconditions.checkNotNull(queryContext); final Stopwatch stopwatch = Stopwatch.createStarted(); List<PlanFragmentFull> fragments = generateWorkUnit(options, reader, rootFragment, planningSet, indexBuilder); stopwatch.stop(); observer.planGenerationTime(stopwatch.elapsed(TimeUnit.MILLISECONDS)); observer.plansDistributionComplete(new QueryWorkUnit(fragments)); return fragments; } /** * Select executors, parallelize fragments and get the planning resources. 
* * @ * @param rootFragment * @return */ public static ExecutionPlanningResources getExecutionPlanningResources(QueryContext context, MaestroObserver observer, ExecutorSelectionService executorSelectionService, ResourceSchedulingDecisionInfo resourceSchedulingDecisionInfo, Fragment rootFragment) throws ExecutionSetupException { SimpleParallelizer parallelizer = new SimpleParallelizer(context, observer, executorSelectionService, resourceSchedulingDecisionInfo, context.getGroupResourceInformation()); PlanningSet planningSet = new PlanningSet(); parallelizer.initFragmentWrappers(rootFragment, planningSet); final Set<Wrapper> leafFragments = constructFragmentDependencyGraph(planningSet); // NB: for queries with hard affinity, we need to use all endpoints, so the parallelizer, below, is given an // opportunity to find the nodes that match said affinity // Start parallelizing from leaf fragments Pointer<Boolean> hasHardAffinity = new Pointer<>(false); // Start parallelizing from the leaf fragments. int idealNumFragments = 0; for (Wrapper wrapper : leafFragments) { idealNumFragments += parallelizer.computePhaseStats(wrapper, planningSet, hasHardAffinity); } int idealNumNodes = IntMath.divide(idealNumFragments, parallelizer.targetNumFragsPerNode, RoundingMode.CEILING); final Stopwatch stopWatch = Stopwatch.createStarted(); ExecutorSelectionContext executorContext = new ExecutorSelectionContext(resourceSchedulingDecisionInfo); ExecutorSelectionHandle executorSelectionHandle = hasHardAffinity.value ? 
executorSelectionService.getAllActiveExecutors(executorContext) : executorSelectionService.getExecutors(idealNumNodes, executorContext); GroupResourceInformation groupResourceInformation = new SelectedExecutorsResourceInformation(executorSelectionHandle.getExecutors()); parallelizer.computeMaxWidthPerNode(groupResourceInformation); final ExecutionPlanningResources executionPlanningResources = new ExecutionPlanningResources(planningSet, executorSelectionHandle, groupResourceInformation); final Collection<NodeEndpoint> selectedEndpoints = executorSelectionHandle.getExecutors(); stopWatch.stop(); observer.executorsSelected(stopWatch.elapsed(TimeUnit.MILLISECONDS), idealNumFragments, idealNumNodes, selectedEndpoints.size(), executorSelectionHandle.getPlanDetails() + " selectedEndpoints: " + EndpointHelper.getMinimalString(selectedEndpoints) + " hardAffinity: " + hasHardAffinity.value); if (selectedEndpoints.isEmpty()) { ExecutorSelectionUtils.throwEngineOffline(resourceSchedulingDecisionInfo.getQueueTag()); } for (Wrapper wrapper : leafFragments) { parallelizer.parallelizePhase(wrapper, planningSet, selectedEndpoints); } return executionPlanningResources; } // For every fragment, create a Wrapper in PlanningSet. @VisibleForTesting public void initFragmentWrappers(Fragment rootFragment, PlanningSet planningSet) { planningSet.get(rootFragment); for(ExchangeFragmentPair fragmentPair : rootFragment) { initFragmentWrappers(fragmentPair.getNode(), planningSet); } } /** * Based on the affinity of the Exchange that separates two fragments, setup fragment dependencies. * * @param planningSet * @return Returns a list of leaf fragments in fragment dependency graph. */ private static Set<Wrapper> constructFragmentDependencyGraph(PlanningSet planningSet) { // Set up dependency of fragments based on the affinity of exchange that separates the fragments. 
for(Wrapper currentFragmentWrapper : planningSet) { ExchangeFragmentPair sendingExchange = currentFragmentWrapper.getNode().getSendingExchangePair(); if (sendingExchange != null) { ParallelizationDependency dependency = sendingExchange.getExchange().getParallelizationDependency(); Wrapper receivingFragmentWrapper = planningSet.get(sendingExchange.getNode()); if (dependency == ParallelizationDependency.RECEIVER_DEPENDS_ON_SENDER) { receivingFragmentWrapper.addFragmentDependency(currentFragmentWrapper); } else if (dependency == ParallelizationDependency.SENDER_DEPENDS_ON_RECEIVER) { currentFragmentWrapper.addFragmentDependency(receivingFragmentWrapper); } } } // Identify leaf fragments. Leaf fragments are fragments that have no other fragments depending on them for // parallelization info. First assume all fragments are leaf fragments. Go through the fragments one by one and // remove the fragment on which the current fragment depends on. final Set<Wrapper> roots = Sets.newHashSet(); for(Wrapper w : planningSet) { roots.add(w); } for(Wrapper wrapper : planningSet) { final List<Wrapper> fragmentDependencies = wrapper.getFragmentDependencies(); if (fragmentDependencies != null && fragmentDependencies.size() > 0) { for(Wrapper dependency : fragmentDependencies) { if (roots.contains(dependency)) { roots.remove(dependency); } } } } return roots; } /** * Compute the parallelization stats for a given phase. Dependent phases are processed first before * processing the given phase. * Returns the maximum number of (minor) fragments that could be used by this phase and its dependents */ private int computePhaseStats(Wrapper fragmentWrapper, PlanningSet planningSet, Pointer<Boolean> hasHardAffinity) { // If the fragment is already processed, return. if (fragmentWrapper.isStatsComputationDone()) { return 0; } // First compute the fragment stats for fragments on which this fragment depends on. 
int width = 0;
    // Recursively fold in the widths of every phase this one depends on (depth-first).
    final List<Wrapper> fragmentDependencies = fragmentWrapper.getFragmentDependencies();
    if (fragmentDependencies != null && fragmentDependencies.size() > 0) {
      for(Wrapper dependency : fragmentDependencies) {
        width += computePhaseStats(dependency, planningSet, hasHardAffinity);
      }
    }

    // Find stats. Stats include various factors including cost of physical operators, parallelizability of
    // work in physical operator and affinity of physical operator to certain nodes.
    fragmentWrapper.getNode().getRoot().accept(new StatsCollector(planningSet, executionMap), fragmentWrapper);
    DistributionAffinity fragmentAffinity = fragmentWrapper.getStats().getDistributionAffinity();
    // The affinity picks the parallelizer strategy, which in turn estimates this phase's ideal width.
    width += fragmentAffinity.getFragmentParallelizer()
      .getIdealFragmentWidth(fragmentWrapper, this);
    if (DistributionAffinity.HARD.equals(fragmentAffinity)) {
      // Report back to the caller that at least one phase is pinned to specific nodes.
      hasHardAffinity.value = true;
    }
    // Mark done so shared dependencies are not recounted by other callers.
    fragmentWrapper.statsComputationDone();
    return width;
  }

  /**
   * Helper method for parallelizing a given phase. Dependent phases are parallelized first before
   * parallelizing the given phase.
   * Assumes the stats have already been computed for all phases (see the leading assert).
   *
   * @param fragmentWrapper  phase to parallelize
   * @param planningSet      all phases of the plan
   * @param activeEndpoints  endpoints eligible for minor-fragment assignment
   * @throws PhysicalOperatorSetupException propagated from the underlying fragment parallelizer
   */
  private void parallelizePhase(Wrapper fragmentWrapper, PlanningSet planningSet,
      Collection<NodeEndpoint> activeEndpoints) throws PhysicalOperatorSetupException {
    assert fragmentWrapper.isStatsComputationDone();

    // If the fragment is already parallelized, return. Phases shared as dependencies may be
    // visited more than once.
    if (fragmentWrapper.isEndpointsAssignmentDone()) {
      return;
    }

    // First parallelize fragments on which this fragment depends on.
final List<Wrapper> fragmentDependencies = fragmentWrapper.getFragmentDependencies(); if (fragmentDependencies != null && fragmentDependencies.size() > 0) { for(Wrapper dependency : fragmentDependencies) { parallelizePhase(dependency, planningSet, activeEndpoints); } } fragmentWrapper.getStats().getDistributionAffinity() .getFragmentParallelizer() .parallelizeFragment(fragmentWrapper, this, activeEndpoints); } /** * To facilitate generating workunits * with the assumption that QueryContext is NOT null * it's not always going to be true, since e.g. QueryContextInfo * may change between ctor and this method * @param options * @param reader * @param rootNode * @param planningSet * @return * @throws ExecutionSetupException */ private List<PlanFragmentFull> generateWorkUnit( OptionList options, PhysicalPlanReader reader, Fragment rootNode, PlanningSet planningSet, PlanFragmentsIndex.Builder indexBuilder) throws ExecutionSetupException { Preconditions.checkNotNull(queryContext); return generateWorkUnit(options, queryContext.getCurrentEndpoint(), queryContext.getQueryId(), reader, rootNode, planningSet, indexBuilder, queryContext.getSession(), queryContext.getQueryContextInfo(), queryContext.getFunctionRegistry()); } protected List<PlanFragmentFull> generateWorkUnit( OptionList options, NodeEndpoint foremanNode, QueryId queryId, PhysicalPlanReader reader, Fragment rootNode, PlanningSet planningSet, PlanFragmentsIndex.Builder indexBuilder, UserSession session, QueryContextInformation queryContextInfo, FunctionLookupContext functionLookupContext) throws ExecutionSetupException { final List<PlanFragmentFull> fragments = Lists.newArrayList(); EndpointsIndex.Builder builder = indexBuilder.getEndpointsIndexBuilder(); MajorFragmentAssignmentCache majorFragmentAssignmentsCache = new MajorFragmentAssignmentCache(); // now we generate all the individual plan fragments and associated assignments. 
Note, we need all endpoints // assigned before we can materialize, so we start a new loop here rather than utilizing the previous one. for (Wrapper wrapper : planningSet) { Fragment node = wrapper.getNode(); final PhysicalOperator physicalOperatorRoot = node.getRoot(); boolean isRootNode = rootNode == node; if (isRootNode && wrapper.getWidth() != 1) { throw new ForemanSetupException(String.format("Failure while trying to setup fragment. " + "The root fragment must always have parallelization one. In the current case, the width was set to %d.", wrapper.getWidth())); } // a fragment is self driven if it doesn't rely on any other exchanges. boolean isLeafFragment = node.getReceivingExchangePairs().size() == 0; CoordExecRPC.QueryContextInformation queryContextInformation = CoordExecRPC.QueryContextInformation.newBuilder (queryContextInfo) .setQueryMaxAllocation(wrapper.getMemoryAllocationPerNode()).build(); // come up with a list of minor fragments assigned for each endpoint. final List<FragmentAssignment> assignments = new ArrayList<>(); if(queryContext.getOptions().getOption(VectorizedHashAggOperator.OOB_SPILL_TRIGGER_ENABLED) || queryContext.getOptions().getOption(ExternalSortOperator.OOB_SORT_TRIGGER_ENABLED)) { // collate by node. ArrayListMultimap<Integer, Integer> assignMap = ArrayListMultimap.create(); for (int minorFragmentId = 0; minorFragmentId < wrapper.getWidth(); minorFragmentId++) { assignMap.put(builder.addNodeEndpoint(wrapper.getAssignedEndpoint(minorFragmentId)), minorFragmentId); } // create getAssignment lists. for(int ep : assignMap.keySet()) { assignments.add( FragmentAssignment.newBuilder() .setAssignmentIndex(ep) .addAllMinorFragmentId(assignMap.get(ep)) .build()); } } // Create a minorFragment for each major fragment. PlanFragmentMajor major = null; boolean majorAdded = false; // Create a minorFragment for each major fragment. 
for (int minorFragmentId = 0; minorFragmentId < wrapper.getWidth(); minorFragmentId++) { IndexedFragmentNode iNode = new IndexedFragmentNode(minorFragmentId, wrapper); wrapper.resetAllocation(); PhysicalOperator op = physicalOperatorRoot.accept(new Materializer(wrapper.getSplitSets(), builder), iNode); Preconditions.checkArgument(op instanceof FragmentRoot); FragmentRoot root = (FragmentRoot) op; FragmentHandle handle = FragmentHandle // .newBuilder() // .setMajorFragmentId(wrapper.getMajorFragmentId()) // .setMinorFragmentId(minorFragmentId) .setQueryId(queryId) // .build(); // Build the major fragment only once. if (!majorAdded) { majorAdded = true; // get plan as JSON ByteString plan; ByteString optionsData; try { plan = reader.writeJsonBytes(root, fragmentCodec); optionsData = reader.writeJsonBytes(options, fragmentCodec); } catch (JsonProcessingException e) { throw new ForemanSetupException("Failure while trying to convert fragment into json.", e); } // If any of the operators report ext communicable fragments, fill in the assignment and node details. 
final Set<Integer> extCommunicableMajorFragments = physicalOperatorRoot.accept(new ExtCommunicableFragmentCollector(), wrapper); majorFragmentAssignmentsCache.populateIfAbsent(planningSet, builder, extCommunicableMajorFragments); final List<MajorFragmentAssignment> extFragmentAssignments = majorFragmentAssignmentsCache.getAssignments(planningSet, builder, extCommunicableMajorFragments); major = PlanFragmentMajor.newBuilder() .setForeman(foremanNode) .setFragmentJson(plan) .setHandle(handle.toBuilder().clearMinorFragmentId().build()) .setLeafFragment(isLeafFragment) .setContext(queryContextInformation) .setMemInitial(wrapper.getInitialAllocation()) .setOptionsJson(optionsData) .setCredentials(session.getCredentials()) .setPriority(queryContextInfo.getPriority()) .setFragmentCodec(fragmentCodec) .addAllAllAssignment(assignments) .addAllExtFragmentAssignments(extFragmentAssignments) .build(); if (logger.isTraceEnabled()) { logger.trace( "Remote major fragment:\n {}", DremioStringUtils.unescapeJava(major.toString())); } } final NodeEndpoint assignment = wrapper.getAssignedEndpoint(minorFragmentId); final NodeEndpoint endpoint = builder.getMinimalEndpoint(assignment); List<MinorAttr> attrList = MinorDataCollector.collect(handle, endpoint, root, new MinorDataSerDe(reader,fragmentCodec), indexBuilder); // Build minor specific info and attributes. PlanFragmentMinor minor = PlanFragmentMinor.newBuilder() .setMajorFragmentId(wrapper.getMajorFragmentId()) .setMinorFragmentId(minorFragmentId) .setAssignment(endpoint) .setMemMax(wrapper.getMemoryAllocationPerNode()) .addAllCollector(CountRequiredFragments.getCollectors(root)) .addAllAttrs(attrList) .build(); if (logger.isTraceEnabled()) { logger.trace( "Remote minor fragment:\n {}", DremioStringUtils.unescapeJava(minor.toString())); } fragments.add(new PlanFragmentFull(major, minor)); } } return fragments; } /** * Designed to setup initial values for arriving fragment accounting. 
   */
  protected static class CountRequiredFragments extends AbstractPhysicalVisitor<Void, List<Collector>, RuntimeException> {

    // Not instantiated directly; use getCollectors().
    private CountRequiredFragments() {
    }

    /**
     * Walks the operator tree under {@code root} and returns one {@link Collector} per
     * {@link Receiver} found, describing the incoming minor fragments that receiver expects.
     */
    public static List<Collector> getCollectors(PhysicalOperator root) {
      CountRequiredFragments counter = new CountRequiredFragments();
      List<Collector> collectors = Lists.newArrayList();
      root.accept(counter, collectors);
      return collectors;
    }

    @Override
    public Void visitReceiver(Receiver receiver, List<Collector> collectors) throws RuntimeException {
      // Capture the receiver's expectations: which opposite major fragment feeds it,
      // whether data may arrive out of order, and which minor-fragment indices will send.
      collectors.add(Collector.newBuilder()
        .setIsSpooling(receiver.isSpooling())
        .setOppositeMajorFragmentId(receiver.getSenderMajorFragmentId())
        .setSupportsOutOfOrder(receiver.supportsOutOfOrderExchange())
        .addAllIncomingMinorFragmentIndex(receiver.getProvidingEndpoints())
        .build());
      return null;
    }

    @Override
    public Void visitOp(PhysicalOperator op, List<Collector> collectors) throws RuntimeException {
      // Default case: recurse into children looking for receivers.
      for (PhysicalOperator o : op) {
        o.accept(this, collectors);
      }
      return null;
    }
  }

  // Test-only: change the executor selection service
  @VisibleForTesting
  public void setExecutorSelectionService(ExecutorSelectionService executorSelectionService) {
    this.executorSelectionService = executorSelectionService;
  }

  /**
   * Per-plan cache of {@link MajorFragmentAssignment}s keyed by major fragment id, so the
   * endpoint assignment lists for externally-communicable fragments are built at most once
   * per major fragment rather than once per minor fragment.
   * NOTE(review): not thread-safe; presumably confined to the single planning thread — confirm.
   */
  private class MajorFragmentAssignmentCache {
    private final Map<Integer, MajorFragmentAssignment> majorFragmentAssignments = new HashMap<>();

    /**
     * Returns the assignments for {@code requiredFragments} (populating the cache first).
     * Any id with no matching wrapper in the planning set maps to {@code null} in the result.
     */
    private List<MajorFragmentAssignment> getAssignments(final PlanningSet planningSet,
                                                         final EndpointsIndex.Builder builder,
                                                         final Set<Integer> requiredFragments) {
      populateIfAbsent(planningSet, builder, requiredFragments);
      return requiredFragments.stream().map(majorFragmentAssignments::get).collect(Collectors.toList());
    }

    /**
     * Builds and caches a {@link MajorFragmentAssignment} for every required major fragment
     * not already present in the cache.
     */
    private void populateIfAbsent(final PlanningSet planningSet,
                                  final EndpointsIndex.Builder builder,
                                  final Set<Integer> requiredFragments) {
      if (requiredFragments.isEmpty()) {
        return;
      }

      // Fast path: everything requested is already cached.
      if (majorFragmentAssignments.keySet().containsAll(requiredFragments)) {
        return;
      }

      for (Wrapper wrapper : planningSet) {
        final int majorFragment = wrapper.getMajorFragmentId();
        if (!requiredFragments.contains(majorFragment) || majorFragmentAssignments.containsKey(majorFragment)) {
          continue;
        }

        // Group this major fragment's minor fragments by the endpoint index they run on.
        final ArrayListMultimap<Integer, Integer> assignMap = ArrayListMultimap.create();
        for (int minorFragmentId = 0; minorFragmentId < wrapper.getWidth(); minorFragmentId++) {
          assignMap.put(builder.addNodeEndpoint(wrapper.getAssignedEndpoint(minorFragmentId)), minorFragmentId);
        }

        // create getAssignment lists.
        final List<FragmentAssignment> assignments = assignMap.keySet().stream()
          .map(ep -> FragmentAssignment.newBuilder().setAssignmentIndex(ep).addAllMinorFragmentId(assignMap.get(ep)).build())
          .collect(Collectors.toList());

        majorFragmentAssignments.putIfAbsent(majorFragment, MajorFragmentAssignment.newBuilder()
          .setMajorFragmentId(majorFragment).addAllAllAssignment(assignments).build());
      }
    }
  }
}
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.client; import java.io.IOException; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.List; import java.util.concurrent.ThreadPoolExecutor; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.impl.Log4JLogger; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.ipc.HBaseClient; import org.apache.hadoop.hbase.ipc.HBaseServer; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.JVMClusterUtil; import org.apache.log4j.Level; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; import static org.junit.Assert.*; @Category(MediumTests.class) public class TestMultiParallel { private static final Log LOG = LogFactory.getLog(TestMultiParallel.class); { ((Log4JLogger)HBaseServer.LOG).getLogger().setLevel(Level.ALL); ((Log4JLogger)HBaseClient.LOG).getLogger().setLevel(Level.ALL); } private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private 
static final byte[] VALUE = Bytes.toBytes("value"); private static final byte[] QUALIFIER = Bytes.toBytes("qual"); private static final String FAMILY = "family"; private static final String TEST_TABLE = "multi_test_table"; private static final byte[] BYTES_FAMILY = Bytes.toBytes(FAMILY); private static final byte[] ONE_ROW = Bytes.toBytes("xxx"); private static final byte [][] KEYS = makeKeys(); private static final int slaves = 2; // also used for testing HTable pool size @BeforeClass public static void beforeClass() throws Exception { UTIL.startMiniCluster(slaves); HTable t = UTIL.createTable(Bytes.toBytes(TEST_TABLE), Bytes.toBytes(FAMILY)); UTIL.createMultiRegions(t, Bytes.toBytes(FAMILY)); UTIL.waitTableAvailable(Bytes.toBytes(TEST_TABLE), 15 * 1000); t.close(); } @AfterClass public static void afterClass() throws Exception { UTIL.shutdownMiniCluster(); } @Before public void before() throws IOException { LOG.info("before"); if (UTIL.ensureSomeRegionServersAvailable(slaves)) { // Distribute regions UTIL.getMiniHBaseCluster().getMaster().balance(); } LOG.info("before done"); } private static byte[][] makeKeys() { byte [][] starterKeys = HBaseTestingUtility.KEYS; // Create a "non-uniform" test set with the following characteristics: // a) Unequal number of keys per region // Don't use integer as a multiple, so that we have a number of keys that is // not a multiple of the number of regions int numKeys = (int) ((float) starterKeys.length * 10.33F); List<byte[]> keys = new ArrayList<byte[]>(); for (int i = 0; i < numKeys; i++) { int kIdx = i % starterKeys.length; byte[] k = starterKeys[kIdx]; byte[] cp = new byte[k.length + 1]; System.arraycopy(k, 0, cp, 0, k.length); cp[k.length] = new Integer(i % 256).byteValue(); keys.add(cp); } // b) Same duplicate keys (showing multiple Gets/Puts to the same row, which // should work) // c) keys are not in sorted order (within a region), to ensure that the // sorting code and index mapping doesn't break the functionality for 
(int i = 0; i < 100; i++) { int kIdx = i % starterKeys.length; byte[] k = starterKeys[kIdx]; byte[] cp = new byte[k.length + 1]; System.arraycopy(k, 0, cp, 0, k.length); cp[k.length] = new Integer(i % 256).byteValue(); keys.add(cp); } return keys.toArray(new byte [][] {new byte [] {}}); } /** * This is for testing the active number of threads that were used while * doing a batch operation. It inserts one row per region via the batch * operation, and then checks the number of active threads. * For HBASE-3553 * @throws IOException * @throws InterruptedException * @throws NoSuchFieldException * @throws SecurityException */ @Test(timeout=300000) public void testActiveThreadsCount() throws Exception{ HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE); List<Row> puts = constructPutRequests(); // creates a Put for every region table.batch(puts); Field poolField = table.getClass().getDeclaredField("pool"); poolField.setAccessible(true); ThreadPoolExecutor tExecutor = (ThreadPoolExecutor) poolField.get(table); assertEquals(slaves, tExecutor.getLargestPoolSize()); table.close(); } @Test(timeout=300000) public void testBatchWithGet() throws Exception { LOG.info("test=testBatchWithGet"); HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE); // load test data List<Row> puts = constructPutRequests(); table.batch(puts); // create a list of gets and run it List<Row> gets = new ArrayList<Row>(); for (byte[] k : KEYS) { Get get = new Get(k); get.addColumn(BYTES_FAMILY, QUALIFIER); gets.add(get); } Result[] multiRes = new Result[gets.size()]; table.batch(gets, multiRes); // Same gets using individual call API List<Result> singleRes = new ArrayList<Result>(); for (Row get : gets) { singleRes.add(table.get((Get) get)); } // Compare results Assert.assertEquals(singleRes.size(), multiRes.length); for (int i = 0; i < singleRes.size(); i++) { Assert.assertTrue(singleRes.get(i).containsColumn(BYTES_FAMILY, QUALIFIER)); KeyValue[] singleKvs = singleRes.get(i).raw(); 
KeyValue[] multiKvs = multiRes[i].raw(); for (int j = 0; j < singleKvs.length; j++) { Assert.assertEquals(singleKvs[j], multiKvs[j]); Assert.assertEquals(0, Bytes.compareTo(singleKvs[j].getValue(), multiKvs[j] .getValue())); } } table.close(); } @Test public void testBadFam() throws Exception { LOG.info("test=testBadFam"); HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE); List<Row> actions = new ArrayList<Row>(); Put p = new Put(Bytes.toBytes("row1")); p.add(Bytes.toBytes("bad_family"), Bytes.toBytes("qual"), Bytes.toBytes("value")); actions.add(p); p = new Put(Bytes.toBytes("row2")); p.add(BYTES_FAMILY, Bytes.toBytes("qual"), Bytes.toBytes("value")); actions.add(p); // row1 and row2 should be in the same region. Object [] r = new Object[actions.size()]; try { table.batch(actions, r); fail(); } catch (RetriesExhaustedWithDetailsException ex) { LOG.debug(ex); // good! assertFalse(ex.mayHaveClusterIssues()); } assertEquals(2, r.length); assertTrue(r[0] instanceof Throwable); assertTrue(r[1] instanceof Result); table.close(); } @Test (timeout=300000) public void testFlushCommitsNoAbort() throws Exception { LOG.info("test=testFlushCommitsNoAbort"); doTestFlushCommits(false); } /** * Only run one Multi test with a forced RegionServer abort. Otherwise, the * unit tests will take an unnecessarily long time to run. 
* * @throws Exception */ @Test (timeout=300000) public void testFlushCommitsWithAbort() throws Exception { LOG.info("test=testFlushCommitsWithAbort"); doTestFlushCommits(true); } /** * Set table auto flush to false and test flushing commits * @param doAbort true if abort one regionserver in the testing * @throws Exception */ private void doTestFlushCommits(boolean doAbort) throws Exception { // Load the data LOG.info("get new table"); HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE); table.setAutoFlush(false); table.setWriteBufferSize(10 * 1024 * 1024); LOG.info("constructPutRequests"); List<Row> puts = constructPutRequests(); for (Row put : puts) { table.put((Put) put); } LOG.info("puts"); table.flushCommits(); int liveRScount = UTIL.getMiniHBaseCluster().getLiveRegionServerThreads() .size(); assert liveRScount > 0; JVMClusterUtil.RegionServerThread liveRS = UTIL.getMiniHBaseCluster() .getLiveRegionServerThreads().get(0); if (doAbort) { liveRS.getRegionServer().abort("Aborting for tests", new Exception("doTestFlushCommits")); // If we wait for no regions being online after we abort the server, we // could ensure the master has re-assigned the regions on killed server // after writing successfully. It means the server we aborted is dead // and detected by matser while (liveRS.getRegionServer().getNumberOfOnlineRegions() != 0) { Thread.sleep(10); } // try putting more keys after the abort. same key/qual... 
just validating // no exceptions thrown puts = constructPutRequests(); for (Row put : puts) { table.put((Put) put); } table.flushCommits(); } LOG.info("validating loaded data"); validateLoadedData(table); // Validate server and region count List<JVMClusterUtil.RegionServerThread> liveRSs = UTIL.getMiniHBaseCluster().getLiveRegionServerThreads(); int count = 0; for (JVMClusterUtil.RegionServerThread t: liveRSs) { count++; LOG.info("Count=" + count + ", Alive=" + t.getRegionServer()); } LOG.info("Count=" + count); Assert.assertEquals("Server count=" + count + ", abort=" + doAbort, (doAbort ? (liveRScount - 1) : liveRScount), count); for (JVMClusterUtil.RegionServerThread t: liveRSs) { int regions = ProtobufUtil.getOnlineRegions(t.getRegionServer()).size(); // Assert.assertTrue("Count of regions=" + regions, regions > 10); } table.close(); LOG.info("done"); } @Test (timeout=300000) public void testBatchWithPut() throws Exception { LOG.info("test=testBatchWithPut"); HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE); // put multiple rows using a batch List<Row> puts = constructPutRequests(); Object[] results = table.batch(puts); validateSizeAndEmpty(results, KEYS.length); if (true) { int liveRScount = UTIL.getMiniHBaseCluster().getLiveRegionServerThreads() .size(); assert liveRScount > 0; JVMClusterUtil.RegionServerThread liveRS = UTIL.getMiniHBaseCluster() .getLiveRegionServerThreads().get(0); liveRS.getRegionServer().abort("Aborting for tests", new Exception("testBatchWithPut")); puts = constructPutRequests(); results = table.batch(puts); validateSizeAndEmpty(results, KEYS.length); } validateLoadedData(table); table.close(); } @Test(timeout=300000) public void testBatchWithDelete() throws Exception { LOG.info("test=testBatchWithDelete"); HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE); // Load some data List<Row> puts = constructPutRequests(); Object[] results = table.batch(puts); validateSizeAndEmpty(results, KEYS.length); // Deletes 
List<Row> deletes = new ArrayList<Row>();
    // Build one family-wide Delete per test key and issue them in a single batch.
    for (int i = 0; i < KEYS.length; i++) {
      Delete delete = new Delete(KEYS[i]);
      delete.deleteFamily(BYTES_FAMILY);
      deletes.add(delete);
    }
    results = table.batch(deletes);
    // Batch deletes should return one empty Result per action.
    validateSizeAndEmpty(results, KEYS.length);

    // Get to make sure the rows are really gone.
    for (byte[] k : KEYS) {
      Get get = new Get(k);
      get.addColumn(BYTES_FAMILY, QUALIFIER);
      Assert.assertFalse(table.exists(get));
    }
    table.close();
  }

  /**
   * Loads rows via batch, deletes them through HTable.delete(List), and verifies both that
   * the list is drained and that no row remains readable.
   */
  @Test(timeout=300000)
  public void testHTableDeleteWithList() throws Exception {
    LOG.info("test=testHTableDeleteWithList");
    HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE);

    // Load some data
    List<Row> puts = constructPutRequests();
    Object[] results = table.batch(puts);
    validateSizeAndEmpty(results, KEYS.length);

    // Deletes
    ArrayList<Delete> deletes = new ArrayList<Delete>();
    for (int i = 0; i < KEYS.length; i++) {
      Delete delete = new Delete(KEYS[i]);
      delete.deleteFamily(BYTES_FAMILY);
      deletes.add(delete);
    }
    table.delete(deletes);
    // HTable.delete(List) removes successfully applied Deletes from the passed list, so an
    // empty list here means every delete succeeded.
    Assert.assertTrue(deletes.isEmpty());

    // Get to make sure ...
for (byte[] k : KEYS) { Get get = new Get(k); get.addColumn(BYTES_FAMILY, QUALIFIER); Assert.assertFalse(table.exists(get)); } table.close(); } @Test(timeout=300000) public void testBatchWithManyColsInOneRowGetAndPut() throws Exception { LOG.info("test=testBatchWithManyColsInOneRowGetAndPut"); HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE); List<Row> puts = new ArrayList<Row>(); for (int i = 0; i < 100; i++) { Put put = new Put(ONE_ROW); byte[] qual = Bytes.toBytes("column" + i); put.add(BYTES_FAMILY, qual, VALUE); puts.add(put); } Object[] results = table.batch(puts); // validate validateSizeAndEmpty(results, 100); // get the data back and validate that it is correct List<Row> gets = new ArrayList<Row>(); for (int i = 0; i < 100; i++) { Get get = new Get(ONE_ROW); byte[] qual = Bytes.toBytes("column" + i); get.addColumn(BYTES_FAMILY, qual); gets.add(get); } Object[] multiRes = table.batch(gets); int idx = 0; for (Object r : multiRes) { byte[] qual = Bytes.toBytes("column" + idx); validateResult(r, qual, VALUE); idx++; } table.close(); } @Test(timeout=300000) public void testBatchWithIncrementAndAppend() throws Exception { LOG.info("test=testBatchWithIncrementAndAppend"); final byte[] QUAL1 = Bytes.toBytes("qual1"); final byte[] QUAL2 = Bytes.toBytes("qual2"); final byte[] QUAL3 = Bytes.toBytes("qual3"); final byte[] QUAL4 = Bytes.toBytes("qual4"); HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE); Delete d = new Delete(ONE_ROW); table.delete(d); Put put = new Put(ONE_ROW); put.add(BYTES_FAMILY, QUAL1, Bytes.toBytes("abc")); put.add(BYTES_FAMILY, QUAL2, Bytes.toBytes(1L)); table.put(put); Increment inc = new Increment(ONE_ROW); inc.addColumn(BYTES_FAMILY, QUAL2, 1); inc.addColumn(BYTES_FAMILY, QUAL3, 1); Append a = new Append(ONE_ROW); a.add(BYTES_FAMILY, QUAL1, Bytes.toBytes("def")); a.add(BYTES_FAMILY, QUAL4, Bytes.toBytes("xyz")); List<Row> actions = new ArrayList<Row>(); actions.add(inc); actions.add(a); Object[] multiRes = 
table.batch(actions); validateResult(multiRes[1], QUAL1, Bytes.toBytes("abcdef")); validateResult(multiRes[1], QUAL4, Bytes.toBytes("xyz")); validateResult(multiRes[0], QUAL2, Bytes.toBytes(2L)); validateResult(multiRes[0], QUAL3, Bytes.toBytes(1L)); table.close(); } @Test(timeout=300000) public void testBatchWithMixedActions() throws Exception { LOG.info("test=testBatchWithMixedActions"); HTable table = new HTable(UTIL.getConfiguration(), TEST_TABLE); // Load some data to start Object[] results = table.batch(constructPutRequests()); validateSizeAndEmpty(results, KEYS.length); // Batch: get, get, put(new col), delete, get, get of put, get of deleted, // put List<Row> actions = new ArrayList<Row>(); byte[] qual2 = Bytes.toBytes("qual2"); byte[] val2 = Bytes.toBytes("putvalue2"); // 0 get Get get = new Get(KEYS[10]); get.addColumn(BYTES_FAMILY, QUALIFIER); actions.add(get); // 1 get get = new Get(KEYS[11]); get.addColumn(BYTES_FAMILY, QUALIFIER); actions.add(get); // 2 put of new column Put put = new Put(KEYS[10]); put.add(BYTES_FAMILY, qual2, val2); actions.add(put); // 3 delete Delete delete = new Delete(KEYS[20]); delete.deleteFamily(BYTES_FAMILY); actions.add(delete); // 4 get get = new Get(KEYS[30]); get.addColumn(BYTES_FAMILY, QUALIFIER); actions.add(get); // There used to be a 'get' of a previous put here, but removed // since this API really cannot guarantee order in terms of mixed // get/puts. 
// 5 put of new column put = new Put(KEYS[40]); put.add(BYTES_FAMILY, qual2, val2); actions.add(put); results = table.batch(actions); // Validation validateResult(results[0]); validateResult(results[1]); validateEmpty(results[2]); validateEmpty(results[3]); validateResult(results[4]); validateEmpty(results[5]); // validate last put, externally from the batch get = new Get(KEYS[40]); get.addColumn(BYTES_FAMILY, qual2); Result r = table.get(get); validateResult(r, qual2, val2); table.close(); } // // Helper methods //// private void validateResult(Object r) { validateResult(r, QUALIFIER, VALUE); } private void validateResult(Object r1, byte[] qual, byte[] val) { // TODO provide nice assert here or something. Result r = (Result)r1; Assert.assertTrue(r.containsColumn(BYTES_FAMILY, qual)); Assert.assertEquals(0, Bytes.compareTo(val, r.getValue(BYTES_FAMILY, qual))); } private List<Row> constructPutRequests() { List<Row> puts = new ArrayList<Row>(); for (byte[] k : KEYS) { Put put = new Put(k); put.add(BYTES_FAMILY, QUALIFIER, VALUE); puts.add(put); } return puts; } private void validateLoadedData(HTable table) throws IOException { // get the data back and validate that it is correct for (byte[] k : KEYS) { Get get = new Get(k); get.addColumn(BYTES_FAMILY, QUALIFIER); Result r = table.get(get); Assert.assertTrue(r.containsColumn(BYTES_FAMILY, QUALIFIER)); Assert.assertEquals(0, Bytes.compareTo(VALUE, r .getValue(BYTES_FAMILY, QUALIFIER))); } } private void validateEmpty(Object r1) { Result result = (Result)r1; Assert.assertTrue(result != null); Assert.assertTrue(result.getRow() == null); Assert.assertEquals(0, result.raw().length); } private void validateSizeAndEmpty(Object[] results, int expectedSize) { // Validate got back the same number of Result objects, all empty Assert.assertEquals(expectedSize, results.length); for (Object result : results) { validateEmpty(result); } } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.http; import java.io.FileNotFoundException; import java.io.IOException; import java.io.PrintWriter; import java.net.BindException; import java.net.InetSocketAddress; import java.net.URL; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.servlet.Filter; import javax.servlet.FilterChain; import javax.servlet.FilterConfig; import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequestWrapper; import javax.servlet.http.HttpServletResponse; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.ConfServlet; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.http.AdminAuthorizedServlet; import org.apache.hadoop.http.FilterContainer; import org.apache.hadoop.http.FilterInitializer; 
import org.apache.hadoop.http.HtmlQuoting; import org.apache.hadoop.log.LogLevel; import org.apache.hadoop.metrics.MetricsServlet; import org.apache.hadoop.security.Krb5AndCertsSslSocketConnector; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.Krb5AndCertsSslSocketConnector.MODE; import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.util.ReflectionUtils; import org.mortbay.io.Buffer; import org.mortbay.jetty.Connector; import org.mortbay.jetty.Handler; import org.mortbay.jetty.MimeTypes; import org.mortbay.jetty.Server; import org.mortbay.jetty.handler.ContextHandler; import org.mortbay.jetty.handler.ContextHandlerCollection; import org.mortbay.jetty.nio.SelectChannelConnector; import org.mortbay.jetty.security.SslSocketConnector; import org.mortbay.jetty.servlet.Context; import org.mortbay.jetty.servlet.DefaultServlet; import org.mortbay.jetty.servlet.FilterHolder; import org.mortbay.jetty.servlet.FilterMapping; import org.mortbay.jetty.servlet.ServletHandler; import org.mortbay.jetty.servlet.ServletHolder; import org.mortbay.jetty.webapp.WebAppContext; import org.mortbay.thread.QueuedThreadPool; import org.mortbay.util.MultiException; /** * Create a Jetty embedded server to answer http requests. The primary goal * is to serve up status information for the server. * There are three contexts: * "/logs/" -> points to the log directory * "/static/" -> points to common static files (src/webapps/static) * "/" -> the jsp server code from (src/webapps/<name>) */ public class HttpServer implements FilterContainer { public static final Log LOG = LogFactory.getLog(HttpServer.class); static final String FILTER_INITIALIZER_PROPERTY = "hadoop.http.filter.initializers"; static final String HTTP_MAX_THREADS = "hadoop.http.max.threads"; // The ServletContext attribute where the daemon Configuration // gets stored. 
public static final String CONF_CONTEXT_ATTRIBUTE = "hadoop.conf"; static final String ADMINS_ACL = "admins.acl"; private AccessControlList adminsAcl; protected final Server webServer; protected final Connector listener; protected final WebAppContext webAppContext; protected final boolean findPort; protected final Map<Context, Boolean> defaultContexts = new HashMap<Context, Boolean>(); protected final List<String> filterNames = new ArrayList<String>(); private static final int MAX_RETRIES = 10; static final String STATE_DESCRIPTION_ALIVE = " - alive"; static final String STATE_DESCRIPTION_NOT_LIVE = " - not live"; private final boolean listenerStartedExternally; /** Same as this(name, bindAddress, port, findPort, null); */ public HttpServer(String name, String bindAddress, int port, boolean findPort ) throws IOException { this(name, bindAddress, port, findPort, new Configuration()); } public HttpServer(String name, String bindAddress, int port, boolean findPort, Configuration conf, Connector connector) throws IOException { this(name, bindAddress, port, findPort, conf, null, connector); } /** * Create a status server on the given port. * The jsp scripts are taken from src/webapps/<name>. * @param name The name of the server * @param port The port to use on the server * @param findPort whether the server should start at the given port and * increment by 1 until it finds a free port. * @param conf Configuration */ public HttpServer(String name, String bindAddress, int port, boolean findPort, Configuration conf) throws IOException { this(name, bindAddress, port, findPort, conf, null, null); } public HttpServer(String name, String bindAddress, int port, boolean findPort, Configuration conf, AccessControlList adminsAcl) throws IOException { this(name, bindAddress, port, findPort, conf, adminsAcl, null); } /** * Create a status server on the given port. * The jsp scripts are taken from src/webapps/<name>. 
* @param name The name of the server * @param port The port to use on the server * @param findPort whether the server should start at the given port and * increment by 1 until it finds a free port. * @param conf Configuration * @param adminsAcl {@link AccessControlList} of the admins */ public HttpServer(String name, String bindAddress, int port, boolean findPort, Configuration conf, AccessControlList adminsAcl, Connector connector) throws IOException { webServer = new Server(); this.findPort = findPort; this.adminsAcl = adminsAcl; if(connector == null) { listenerStartedExternally = false; listener = createBaseListener(conf); listener.setHost(bindAddress); listener.setPort(port); } else { listenerStartedExternally = true; listener = connector; } webServer.addConnector(listener); int maxThreads = conf.getInt(HTTP_MAX_THREADS, -1); // If HTTP_MAX_THREADS is not configured, QueueThreadPool() will use the // default value (currently 254). QueuedThreadPool threadPool = maxThreads == -1 ? new QueuedThreadPool() : new QueuedThreadPool(maxThreads); webServer.setThreadPool(threadPool); final String appDir = getWebAppsPath(name); ContextHandlerCollection contexts = new ContextHandlerCollection(); webServer.setHandler(contexts); webAppContext = new WebAppContext(); webAppContext.setDisplayName("WepAppsContext"); webAppContext.setContextPath("/"); webAppContext.setWar(appDir + "/" + name); webAppContext.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf); webAppContext.getServletContext().setAttribute(ADMINS_ACL, adminsAcl); webServer.addHandler(webAppContext); addDefaultApps(contexts, appDir, conf); defineFilter(webAppContext, "krb5Filter", Krb5AndCertsSslSocketConnector.Krb5SslFilter.class.getName(), null, null); addGlobalFilter("safety", QuotingInputFilter.class.getName(), null); final FilterInitializer[] initializers = getFilterInitializers(conf); if (initializers != null) { for(FilterInitializer c : initializers) { c.initFilter(this, conf); } } 
addDefaultServlets(); } /** * Create a required listener for the Jetty instance listening on the port * provided. This wrapper and all subclasses must create at least one * listener. */ public Connector createBaseListener(Configuration conf) throws IOException { return HttpServer.createDefaultChannelConnector(); } @InterfaceAudience.Private public static Connector createDefaultChannelConnector() { SelectChannelConnector ret = new SelectChannelConnector(); ret.setLowResourceMaxIdleTime(10000); ret.setAcceptQueueSize(128); ret.setResolveNames(false); ret.setUseDirectBuffers(false); return ret; } /** Get an array of FilterConfiguration specified in the conf */ private static FilterInitializer[] getFilterInitializers(Configuration conf) { if (conf == null) { return null; } Class<?>[] classes = conf.getClasses(FILTER_INITIALIZER_PROPERTY); if (classes == null) { return null; } FilterInitializer[] initializers = new FilterInitializer[classes.length]; for(int i = 0; i < classes.length; i++) { initializers[i] = (FilterInitializer)ReflectionUtils.newInstance( classes[i], conf); } return initializers; } /** * Add default apps. * @param appDir The application directory * @throws IOException */ protected void addDefaultApps(ContextHandlerCollection parent, final String appDir, Configuration conf) throws IOException { // set up the context for "/logs/" if "hadoop.log.dir" property is defined. 
String logDir = System.getProperty("hadoop.log.dir"); if (logDir != null) { Context logContext = new Context(parent, "/logs"); logContext.setResourceBase(logDir); logContext.addServlet(AdminAuthorizedServlet.class, "/"); logContext.setDisplayName("logs"); setContextAttributes(logContext, conf); defaultContexts.put(logContext, true); } // set up the context for "/static/*" Context staticContext = new Context(parent, "/static"); staticContext.setResourceBase(appDir + "/static"); staticContext.addServlet(DefaultServlet.class, "/*"); staticContext.setDisplayName("static"); setContextAttributes(staticContext, conf); defaultContexts.put(staticContext, true); } private void setContextAttributes(Context context, Configuration conf) { context.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf); context.getServletContext().setAttribute(ADMINS_ACL, adminsAcl); } /** * Add default servlets. */ protected void addDefaultServlets() { // set up default servlets addServlet("stacks", "/stacks", StackServlet.class); addServlet("logLevel", "/logLevel", LogLevel.Servlet.class); addServlet("metrics", "/metrics", MetricsServlet.class); addServlet("conf", "/conf", ConfServlet.class); } public void addContext(Context ctxt, boolean isFiltered) throws IOException { webServer.addHandler(ctxt); defaultContexts.put(ctxt, isFiltered); } /** * Add a context * @param pathSpec The path spec for the context * @param dir The directory containing the context * @param isFiltered if true, the servlet is added to the filter path mapping * @throws IOException */ protected void addContext(String pathSpec, String dir, boolean isFiltered) throws IOException { if (0 == webServer.getHandlers().length) { throw new RuntimeException("Couldn't find handler"); } WebAppContext webAppCtx = new WebAppContext(); webAppCtx.setContextPath(pathSpec); webAppCtx.setWar(dir); addContext(webAppCtx, true); } /** * Set a value in the webapp context. 
These values are available to the jsp * pages as "application.getAttribute(name)". * @param name The name of the attribute * @param value The value of the attribute */ public void setAttribute(String name, Object value) { webAppContext.setAttribute(name, value); } /** * Add a servlet in the server. * @param name The name of the servlet (can be passed as null) * @param pathSpec The path spec for the servlet * @param clazz The servlet class */ public void addServlet(String name, String pathSpec, Class<? extends HttpServlet> clazz) { addInternalServlet(name, pathSpec, clazz, false); addFilterPathMapping(pathSpec, webAppContext); } /** * Add an internal servlet in the server. * Note: This method is to be used for adding servlets that facilitate * internal communication and not for user facing functionality. For * servlets added using this method, filters are not enabled. * * @param name The name of the servlet (can be passed as null) * @param pathSpec The path spec for the servlet * @param clazz The servlet class */ public void addInternalServlet(String name, String pathSpec, Class<? extends HttpServlet> clazz) { addInternalServlet(name, pathSpec, clazz, false); } /** * Add an internal servlet in the server, specifying whether or not to * protect with Kerberos authentication. * Note: This method is to be used for adding servlets that facilitate * internal communication and not for user facing functionality. For * servlets added using this method, filters (except internal Kerberized * filters) are not enabled. * * @param name The name of the servlet (can be passed as null) * @param pathSpec The path spec for the servlet * @param clazz The servlet class */ public void addInternalServlet(String name, String pathSpec, Class<? 
extends HttpServlet> clazz, boolean requireAuth) { ServletHolder holder = new ServletHolder(clazz); if (name != null) { holder.setName(name); } webAppContext.addServlet(holder, pathSpec); if(requireAuth && UserGroupInformation.isSecurityEnabled()) { LOG.info("Adding Kerberos filter to " + name); ServletHandler handler = webAppContext.getServletHandler(); FilterMapping fmap = new FilterMapping(); fmap.setPathSpec(pathSpec); fmap.setFilterName("krb5Filter"); fmap.setDispatches(Handler.ALL); handler.addFilterMapping(fmap); } } /** {@inheritDoc} */ public void addFilter(String name, String classname, Map<String, String> parameters) { final String[] USER_FACING_URLS = { "*.html", "*.jsp" }; defineFilter(webAppContext, name, classname, parameters, USER_FACING_URLS); LOG.info("Added filter " + name + " (class=" + classname + ") to context " + webAppContext.getDisplayName()); final String[] ALL_URLS = { "/*" }; for (Map.Entry<Context, Boolean> e : defaultContexts.entrySet()) { if (e.getValue()) { Context ctx = e.getKey(); defineFilter(ctx, name, classname, parameters, ALL_URLS); LOG.info("Added filter " + name + " (class=" + classname + ") to context " + ctx.getDisplayName()); } } filterNames.add(name); } /** {@inheritDoc} */ public void addGlobalFilter(String name, String classname, Map<String, String> parameters) { final String[] ALL_URLS = { "/*" }; defineFilter(webAppContext, name, classname, parameters, ALL_URLS); for (Context ctx : defaultContexts.keySet()) { defineFilter(ctx, name, classname, parameters, ALL_URLS); } LOG.info("Added global filter" + name + " (class=" + classname + ")"); } /** * Define a filter for a context and set up default url mappings. 
*/ protected void defineFilter(Context ctx, String name, String classname, Map<String,String> parameters, String[] urls) { FilterHolder holder = new FilterHolder(); holder.setName(name); holder.setClassName(classname); holder.setInitParameters(parameters); FilterMapping fmap = new FilterMapping(); fmap.setPathSpecs(urls); fmap.setDispatches(Handler.ALL); fmap.setFilterName(name); ServletHandler handler = ctx.getServletHandler(); handler.addFilter(holder, fmap); } /** * Add the path spec to the filter path mapping. * @param pathSpec The path spec * @param webAppCtx The WebApplicationContext to add to */ protected void addFilterPathMapping(String pathSpec, Context webAppCtx) { ServletHandler handler = webAppCtx.getServletHandler(); for(String name : filterNames) { FilterMapping fmap = new FilterMapping(); fmap.setPathSpec(pathSpec); fmap.setFilterName(name); fmap.setDispatches(Handler.ALL); handler.addFilterMapping(fmap); } } /** * Get the value in the webapp context. * @param name The name of the attribute * @return The value of the attribute */ public Object getAttribute(String name) { return webAppContext.getAttribute(name); } /** * Get the pathname to the webapps files. * @param appName eg "secondary" or "datanode" * @return the pathname as a URL * @throws FileNotFoundException if 'webapps' directory cannot be found on CLASSPATH. */ private String getWebAppsPath(String appName) throws FileNotFoundException { URL url = getClass().getClassLoader().getResource("webapps/" + appName); if (url == null) throw new FileNotFoundException("webapps/" + appName + " not found in CLASSPATH"); String urlString = url.toString(); return urlString.substring(0, urlString.lastIndexOf('/')); } /** * Get the port that the server is on * @return the port */ public int getPort() { return webServer.getConnectors()[0].getLocalPort(); } /** * Set the min, max number of worker threads (simultaneous connections). 
*/ public void setThreads(int min, int max) { QueuedThreadPool pool = (QueuedThreadPool) webServer.getThreadPool() ; pool.setMinThreads(min); pool.setMaxThreads(max); } /** * Configure an ssl listener on the server. * @param addr address to listen on * @param keystore location of the keystore * @param storPass password for the keystore * @param keyPass password for the key * @deprecated Use {@link #addSslListener(InetSocketAddress, Configuration, boolean)} */ @Deprecated public void addSslListener(InetSocketAddress addr, String keystore, String storPass, String keyPass) throws IOException { if (webServer.isStarted()) { throw new IOException("Failed to add ssl listener"); } SslSocketConnector sslListener = new SslSocketConnector(); sslListener.setHost(addr.getHostName()); sslListener.setPort(addr.getPort()); sslListener.setKeystore(keystore); sslListener.setPassword(storPass); sslListener.setKeyPassword(keyPass); webServer.addConnector(sslListener); } /** * Configure an ssl listener on the server. * @param addr address to listen on * @param sslConf conf to retrieve ssl options * @param needClientAuth whether client authentication is required */ public void addSslListener(InetSocketAddress addr, Configuration sslConf, boolean needClientAuth) throws IOException { addSslListener(addr, sslConf, needClientAuth, false); } /** * Configure an ssl listener on the server. 
* @param addr address to listen on * @param sslConf conf to retrieve ssl options * @param needCertsAuth whether x509 certificate authentication is required * @param needKrbAuth whether to allow kerberos auth */ public void addSslListener(InetSocketAddress addr, Configuration sslConf, boolean needCertsAuth, boolean needKrbAuth) throws IOException { if (webServer.isStarted()) { throw new IOException("Failed to add ssl listener"); } if (needCertsAuth) { // setting up SSL truststore for authenticating clients System.setProperty("javax.net.ssl.trustStore", sslConf.get( "ssl.server.truststore.location", "")); System.setProperty("javax.net.ssl.trustStorePassword", sslConf.get( "ssl.server.truststore.password", "")); System.setProperty("javax.net.ssl.trustStoreType", sslConf.get( "ssl.server.truststore.type", "jks")); } Krb5AndCertsSslSocketConnector.MODE mode; if(needCertsAuth && needKrbAuth) mode = MODE.BOTH; else if (!needCertsAuth && needKrbAuth) mode = MODE.KRB; else // Default to certificates mode = MODE.CERTS; SslSocketConnector sslListener = new Krb5AndCertsSslSocketConnector(mode); sslListener.setHost(addr.getHostName()); sslListener.setPort(addr.getPort()); sslListener.setKeystore(sslConf.get("ssl.server.keystore.location")); sslListener.setPassword(sslConf.get("ssl.server.keystore.password", "")); sslListener.setKeyPassword(sslConf.get("ssl.server.keystore.keypassword", "")); sslListener.setKeystoreType(sslConf.get("ssl.server.keystore.type", "jks")); sslListener.setNeedClientAuth(needCertsAuth); webServer.addConnector(sslListener); } /** * Start the server. Does not wait for the server to start. */ public void start() throws IOException { try { if(listenerStartedExternally) { // Expect that listener was started securely if(listener.getLocalPort() == -1) // ... and verify throw new Exception("Exepected webserver's listener to be started " + "previously but wasn't"); // And skip all the port rolling issues. 
webServer.start(); } else { int port = 0; int oriPort = listener.getPort(); // The original requested port while (true) { try { port = webServer.getConnectors()[0].getLocalPort(); LOG.info("Port returned by webServer.getConnectors()[0]." + "getLocalPort() before open() is "+ port + ". Opening the listener on " + oriPort); listener.open(); port = listener.getLocalPort(); LOG.info("listener.getLocalPort() returned " + listener.getLocalPort() + " webServer.getConnectors()[0].getLocalPort() returned " + webServer.getConnectors()[0].getLocalPort()); //Workaround to handle the problem reported in HADOOP-4744 if (port < 0) { Thread.sleep(100); int numRetries = 1; while (port < 0) { LOG.warn("listener.getLocalPort returned " + port); if (numRetries++ > MAX_RETRIES) { throw new Exception(" listener.getLocalPort is returning " + "less than 0 even after " +numRetries+" resets"); } for (int i = 0; i < 2; i++) { LOG.info("Retrying listener.getLocalPort()"); port = listener.getLocalPort(); if (port > 0) { break; } Thread.sleep(200); } if (port > 0) { break; } LOG.info("Bouncing the listener"); listener.close(); Thread.sleep(1000); listener.setPort(oriPort == 0 ? 0 : (oriPort += 1)); listener.open(); Thread.sleep(100); port = listener.getLocalPort(); } } //Workaround end LOG.info("Jetty bound to port " + port); webServer.start(); break; } catch (IOException ex) { // if this is a bind exception, // then try the next port number. 
if (ex instanceof BindException) { if (!findPort) { BindException be = new BindException( "Port in use: " + listener.getHost() + ":" + listener.getPort()); be.initCause(ex); throw be; } } else { LOG.info("HttpServer.start() threw a non Bind IOException"); throw ex; } } catch (MultiException ex) { LOG.info("HttpServer.start() threw a MultiException"); throw ex; } listener.setPort((oriPort += 1)); } } } catch (IOException e) { throw e; } catch (Exception e) { throw new IOException("Problem starting http server", e); } } /** * stop the server */ public void stop() throws Exception { listener.close(); webServer.stop(); } public void join() throws InterruptedException { webServer.join(); } /** * Test for the availability of the web server * @return true if the web server is started, false otherwise */ public boolean isAlive() { return webServer != null && webServer.isStarted(); } /** * Return the host and port of the HttpServer, if live * @return the classname and any HTTP URL */ @Override public String toString() { return listener != null ? ("HttpServer at http://" + listener.getHost() + ":" + listener.getLocalPort() + "/" + (isAlive() ? STATE_DESCRIPTION_ALIVE : STATE_DESCRIPTION_NOT_LIVE)) : "Inactive HttpServer"; } /** * Does the user sending the HttpServletRequest has the administrator ACLs? If * it isn't the case, response will be modified to send an error to the user. * * @param servletContext * @param request * @param response * @return true if admin-authorized, false otherwise * @throws IOException */ public static boolean hasAdministratorAccess( ServletContext servletContext, HttpServletRequest request, HttpServletResponse response) throws IOException { Configuration conf = (Configuration) servletContext.getAttribute(CONF_CONTEXT_ATTRIBUTE); // If there is no authorization, anybody has administrator access. 
if (!conf.getBoolean( CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false)) { return true; } String remoteUser = request.getRemoteUser(); if (remoteUser == null) { return true; } AccessControlList adminsAcl = (AccessControlList) servletContext .getAttribute(ADMINS_ACL); UserGroupInformation remoteUserUGI = UserGroupInformation.createRemoteUser(remoteUser); if (adminsAcl != null) { if (!adminsAcl.isUserAllowed(remoteUserUGI)) { response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "User " + remoteUser + " is unauthorized to access this page. " + "AccessControlList for accessing this page : " + adminsAcl.toString()); return false; } } return true; } /** * A very simple servlet to serve up a text representation of the current * stack traces. It both returns the stacks to the caller and logs them. * Currently the stack traces are done sequentially rather than exactly the * same data. */ public static class StackServlet extends HttpServlet { private static final long serialVersionUID = -6284183679759467039L; @Override public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { // Do the authorization if (!HttpServer.hasAdministratorAccess(getServletContext(), request, response)) { return; } PrintWriter out = response.getWriter(); ReflectionUtils.printThreadInfo(out, ""); out.close(); ReflectionUtils.logThreadInfo(LOG, "jsp requested", 1); } } /** * A Servlet input filter that quotes all HTML active characters in the * parameter names and values. The goal is to quote the characters to make * all of the servlets resistant to cross-site scripting attacks. 
*/ public static class QuotingInputFilter implements Filter { private FilterConfig config; public static class RequestQuoter extends HttpServletRequestWrapper { private final HttpServletRequest rawRequest; public RequestQuoter(HttpServletRequest rawRequest) { super(rawRequest); this.rawRequest = rawRequest; } /** * Return the set of parameter names, quoting each name. */ @SuppressWarnings("unchecked") @Override public Enumeration<String> getParameterNames() { return new Enumeration<String>() { private Enumeration<String> rawIterator = rawRequest.getParameterNames(); @Override public boolean hasMoreElements() { return rawIterator.hasMoreElements(); } @Override public String nextElement() { return HtmlQuoting.quoteHtmlChars(rawIterator.nextElement()); } }; } /** * Unquote the name and quote the value. */ @Override public String getParameter(String name) { return HtmlQuoting.quoteHtmlChars(rawRequest.getParameter (HtmlQuoting.unquoteHtmlChars(name))); } @Override public String[] getParameterValues(String name) { String unquoteName = HtmlQuoting.unquoteHtmlChars(name); String[] unquoteValue = rawRequest.getParameterValues(unquoteName); String[] result = new String[unquoteValue.length]; for(int i=0; i < result.length; ++i) { result[i] = HtmlQuoting.quoteHtmlChars(unquoteValue[i]); } return result; } @SuppressWarnings("unchecked") @Override public Map<String, String[]> getParameterMap() { Map<String, String[]> result = new HashMap<String,String[]>(); Map<String, String[]> raw = rawRequest.getParameterMap(); for (Map.Entry<String,String[]> item: raw.entrySet()) { String[] rawValue = item.getValue(); String[] cookedValue = new String[rawValue.length]; for(int i=0; i< rawValue.length; ++i) { cookedValue[i] = HtmlQuoting.quoteHtmlChars(rawValue[i]); } result.put(HtmlQuoting.quoteHtmlChars(item.getKey()), cookedValue); } return result; } /** * Quote the url so that users specifying the HOST HTTP header * can't inject attacks. 
*/ @Override public StringBuffer getRequestURL(){ String url = rawRequest.getRequestURL().toString(); return new StringBuffer(HtmlQuoting.quoteHtmlChars(url)); } /** * Quote the server name so that users specifying the HOST HTTP header * can't inject attacks. */ @Override public String getServerName() { return HtmlQuoting.quoteHtmlChars(rawRequest.getServerName()); } } @Override public void init(FilterConfig config) throws ServletException { this.config = config; } @Override public void destroy() { } @Override public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain ) throws IOException, ServletException { HttpServletRequestWrapper quoted = new RequestQuoter((HttpServletRequest) request); HttpServletResponse httpResponse = (HttpServletResponse) response; String mime = inferMimeType(request); if (mime == null) { httpResponse.setContentType("text/plain; charset=utf-8"); } else if (mime.startsWith("text/html")) { // HTML with unspecified encoding, we want to // force HTML with utf-8 encoding // This is to avoid the following security issue: // http://openmya.hacker.jp/hasegawa/security/utf7cs.html httpResponse.setContentType("text/html; charset=utf-8"); } else if (mime.startsWith("application/xml")) { httpResponse.setContentType("text/xml; charset=utf-8"); } chain.doFilter(quoted, httpResponse); } /** * Infer the mime type for the response based on the extension of the request * URI. Returns null if unknown. */ private String inferMimeType(ServletRequest request) { String path = ((HttpServletRequest)request).getRequestURI(); ContextHandler.SContext sContext = (ContextHandler.SContext)config.getServletContext(); MimeTypes mimes = sContext.getContextHandler().getMimeTypes(); Buffer mimeBuffer = mimes.getMimeByExtension(path); return (mimeBuffer == null) ? null : mimeBuffer.toString(); } } }
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.quicksight.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * Result object returned by the QuickSight UpdateTheme operation. Carries the
 * identifiers of the updated theme, the creation status of its new version,
 * and the HTTP status / request id of the call.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/quicksight-2018-04-01/UpdateTheme" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateThemeResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** The ID for the theme. */
    private String themeId;
    /** The Amazon Resource Name (ARN) for the theme. */
    private String arn;
    /** The Amazon Resource Name (ARN) for the new version of the theme. */
    private String versionArn;
    /** The creation status of the theme. */
    private String creationStatus;
    /** The HTTP status of the request. */
    private Integer status;
    /** The Amazon Web Services request ID for this operation. */
    private String requestId;

    /**
     * Sets the ID for the theme.
     *
     * @param themeId
     *        The ID for the theme.
     */
    public void setThemeId(String themeId) {
        this.themeId = themeId;
    }

    /**
     * Returns the ID for the theme.
     *
     * @return The ID for the theme.
     */
    public String getThemeId() {
        return this.themeId;
    }

    /**
     * Fluent setter for the theme ID.
     *
     * @param themeId
     *        The ID for the theme.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateThemeResult withThemeId(String themeId) {
        this.themeId = themeId;
        return this;
    }

    /**
     * Sets the Amazon Resource Name (ARN) for the theme.
     *
     * @param arn
     *        The Amazon Resource Name (ARN) for the theme.
     */
    public void setArn(String arn) {
        this.arn = arn;
    }

    /**
     * Returns the Amazon Resource Name (ARN) for the theme.
     *
     * @return The Amazon Resource Name (ARN) for the theme.
     */
    public String getArn() {
        return this.arn;
    }

    /**
     * Fluent setter for the theme ARN.
     *
     * @param arn
     *        The Amazon Resource Name (ARN) for the theme.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateThemeResult withArn(String arn) {
        this.arn = arn;
        return this;
    }

    /**
     * Sets the Amazon Resource Name (ARN) for the new version of the theme.
     *
     * @param versionArn
     *        The Amazon Resource Name (ARN) for the new version of the theme.
     */
    public void setVersionArn(String versionArn) {
        this.versionArn = versionArn;
    }

    /**
     * Returns the Amazon Resource Name (ARN) for the new version of the theme.
     *
     * @return The Amazon Resource Name (ARN) for the new version of the theme.
     */
    public String getVersionArn() {
        return this.versionArn;
    }

    /**
     * Fluent setter for the new-version ARN.
     *
     * @param versionArn
     *        The Amazon Resource Name (ARN) for the new version of the theme.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateThemeResult withVersionArn(String versionArn) {
        this.versionArn = versionArn;
        return this;
    }

    /**
     * Sets the creation status of the theme.
     *
     * @param creationStatus
     *        The creation status of the theme.
     * @see ResourceStatus
     */
    public void setCreationStatus(String creationStatus) {
        this.creationStatus = creationStatus;
    }

    /**
     * Returns the creation status of the theme.
     *
     * @return The creation status of the theme.
     * @see ResourceStatus
     */
    public String getCreationStatus() {
        return this.creationStatus;
    }

    /**
     * Fluent setter for the creation status (string form).
     *
     * @param creationStatus
     *        The creation status of the theme.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ResourceStatus
     */
    public UpdateThemeResult withCreationStatus(String creationStatus) {
        this.creationStatus = creationStatus;
        return this;
    }

    /**
     * Fluent setter for the creation status (enum form); stores the enum's string value.
     *
     * @param creationStatus
     *        The creation status of the theme.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ResourceStatus
     */
    public UpdateThemeResult withCreationStatus(ResourceStatus creationStatus) {
        this.creationStatus = creationStatus.toString();
        return this;
    }

    /**
     * Sets the HTTP status of the request.
     *
     * @param status
     *        The HTTP status of the request.
     */
    public void setStatus(Integer status) {
        this.status = status;
    }

    /**
     * Returns the HTTP status of the request.
     *
     * @return The HTTP status of the request.
     */
    public Integer getStatus() {
        return this.status;
    }

    /**
     * Fluent setter for the HTTP status.
     *
     * @param status
     *        The HTTP status of the request.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateThemeResult withStatus(Integer status) {
        this.status = status;
        return this;
    }

    /**
     * Sets the Amazon Web Services request ID for this operation.
     *
     * @param requestId
     *        The Amazon Web Services request ID for this operation.
     */
    public void setRequestId(String requestId) {
        this.requestId = requestId;
    }

    /**
     * Returns the Amazon Web Services request ID for this operation.
     *
     * @return The Amazon Web Services request ID for this operation.
     */
    public String getRequestId() {
        return this.requestId;
    }

    /**
     * Fluent setter for the request ID.
     *
     * @param requestId
     *        The Amazon Web Services request ID for this operation.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateThemeResult withRequestId(String requestId) {
        this.requestId = requestId;
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder out = new StringBuilder("{");
        if (getThemeId() != null) {
            out.append("ThemeId: ").append(getThemeId()).append(",");
        }
        if (getArn() != null) {
            out.append("Arn: ").append(getArn()).append(",");
        }
        if (getVersionArn() != null) {
            out.append("VersionArn: ").append(getVersionArn()).append(",");
        }
        if (getCreationStatus() != null) {
            out.append("CreationStatus: ").append(getCreationStatus()).append(",");
        }
        if (getStatus() != null) {
            out.append("Status: ").append(getStatus()).append(",");
        }
        if (getRequestId() != null) {
            out.append("RequestId: ").append(getRequestId());
        }
        return out.append("}").toString();
    }

    /** Null-tolerant equality check: true when both are null or both are equal. */
    private static boolean eq(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof UpdateThemeResult)) {
            return false;
        }
        UpdateThemeResult that = (UpdateThemeResult) obj;
        return eq(that.getThemeId(), this.getThemeId())
                && eq(that.getArn(), this.getArn())
                && eq(that.getVersionArn(), this.getVersionArn())
                && eq(that.getCreationStatus(), this.getCreationStatus())
                && eq(that.getStatus(), this.getStatus())
                && eq(that.getRequestId(), this.getRequestId());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        // Same fold as the generated prime*h + fieldHash chain, field by field.
        Object[] fields = { getThemeId(), getArn(), getVersionArn(), getCreationStatus(), getStatus(), getRequestId() };
        for (Object field : fields) {
            hashCode = prime * hashCode + ((field == null) ? 0 : field.hashCode());
        }
        return hashCode;
    }

    @Override
    public UpdateThemeResult clone() {
        try {
            return (UpdateThemeResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
/* * Copyright 2002-2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.jdbc.datasource; import java.lang.reflect.InvocationHandler; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.sql.Connection; import java.sql.SQLException; import java.sql.Statement; import javax.sql.DataSource; import org.springframework.transaction.support.TransactionSynchronizationManager; import org.springframework.util.Assert; /** * Proxy for a target JDBC {@link javax.sql.DataSource}, adding awareness of * Spring-managed transactions. Similar to a transactional JNDI DataSource * as provided by a Java EE server. * * <p>Data access code that should remain unaware of Spring's data access support * can work with this proxy to seamlessly participate in Spring-managed transactions. * Note that the transaction manager, for example {@link DataSourceTransactionManager}, * still needs to work with the underlying DataSource, <i>not</i> with this proxy. * * <p><b>Make sure that TransactionAwareDataSourceProxy is the outermost DataSource * of a chain of DataSource proxies/adapters.</b> TransactionAwareDataSourceProxy * can delegate either directly to the target connection pool or to some * intermediary proxy/adapter like {@link LazyConnectionDataSourceProxy} or * {@link UserCredentialsDataSourceAdapter}. 
* * <p>Delegates to {@link DataSourceUtils} for automatically participating in * thread-bound transactions, for example managed by {@link DataSourceTransactionManager}. * {@code getConnection} calls and {@code close} calls on returned Connections * will behave properly within a transaction, i.e. always operate on the transactional * Connection. If not within a transaction, normal DataSource behavior applies. * * <p>This proxy allows data access code to work with the plain JDBC API and still * participate in Spring-managed transactions, similar to JDBC code in a Java EE/JTA * environment. However, if possible, use Spring's DataSourceUtils, JdbcTemplate or * JDBC operation objects to get transaction participation even without a proxy for * the target DataSource, avoiding the need to define such a proxy in the first place. * * <p>As a further effect, using a transaction-aware DataSource will apply remaining * transaction timeouts to all created JDBC (Prepared/Callable)Statement. This means * that all operations performed through standard JDBC will automatically participate * in Spring-managed transaction timeouts. * * <p><b>NOTE:</b> This DataSource proxy needs to return wrapped Connections * (which implement the {@link ConnectionProxy} interface) in order to handle * close calls properly. Therefore, the returned Connections cannot be cast * to a native JDBC Connection type such as OracleConnection or to a connection * pool implementation type. Use a corresponding * {@link org.springframework.jdbc.support.nativejdbc.NativeJdbcExtractor} * or JDBC 4's {@link Connection#unwrap} to retrieve the native JDBC Connection. 
 *
 * @author Juergen Hoeller
 * @since 1.1
 * @see javax.sql.DataSource#getConnection()
 * @see java.sql.Connection#close()
 * @see DataSourceUtils#doGetConnection
 * @see DataSourceUtils#applyTransactionTimeout
 * @see DataSourceUtils#doReleaseConnection
 */
public class TransactionAwareDataSourceProxy extends DelegatingDataSource {

	// When true, transactional proxies re-fetch the target Connection for every
	// call instead of pinning one for the lifetime of the handle.
	private boolean reobtainTransactionalConnections = false;


	/**
	 * Create a new TransactionAwareDataSourceProxy.
	 * @see #setTargetDataSource
	 */
	public TransactionAwareDataSourceProxy() {
	}

	/**
	 * Create a new TransactionAwareDataSourceProxy.
	 * @param targetDataSource the target DataSource
	 */
	public TransactionAwareDataSourceProxy(DataSource targetDataSource) {
		super(targetDataSource);
	}

	/**
	 * Specify whether to reobtain the target Connection for each operation
	 * performed within a transaction.
	 * <p>The default is "false". Specify "true" to reobtain transactional
	 * Connections for every call on the Connection proxy; this is advisable
	 * on JBoss if you hold on to a Connection handle across transaction boundaries.
	 * <p>The effect of this setting is similar to the
	 * "hibernate.connection.release_mode" value "after_statement".
	 */
	public void setReobtainTransactionalConnections(boolean reobtainTransactionalConnections) {
		this.reobtainTransactionalConnections = reobtainTransactionalConnections;
	}


	/**
	 * Delegates to DataSourceUtils for automatically participating in Spring-managed
	 * transactions. Throws the original SQLException, if any.
	 * <p>The returned Connection handle implements the ConnectionProxy interface,
	 * allowing to retrieve the underlying target Connection.
	 * @return a transactional Connection if any, a new one else
	 * @throws SQLException if Connection retrieval from the target DataSource fails
	 * @see DataSourceUtils#doGetConnection
	 * @see ConnectionProxy#getTargetConnection
	 */
	@Override
	public Connection getConnection() throws SQLException {
		DataSource ds = getTargetDataSource();
		Assert.state(ds != null, "'targetDataSource' is required");
		return getTransactionAwareConnectionProxy(ds);
	}

	/**
	 * Wraps the given Connection with a proxy that delegates every method call to it
	 * but delegates {@code close()} calls to DataSourceUtils.
	 * <p>Note: no physical Connection is fetched here; the handler obtains one
	 * lazily on first use, so creating the proxy itself never blocks.
	 * @param targetDataSource DataSource that the Connection came from
	 * @return the wrapped Connection
	 * @see java.sql.Connection#close()
	 * @see DataSourceUtils#doReleaseConnection
	 */
	protected Connection getTransactionAwareConnectionProxy(DataSource targetDataSource) {
		return (Connection) Proxy.newProxyInstance(
				ConnectionProxy.class.getClassLoader(),
				new Class<?>[] {ConnectionProxy.class},
				new TransactionAwareInvocationHandler(targetDataSource));
	}

	/**
	 * Determine whether to obtain a fixed target Connection for the proxy
	 * or to reobtain the target Connection for each operation.
	 * <p>The default implementation returns {@code true} for all
	 * standard cases. This can be overridden through the
	 * {@link #setReobtainTransactionalConnections "reobtainTransactionalConnections"}
	 * flag, which enforces a non-fixed target Connection within an active transaction.
	 * Note that non-transactional access will always use a fixed Connection.
	 * @param targetDataSource the target DataSource
	 */
	protected boolean shouldObtainFixedConnection(DataSource targetDataSource) {
		// Outside an active transaction synchronization there is nothing to rebind
		// to, so a fixed Connection is always used; within one, the configurable
		// "reobtainTransactionalConnections" flag decides.
		return (!TransactionSynchronizationManager.isSynchronizationActive() ||
				!this.reobtainTransactionalConnections);
	}


	/**
	 * Invocation handler that delegates close calls on JDBC Connections
	 * to DataSourceUtils for being aware of thread-bound transactions.
	 */
	private class TransactionAwareInvocationHandler implements InvocationHandler {

		private final DataSource targetDataSource;

		// Lazily obtained fixed target Connection; stays null in reobtain mode.
		private Connection target;

		// Tracks logical closure of the proxy handle (not of the physical Connection).
		private boolean closed = false;

		public TransactionAwareInvocationHandler(DataSource targetDataSource) {
			this.targetDataSource = targetDataSource;
		}

		@Override
		public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
			// Invocation on ConnectionProxy interface coming in...
			// NOTE: the identity/lifecycle methods below are deliberately answered
			// WITHOUT touching the target Connection, so they work both before a
			// Connection has been obtained and after the handle has been closed.

			if (method.getName().equals("equals")) {
				// Only considered as equal when proxies are identical.
				return (proxy == args[0]);
			}
			else if (method.getName().equals("hashCode")) {
				// Use hashCode of Connection proxy.
				return System.identityHashCode(proxy);
			}
			else if (method.getName().equals("toString")) {
				// Allow for differentiating between the proxy and the raw Connection.
				StringBuilder sb = new StringBuilder("Transaction-aware proxy for target Connection ");
				if (this.target != null) {
					sb.append("[").append(this.target.toString()).append("]");
				}
				else {
					sb.append(" from DataSource [").append(this.targetDataSource).append("]");
				}
				return sb.toString();
			}
			else if (method.getName().equals("unwrap")) {
				if (((Class<?>) args[0]).isInstance(proxy)) {
					return proxy;
				}
			}
			else if (method.getName().equals("isWrapperFor")) {
				if (((Class<?>) args[0]).isInstance(proxy)) {
					return true;
				}
			}
			else if (method.getName().equals("close")) {
				// Handle close method: only close if not within a transaction.
				// doReleaseConnection tolerates a null target, so this is safe even
				// when no Connection was ever obtained.
				DataSourceUtils.doReleaseConnection(this.target, this.targetDataSource);
				this.closed = true;
				return null;
			}
			else if (method.getName().equals("isClosed")) {
				return this.closed;
			}

			if (this.target == null) {
				if (this.closed) {
					throw new SQLException("Connection handle already closed");
				}
				if (shouldObtainFixedConnection(this.targetDataSource)) {
					// Pin a fixed Connection for the lifetime of this handle.
					this.target = DataSourceUtils.doGetConnection(this.targetDataSource);
				}
			}
			Connection actualTarget = this.target;
			if (actualTarget == null) {
				// Reobtain mode: fetch a Connection for this one invocation only.
				actualTarget = DataSourceUtils.doGetConnection(this.targetDataSource);
			}

			if (method.getName().equals("getTargetConnection")) {
				// Handle getTargetConnection method: return underlying Connection.
				return actualTarget;
			}

			// Invoke method on target Connection.
			try {
				Object retVal = method.invoke(actualTarget, args);

				// If return value is a Statement, apply transaction timeout.
				// Applies to createStatement, prepareStatement, prepareCall.
				if (retVal instanceof Statement) {
					DataSourceUtils.applyTransactionTimeout((Statement) retVal, this.targetDataSource);
				}

				return retVal;
			}
			catch (InvocationTargetException ex) {
				// Unwrap to rethrow the Connection's original exception type.
				throw ex.getTargetException();
			}
			finally {
				// A Connection fetched just for this invocation (reobtain mode)
				// must be released again; a pinned one is kept until close().
				if (actualTarget != this.target) {
					DataSourceUtils.doReleaseConnection(actualTarget, this.targetDataSource);
				}
			}
		}
	}

}
/* * $Header: /home/cvs/jakarta-tomcat-4.0/catalina/src/share/org/apache/naming/resources/FileDirContext.java,v 1.14 2002/09/18 13:54:33 remm Exp $ * $Revision: 1.14 $ * $Date: 2002/09/18 13:54:33 $ * * ==================================================================== * * The Apache Software License, Version 1.1 * * Copyright (c) 1999 The Apache Software Foundation. All rights * reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. The end-user documentation included with the redistribution, if * any, must include the following acknowlegement: * "This product includes software developed by the * Apache Software Foundation (http://www.apache.org/)." * Alternately, this acknowlegement may appear in the software itself, * if and wherever such third-party acknowlegements normally appear. * * 4. The names "The Jakarta Project", "Tomcat", and "Apache Software * Foundation" must not be used to endorse or promote products derived * from this software without prior written permission. For written * permission, please contact apache@apache.org. * * 5. Products derived from this software may not be called "Apache" * nor may "Apache" appear in their names without prior written * permission of the Apache Group. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. 
IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. * * [Additional notices, if required by prior licensing conditions] * */ package org.apache.naming.resources; import java.util.Arrays; import java.util.Hashtable; import java.util.Vector; import java.util.Date; import java.io.File; import java.io.FileInputStream; import java.io.InputStream; import java.io.FileOutputStream; import java.io.FileNotFoundException; import java.io.OutputStream; import java.io.IOException; import javax.naming.Context; import javax.naming.Name; import javax.naming.NameParser; import javax.naming.NamingEnumeration; import javax.naming.NamingException; import javax.naming.CompositeName; import javax.naming.NameParser; import javax.naming.OperationNotSupportedException; import javax.naming.NameAlreadyBoundException; import javax.naming.directory.DirContext; import javax.naming.directory.Attributes; import javax.naming.directory.Attribute; import javax.naming.directory.ModificationItem; import javax.naming.directory.SearchControls; import org.apache.naming.StringManager; import org.apache.naming.NameParserImpl; import org.apache.naming.NamingEntry; import org.apache.naming.NamingContextBindingsEnumeration; import 
org.apache.naming.NamingContextEnumeration; /** * Filesystem Directory Context implementation helper class. * * @author Remy Maucherat * @version $Revision: 1.14 $ $Date: 2002/09/18 13:54:33 $ */ public class FileDirContext extends BaseDirContext { // -------------------------------------------------------------- Constants /** * The descriptive information string for this implementation. */ protected static final int BUFFER_SIZE = 2048; // ----------------------------------------------------------- Constructors /** * Builds a file directory context using the given environment. */ public FileDirContext() { super(); } /** * Builds a file directory context using the given environment. */ public FileDirContext(Hashtable env) { super(env); } // ----------------------------------------------------- Instance Variables /** * The document base directory. */ protected File base = null; /** * Absolute normalized filename of the base. */ protected String absoluteBase = null; /** * Case sensitivity. */ protected boolean caseSensitive = true; /** * Allow linking. */ protected boolean allowLinking = false; // ------------------------------------------------------------- Properties /** * Set the document root. 
* * @param docBase The new document root * * @exception IllegalArgumentException if the specified value is not * supported by this implementation * @exception IllegalArgumentException if this would create a * malformed URL */ public void setDocBase(String docBase) { // Validate the format of the proposed document root if (docBase == null) throw new IllegalArgumentException (sm.getString("resources.null")); // Calculate a File object referencing this document base directory base = new File(docBase); try { base = base.getCanonicalFile(); } catch (IOException e) { // Ignore } // Validate that the document base is an existing directory if (!base.exists() || !base.isDirectory() || !base.canRead()) throw new IllegalArgumentException (sm.getString("fileResources.base", docBase)); this.absoluteBase = base.getAbsolutePath(); super.setDocBase(docBase); } /** * Set case sensitivity. */ public void setCaseSensitive(boolean caseSensitive) { this.caseSensitive = caseSensitive; } /** * Is case sensitive ? */ public boolean isCaseSensitive() { return caseSensitive; } /** * Set allow linking. */ public void setAllowLinking(boolean allowLinking) { this.allowLinking = allowLinking; } /** * Is linking allowed. */ public boolean getAllowLinking() { return allowLinking; } // --------------------------------------------------------- Public Methods /** * Release any resources allocated for this directory context. */ public void release() { caseSensitive = true; allowLinking = false; absoluteBase = null; base = null; super.release(); } // -------------------------------------------------------- Context Methods /** * Retrieves the named object. 
* * @param name the name of the object to look up * @return the object bound to name * @exception NamingException if a naming exception is encountered */ public Object lookup(String name) throws NamingException { Object result = null; File file = file(name); if (file == null) throw new NamingException (sm.getString("resources.notFound", name)); if (file.isDirectory()) { FileDirContext tempContext = new FileDirContext(env); tempContext.setDocBase(file.getPath()); result = tempContext; } else { result = new FileResource(file); } return result; } /** * Unbinds the named object. Removes the terminal atomic name in name * from the target context--that named by all but the terminal atomic * part of name. * <p> * This method is idempotent. It succeeds even if the terminal atomic * name is not bound in the target context, but throws * NameNotFoundException if any of the intermediate contexts do not exist. * * @param name the name to bind; may not be empty * @exception NameNotFoundException if an intermediate context does not * exist * @exception NamingException if a naming exception is encountered */ public void unbind(String name) throws NamingException { File file = file(name); if (file == null) throw new NamingException (sm.getString("resources.notFound", name)); if (!file.delete()) throw new NamingException (sm.getString("resources.unbindFailed", name)); } /** * Binds a new name to the object bound to an old name, and unbinds the * old name. Both names are relative to this context. Any attributes * associated with the old name become associated with the new name. * Intermediate contexts of the old name are not changed. 
* * @param oldName the name of the existing binding; may not be empty * @param newName the name of the new binding; may not be empty * @exception NameAlreadyBoundException if newName is already bound * @exception NamingException if a naming exception is encountered */ public void rename(String oldName, String newName) throws NamingException { File file = file(oldName); if (file == null) throw new NamingException (sm.getString("resources.notFound", oldName)); File newFile = new File(base, newName); file.renameTo(newFile); } /** * Enumerates the names bound in the named context, along with the class * names of objects bound to them. The contents of any subcontexts are * not included. * <p> * If a binding is added to or removed from this context, its effect on * an enumeration previously returned is undefined. * * @param name the name of the context to list * @return an enumeration of the names and class names of the bindings in * this context. Each element of the enumeration is of type NameClassPair. * @exception NamingException if a naming exception is encountered */ public NamingEnumeration list(String name) throws NamingException { File file = file(name); if (file == null) throw new NamingException (sm.getString("resources.notFound", name)); Vector entries = list(file); return new NamingContextEnumeration(entries); } /** * Enumerates the names bound in the named context, along with the * objects bound to them. The contents of any subcontexts are not * included. * <p> * If a binding is added to or removed from this context, its effect on * an enumeration previously returned is undefined. * * @param name the name of the context to list * @return an enumeration of the bindings in this context. * Each element of the enumeration is of type Binding. 
* @exception NamingException if a naming exception is encountered */ public NamingEnumeration listBindings(String name) throws NamingException { File file = file(name); if (file == null) throw new NamingException (sm.getString("resources.notFound", name)); Vector entries = list(file); return new NamingContextBindingsEnumeration(entries); } /** * Destroys the named context and removes it from the namespace. Any * attributes associated with the name are also removed. Intermediate * contexts are not destroyed. * <p> * This method is idempotent. It succeeds even if the terminal atomic * name is not bound in the target context, but throws * NameNotFoundException if any of the intermediate contexts do not exist. * * In a federated naming system, a context from one naming system may be * bound to a name in another. One can subsequently look up and perform * operations on the foreign context using a composite name. However, an * attempt destroy the context using this composite name will fail with * NotContextException, because the foreign context is not a "subcontext" * of the context in which it is bound. Instead, use unbind() to remove * the binding of the foreign context. Destroying the foreign context * requires that the destroySubcontext() be performed on a context from * the foreign context's "native" naming system. * * @param name the name of the context to be destroyed; may not be empty * @exception NameNotFoundException if an intermediate context does not * exist * @exception NotContextException if the name is bound but does not name * a context, or does not name a context of the appropriate type */ public void destroySubcontext(String name) throws NamingException { unbind(name); } /** * Retrieves the named object, following links except for the terminal * atomic component of the name. If the object bound to name is not a * link, returns the object itself. 
* * @param name the name of the object to look up * @return the object bound to name, not following the terminal link * (if any). * @exception NamingException if a naming exception is encountered */ public Object lookupLink(String name) throws NamingException { // Note : Links are not supported return lookup(name); } /** * Retrieves the full name of this context within its own namespace. * <p> * Many naming services have a notion of a "full name" for objects in * their respective namespaces. For example, an LDAP entry has a * distinguished name, and a DNS record has a fully qualified name. This * method allows the client application to retrieve this name. The string * returned by this method is not a JNDI composite name and should not be * passed directly to context methods. In naming systems for which the * notion of full name does not make sense, * OperationNotSupportedException is thrown. * * @return this context's name in its own namespace; never null * @exception OperationNotSupportedException if the naming system does * not have the notion of a full name * @exception NamingException if a naming exception is encountered */ public String getNameInNamespace() throws NamingException { return docBase; } // ----------------------------------------------------- DirContext Methods /** * Retrieves selected attributes associated with a named object. * See the class description regarding attribute models, attribute type * names, and operational attributes. * * @return the requested attributes; never null * @param name the name of the object from which to retrieve attributes * @param attrIds the identifiers of the attributes to retrieve. 
null * indicates that all attributes should be retrieved; an empty array * indicates that none should be retrieved * @exception NamingException if a naming exception is encountered */ public Attributes getAttributes(String name, String[] attrIds) throws NamingException { // Building attribute list File file = file(name); if (file == null) throw new NamingException (sm.getString("resources.notFound", name)); return new FileResourceAttributes(file); } /** * Modifies the attributes associated with a named object. The order of * the modifications is not specified. Where possible, the modifications * are performed atomically. * * @param name the name of the object whose attributes will be updated * @param mod_op the modification operation, one of: ADD_ATTRIBUTE, * REPLACE_ATTRIBUTE, REMOVE_ATTRIBUTE * @param attrs the attributes to be used for the modification; may not * be null * @exception AttributeModificationException if the modification cannot be * completed successfully * @exception NamingException if a naming exception is encountered */ public void modifyAttributes(String name, int mod_op, Attributes attrs) throws NamingException { } /** * Modifies the attributes associated with a named object using an an * ordered list of modifications. The modifications are performed in the * order specified. Each modification specifies a modification operation * code and an attribute on which to operate. Where possible, the * modifications are performed atomically. * * @param name the name of the object whose attributes will be updated * @param mods an ordered sequence of modifications to be performed; may * not be null * @exception AttributeModificationException if the modification cannot be * completed successfully * @exception NamingException if a naming exception is encountered */ public void modifyAttributes(String name, ModificationItem[] mods) throws NamingException { } /** * Binds a name to an object, along with associated attributes. 
If attrs * is null, the resulting binding will have the attributes associated * with obj if obj is a DirContext, and no attributes otherwise. If attrs * is non-null, the resulting binding will have attrs as its attributes; * any attributes associated with obj are ignored. * * @param name the name to bind; may not be empty * @param obj the object to bind; possibly null * @param attrs the attributes to associate with the binding * @exception NameAlreadyBoundException if name is already bound * @exception InvalidAttributesException if some "mandatory" attributes * of the binding are not supplied * @exception NamingException if a naming exception is encountered */ public void bind(String name, Object obj, Attributes attrs) throws NamingException { // Note: No custom attributes allowed File file = new File(base, name); if (file.exists()) throw new NameAlreadyBoundException (sm.getString("resources.alreadyBound", name)); rebind(name, obj, attrs); } /** * Binds a name to an object, along with associated attributes, * overwriting any existing binding. If attrs is null and obj is a * DirContext, the attributes from obj are used. If attrs is null and obj * is not a DirContext, any existing attributes associated with the object * already bound in the directory remain unchanged. If attrs is non-null, * any existing attributes associated with the object already bound in * the directory are removed and attrs is associated with the named * object. If obj is a DirContext and attrs is non-null, the attributes * of obj are ignored. 
* * @param name the name to bind; may not be empty * @param obj the object to bind; possibly null * @param attrs the attributes to associate with the binding * @exception InvalidAttributesException if some "mandatory" attributes * of the binding are not supplied * @exception NamingException if a naming exception is encountered */ public void rebind(String name, Object obj, Attributes attrs) throws NamingException { // Note: No custom attributes allowed // Check obj type File file = new File(base, name); InputStream is = null; if (obj instanceof Resource) { try { is = ((Resource) obj).streamContent(); } catch (IOException e) { } } else if (obj instanceof InputStream) { is = (InputStream) obj; } else if (obj instanceof DirContext) { if (file.exists()) { if (!file.delete()) throw new NamingException (sm.getString("resources.bindFailed", name)); } if (!file.mkdir()) throw new NamingException (sm.getString("resources.bindFailed", name)); } if (is == null) throw new NamingException (sm.getString("resources.bindFailed", name)); // Open os try { FileOutputStream os = null; byte buffer[] = new byte[BUFFER_SIZE]; int len = -1; try { os = new FileOutputStream(file); while (true) { len = is.read(buffer); if (len == -1) break; os.write(buffer, 0, len); } } finally { if (os != null) os.close(); is.close(); } } catch (IOException e) { throw new NamingException (sm.getString("resources.bindFailed", e)); } } /** * Creates and binds a new context, along with associated attributes. * This method creates a new subcontext with the given name, binds it in * the target context (that named by all but terminal atomic component of * the name), and associates the supplied attributes with the newly * created object. All intermediate and target contexts must already * exist. If attrs is null, this method is equivalent to * Context.createSubcontext(). 
     *
     * @param name the name of the context to create; may not be empty
     * @param attrs the attributes to associate with the newly created context
     * @return the newly created context
     * @exception NameAlreadyBoundException if the name is already bound
     * @exception InvalidAttributesException if attrs does not contain all
     * the mandatory attributes required for creation
     * @exception NamingException if a naming exception is encountered
     */
    public DirContext createSubcontext(String name, Attributes attrs)
        throws NamingException {
        // A subcontext corresponds to a plain directory beneath the doc base.
        File file = new File(base, name);
        if (file.exists())
            throw new NameAlreadyBoundException
                (sm.getString("resources.alreadyBound", name));
        if (!file.mkdir())
            throw new NamingException
                (sm.getString("resources.bindFailed", name));
        // Resolve the freshly created directory through the normal lookup
        // path so the returned context is fully initialized.
        // NOTE(review): the attrs parameter is ignored; the created directory
        // carries no attributes -- confirm callers do not rely on them.
        return (DirContext) lookup(name);
    }


    /**
     * Retrieves the schema associated with the named object. The schema
     * describes rules regarding the structure of the namespace and the
     * attributes stored within it. The schema specifies what types of
     * objects can be added to the directory and where they can be added;
     * what mandatory and optional attributes an object can have. The range
     * of support for schemas is directory-specific.
     *
     * @param name the name of the object whose schema is to be retrieved
     * @return the schema associated with the context; never null
     * @exception OperationNotSupportedException if schema not supported
     * @exception NamingException if a naming exception is encountered
     */
    public DirContext getSchema(String name)
        throws NamingException {
        // A file system has no schema support.
        throw new OperationNotSupportedException();
    }


    /**
     * Retrieves a context containing the schema objects of the named
     * object's class definitions.
     *
     * @param name the name of the object whose object class definition is to
     * be retrieved
     * @return the DirContext containing the named object's class
     * definitions; never null
     * @exception OperationNotSupportedException if schema not supported
     * @exception NamingException if a naming exception is encountered
     */
    public DirContext getSchemaClassDefinition(String name)
        throws NamingException {
        // A file system has no schema support.
        throw new OperationNotSupportedException();
    }


    /**
     * Searches in a single context for objects that contain a specified set
     * of attributes, and retrieves selected attributes. The search is
     * performed using the default SearchControls settings.
     * <p>
     * NOTE(review): attribute search is not implemented by this context;
     * this method returns <code>null</code>, contradicting the "non-null"
     * contract documented below. Callers must guard against a null result.
     *
     * @param name the name of the context to search
     * @param matchingAttributes the attributes to search for. If empty or
     * null, all objects in the target context are returned.
     * @param attributesToReturn the attributes to return. null indicates
     * that all attributes are to be returned; an empty array indicates that
     * none are to be returned.
     * @return a non-null enumeration of SearchResult objects. Each
     * SearchResult contains the attributes identified by attributesToReturn
     * and the name of the corresponding object, named relative to the
     * context named by name.
     * @exception NamingException if a naming exception is encountered
     */
    public NamingEnumeration search(String name, Attributes matchingAttributes,
                                    String[] attributesToReturn)
        throws NamingException {
        return null;
    }


    /**
     * Searches in a single context for objects that contain a specified set
     * of attributes. This method returns all the attributes of such objects.
     * It is equivalent to supplying null as the attributesToReturn parameter
     * to the method search(Name, Attributes, String[]).
     * <p>
     * NOTE(review): not implemented -- always returns <code>null</code>
     * despite the documented non-null contract.
     *
     * @param name the name of the context to search
     * @param matchingAttributes the attributes to search for. If empty or
     * null, all objects in the target context are returned.
     * @return a non-null enumeration of SearchResult objects. Each
     * SearchResult contains the attributes identified by attributesToReturn
     * and the name of the corresponding object, named relative to the
     * context named by name.
     * @exception NamingException if a naming exception is encountered
     */
    public NamingEnumeration search(String name, Attributes matchingAttributes)
        throws NamingException {
        return null;
    }


    /**
     * Searches in the named context or object for entries that satisfy the
     * given search filter. Performs the search as specified by the search
     * controls.
     * <p>
     * NOTE(review): not implemented -- always returns <code>null</code>
     * despite the documented non-null contract.
     *
     * @param name the name of the context or object to search
     * @param filter the filter expression to use for the search; may not be
     * null
     * @param cons the search controls that control the search. If null,
     * the default search controls are used (equivalent to
     * (new SearchControls())).
     * @return an enumeration of SearchResults of the objects that satisfy
     * the filter; never null
     * @exception InvalidSearchFilterException if the search filter specified
     * is not supported or understood by the underlying directory
     * @exception InvalidSearchControlsException if the search controls
     * contain invalid settings
     * @exception NamingException if a naming exception is encountered
     */
    public NamingEnumeration search(String name, String filter,
                                    SearchControls cons)
        throws NamingException {
        return null;
    }


    /**
     * Searches in the named context or object for entries that satisfy the
     * given search filter. Performs the search as specified by the search
     * controls.
     * <p>
     * NOTE(review): not implemented -- always returns <code>null</code>
     * despite the documented non-null contract.
     *
     * @param name the name of the context or object to search
     * @param filterExpr the filter expression to use for the search.
     * The expression may contain variables of the form "{i}" where i is a
     * nonnegative integer. May not be null.
     * @param filterArgs the array of arguments to substitute for the
     * variables in filterExpr. The value of filterArgs[i] will replace each
     * occurrence of "{i}". If null, equivalent to an empty array.
     * @param cons the search controls that control the search. If null, the
     * default search controls are used (equivalent to (new SearchControls())).
     * @return an enumeration of SearchResults of the objects that satisfy the
     * filter; never null
     * @exception ArrayIndexOutOfBoundsException if filterExpr contains {i}
     * expressions where i is outside the bounds of the array filterArgs
     * @exception InvalidSearchControlsException if cons contains invalid
     * settings
     * @exception InvalidSearchFilterException if filterExpr with filterArgs
     * represents an invalid search filter
     * @exception NamingException if a naming exception is encountered
     */
    public NamingEnumeration search(String name, String filterExpr,
                                    Object[] filterArgs, SearchControls cons)
        throws NamingException {
        return null;
    }


    // ------------------------------------------------------ Protected Methods


    /**
     * Return a context-relative path, beginning with a "/", that represents
     * the canonical version of the specified path after ".." and "." elements
     * are resolved out. If the specified path attempts to go outside the
     * boundaries of the current context (i.e. too many ".." path elements
     * are present), return <code>null</code> instead.
     *
     * @param path Path to be normalized
     */
    protected String normalize(String path) {

        String normalized = path;

        // Normalize the slashes and add leading slash if necessary
        if (normalized.indexOf('\\') >= 0)
            normalized = normalized.replace('\\', '/');
        if (!normalized.startsWith("/"))
            normalized = "/" + normalized;

        // Resolve occurrences of "//" in the normalized path
        while (true) {
            int index = normalized.indexOf("//");
            if (index < 0)
                break;
            normalized = normalized.substring(0, index) +
                normalized.substring(index + 1);
        }

        // Resolve occurrences of "/./" in the normalized path
        while (true) {
            int index = normalized.indexOf("/./");
            if (index < 0)
                break;
            normalized = normalized.substring(0, index) +
                normalized.substring(index + 2);
        }

        // Resolve occurrences of "/../" in the normalized path by removing
        // the preceding path segment; a "/../" with nothing before it means
        // the path escapes this context and is rejected.
        while (true) {
            int index = normalized.indexOf("/../");
            if (index < 0)
                break;
            if (index == 0)
                return (null);  // Trying to go outside our context
            int index2 = normalized.lastIndexOf('/', index - 1);
            normalized = normalized.substring(0, index2) +
                normalized.substring(index + 3);
        }

        // NOTE(review): a trailing "/." or "/.." (without a following slash)
        // is NOT collapsed by the loops above -- confirm callers never pass
        // such paths before relying on this method for containment checks.

        // Return the normalized path that we have completed
        return (normalized);

    }


    /**
     * Return a File object representing the specified normalized
     * context-relative path if it exists and is readable.  Otherwise,
     * return <code>null</code>.
* * @param name Normalized context-relative path (with leading '/') */ protected File file(String name) { File file = new File(base, name); if (file.exists() && file.canRead()) { // Check that this file belongs to our root path String canPath = null; try { canPath = file.getCanonicalPath(); } catch (IOException e) { } if (canPath == null) return null; // Check to see if going outside of the web application root if ((!allowLinking) && (!canPath.startsWith(absoluteBase))) { return null; } // Case sensitivity check if (caseSensitive) { String fileAbsPath = file.getAbsolutePath(); if (fileAbsPath.endsWith(".")) fileAbsPath = fileAbsPath + "/"; String absPath = normalize(fileAbsPath); if (canPath != null) canPath = normalize(canPath); if ((absoluteBase.length() < absPath.length()) && (absoluteBase.length() < canPath.length())) { absPath = absPath.substring(absoluteBase.length() + 1); if ((canPath == null) || (absPath == null)) return null; if (absPath.equals("")) absPath = "/"; canPath = canPath.substring(absoluteBase.length() + 1); if (canPath.equals("")) canPath = "/"; if (!canPath.equals(absPath)) return null; } } } else { return null; } return file; } /** * List the resources which are members of a collection. 
* * @param file Collection * @return Vector containg NamingEntry objects */ protected Vector list(File file) { Vector entries = new Vector(); if (!file.isDirectory()) return entries; String[] names = file.list(); Arrays.sort(names); // Sort alphabetically if (names == null) return entries; NamingEntry entry = null; for (int i = 0; i < names.length; i++) { File currentFile = new File(file, names[i]); Object object = null; if (currentFile.isDirectory()) { FileDirContext tempContext = new FileDirContext(env); tempContext.setDocBase(file.getPath()); object = tempContext; } else { object = new FileResource(currentFile); } entry = new NamingEntry(names[i], object, NamingEntry.ENTRY); entries.addElement(entry); } return entries; } // ----------------------------------------------- FileResource Inner Class /** * This specialized resource implementation avoids opening the IputStream * to the file right away (which would put a lock on the file). */ protected class FileResource extends Resource { // -------------------------------------------------------- Constructor public FileResource(File file) { this.file = file; } // --------------------------------------------------- Member Variables /** * Associated file object. */ protected File file; /** * File length. */ protected long length = -1L; // --------------------------------------------------- Resource Methods /** * Content accessor. * * @return InputStream */ public InputStream streamContent() throws IOException { if (binaryContent == null) { inputStream = new FileInputStream(file); } return super.streamContent(); } } // ------------------------------------- FileResourceAttributes Inner Class /** * This specialized resource attribute implementation does some lazy * reading (to speed up simple checks, like checking the last modified * date). 
*/ protected class FileResourceAttributes extends ResourceAttributes { // -------------------------------------------------------- Constructor public FileResourceAttributes(File file) { this.file = file; } // --------------------------------------------------- Member Variables protected File file; protected boolean accessed = false; // ----------------------------------------- ResourceAttributes Methods /** * Is collection. */ public boolean isCollection() { if (!accessed) { collection = file.isDirectory(); accessed = true; } return super.isCollection(); } /** * Get content length. * * @return content length value */ public long getContentLength() { if (contentLength != -1L) return contentLength; contentLength = file.length(); return contentLength; } /** * Get creation time. * * @return creation time value */ public long getCreation() { if (creation != -1L) return creation; creation = file.lastModified(); return creation; } /** * Get creation date. * * @return Creation date value */ public Date getCreationDate() { if (creation == -1L) { creation = file.lastModified(); } return super.getCreationDate(); } /** * Get last modified time. * * @return lastModified time value */ public long getLastModified() { if (lastModified != -1L) return lastModified; lastModified = file.lastModified(); return lastModified; } /** * Get lastModified date. * * @return LastModified date value */ public Date getLastModifiedDate() { if (lastModified == -1L) { lastModified = file.lastModified(); } return super.getLastModifiedDate(); } /** * Get name. * * @return Name value */ public String getName() { if (name == null) name = file.getName(); return name; } /** * Get resource type. * * @return String resource type */ public String getResourceType() { if (!accessed) { collection = file.isDirectory(); accessed = true; } return super.getResourceType(); } } }
/*
 * Copyright 2002-2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.scheduling.concurrent;

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadFactory;

import org.springframework.beans.factory.FactoryBean;
import org.springframework.scheduling.support.DelegatingErrorHandlingRunnable;
import org.springframework.scheduling.support.TaskUtils;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;

/**
 * {@link org.springframework.beans.factory.FactoryBean} that sets up
 * a {@link java.util.concurrent.ScheduledExecutorService}
 * (by default: a {@link java.util.concurrent.ScheduledThreadPoolExecutor})
 * and exposes it for bean references.
 *
 * <p>Allows for registration of {@link ScheduledExecutorTask ScheduledExecutorTasks},
 * automatically starting the {@link ScheduledExecutorService} on initialization and
 * cancelling it on destruction of the context. When tasks only need to be registered
 * statically at startup, application code never needs to touch the
 * {@link ScheduledExecutorService} instance itself; this FactoryBean then serves
 * purely as a lifecycle integration point.
 *
 * <p>Note that {@link java.util.concurrent.ScheduledExecutorService}
 * uses a {@link Runnable} instance that is shared between repeated executions,
 * in contrast to Quartz which instantiates a new Job for each execution.
 *
 * <p><b>WARNING:</b> {@link Runnable Runnables} submitted via a native
 * {@link java.util.concurrent.ScheduledExecutorService} are removed from
 * the execution schedule once they throw an exception. If you would prefer
 * to continue execution after such an exception, switch this FactoryBean's
 * {@link #setContinueScheduledExecutionAfterException "continueScheduledExecutionAfterException"}
 * property to "true".
 *
 * @author Juergen Hoeller
 * @since 2.0
 * @see #setPoolSize
 * @see #setThreadFactory
 * @see ScheduledExecutorTask
 * @see java.util.concurrent.ScheduledExecutorService
 * @see java.util.concurrent.ScheduledThreadPoolExecutor
 */
@SuppressWarnings("serial")
public class ScheduledExecutorFactoryBean extends ExecutorConfigurationSupport
		implements FactoryBean<ScheduledExecutorService> {

	private int poolSize = 1;

	private ScheduledExecutorTask[] scheduledExecutorTasks;

	private boolean continueScheduledExecutionAfterException = false;

	private boolean exposeUnconfigurableExecutor = false;

	private ScheduledExecutorService exposedExecutor;


	/**
	 * Set the ScheduledExecutorService's pool size.
	 * Default is 1.
	 */
	public void setPoolSize(int poolSize) {
		Assert.isTrue(poolSize > 0, "'poolSize' must be 1 or higher");
		this.poolSize = poolSize;
	}

	/**
	 * Register a list of ScheduledExecutorTask objects with the ScheduledExecutorService
	 * that this FactoryBean creates. Depending on each ScheduledExecutorTask's settings,
	 * it will be registered via one of ScheduledExecutorService's schedule methods.
	 * @see java.util.concurrent.ScheduledExecutorService#schedule(java.lang.Runnable, long, java.util.concurrent.TimeUnit)
	 * @see java.util.concurrent.ScheduledExecutorService#scheduleWithFixedDelay(java.lang.Runnable, long, long, java.util.concurrent.TimeUnit)
	 * @see java.util.concurrent.ScheduledExecutorService#scheduleAtFixedRate(java.lang.Runnable, long, long, java.util.concurrent.TimeUnit)
	 */
	public void setScheduledExecutorTasks(ScheduledExecutorTask... scheduledExecutorTasks) {
		this.scheduledExecutorTasks = scheduledExecutorTasks;
	}

	/**
	 * Specify whether to continue the execution of a scheduled task
	 * after it threw an exception.
	 * <p>Default is "false", matching the native behavior of a
	 * {@link java.util.concurrent.ScheduledExecutorService}.
	 * Switch this flag to "true" for exception-proof execution of each task,
	 * continuing scheduled execution as in the case of successful execution.
	 * @see java.util.concurrent.ScheduledExecutorService#scheduleAtFixedRate
	 */
	public void setContinueScheduledExecutionAfterException(boolean continueScheduledExecutionAfterException) {
		this.continueScheduledExecutionAfterException = continueScheduledExecutionAfterException;
	}

	/**
	 * Specify whether this FactoryBean should expose an unconfigurable
	 * decorator for the created executor.
	 * <p>Default is "false", exposing the raw executor as bean reference.
	 * Switch this flag to "true" to strictly prevent clients from
	 * modifying the executor's configuration.
	 * @see java.util.concurrent.Executors#unconfigurableScheduledExecutorService
	 */
	public void setExposeUnconfigurableExecutor(boolean exposeUnconfigurableExecutor) {
		this.exposeUnconfigurableExecutor = exposeUnconfigurableExecutor;
	}


	@Override
	protected ExecutorService initializeExecutor(
			ThreadFactory threadFactory, RejectedExecutionHandler rejectedExecutionHandler) {

		ScheduledExecutorService executor =
				createExecutor(this.poolSize, threadFactory, rejectedExecutionHandler);

		// Register specified ScheduledExecutorTasks, if necessary.
		if (!ObjectUtils.isEmpty(this.scheduledExecutorTasks)) {
			registerTasks(this.scheduledExecutorTasks, executor);
		}

		// Wrap executor with an unconfigurable decorator if so requested.
		if (this.exposeUnconfigurableExecutor) {
			this.exposedExecutor = Executors.unconfigurableScheduledExecutorService(executor);
		}
		else {
			this.exposedExecutor = executor;
		}

		return executor;
	}

	/**
	 * Create a new {@link ScheduledExecutorService} instance.
	 * <p>The default implementation creates a {@link ScheduledThreadPoolExecutor}.
	 * Can be overridden in subclasses to provide custom {@link ScheduledExecutorService} instances.
	 * @param poolSize the specified pool size
	 * @param threadFactory the ThreadFactory to use
	 * @param rejectedExecutionHandler the RejectedExecutionHandler to use
	 * @return a new ScheduledExecutorService instance
	 * @see #afterPropertiesSet()
	 * @see java.util.concurrent.ScheduledThreadPoolExecutor
	 */
	protected ScheduledExecutorService createExecutor(
			int poolSize, ThreadFactory threadFactory, RejectedExecutionHandler rejectedExecutionHandler) {

		return new ScheduledThreadPoolExecutor(poolSize, threadFactory, rejectedExecutionHandler);
	}

	/**
	 * Register the specified {@link ScheduledExecutorTask ScheduledExecutorTasks}
	 * on the given {@link ScheduledExecutorService}.
	 * @param tasks the specified ScheduledExecutorTasks (never empty)
	 * @param executor the ScheduledExecutorService to register the tasks on.
	 */
	protected void registerTasks(ScheduledExecutorTask[] tasks, ScheduledExecutorService executor) {
		for (ScheduledExecutorTask task : tasks) {
			Runnable runnable = getRunnableToSchedule(task);
			if (task.isOneTimeTask()) {
				executor.schedule(runnable, task.getDelay(), task.getTimeUnit());
			}
			else if (task.isFixedRate()) {
				executor.scheduleAtFixedRate(runnable, task.getDelay(), task.getPeriod(), task.getTimeUnit());
			}
			else {
				executor.scheduleWithFixedDelay(runnable, task.getDelay(), task.getPeriod(), task.getTimeUnit());
			}
		}
	}

	/**
	 * Determine the actual Runnable to schedule for the given task.
	 * <p>Wraps the task's Runnable in a
	 * {@link org.springframework.scheduling.support.DelegatingErrorHandlingRunnable}
	 * that will catch and log the Exception. If necessary, it will suppress the
	 * Exception according to the
	 * {@link #setContinueScheduledExecutionAfterException "continueScheduledExecutionAfterException"}
	 * flag.
	 * @param task the ScheduledExecutorTask to schedule
	 * @return the actual Runnable to schedule (may be a decorator)
	 */
	protected Runnable getRunnableToSchedule(ScheduledExecutorTask task) {
		if (this.continueScheduledExecutionAfterException) {
			return new DelegatingErrorHandlingRunnable(task.getRunnable(), TaskUtils.LOG_AND_SUPPRESS_ERROR_HANDLER);
		}
		return new DelegatingErrorHandlingRunnable(task.getRunnable(), TaskUtils.LOG_AND_PROPAGATE_ERROR_HANDLER);
	}


	@Override
	public ScheduledExecutorService getObject() {
		return this.exposedExecutor;
	}

	@Override
	public Class<? extends ScheduledExecutorService> getObjectType() {
		if (this.exposedExecutor != null) {
			return this.exposedExecutor.getClass();
		}
		return ScheduledExecutorService.class;
	}

	@Override
	public boolean isSingleton() {
		return true;
	}

}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nutch.crawl; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.FloatWritable; import org.apache.nutch.crawl.CrawlDatum; import org.apache.nutch.metadata.Nutch; import org.apache.nutch.util.NutchConfiguration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class implements an adaptive re-fetch algorithm. This works as follows: * <ul> * <li>for pages that has changed since the last fetchTime, decrease their * fetchInterval by a factor of DEC_FACTOR (default value is 0.2f).</li> * <li>for pages that haven't changed since the last fetchTime, increase their * fetchInterval by a factor of INC_FACTOR (default value is 0.2f).<br> * If SYNC_DELTA property is true, then: * <ul> * <li>calculate a <code>delta = fetchTime - modifiedTime</code></li> * <li>try to synchronize with the time of change, by shifting the next * fetchTime by a fraction of the difference between the last modification time * and the last fetch time. I.e. 
the next fetch time will be set to * <code>fetchTime + fetchInterval - delta * SYNC_DELTA_RATE</code></li> * <li>if the adjusted fetch interval is bigger than the delta, then * <code>fetchInterval = delta</code>.</li> * </ul> * </li> * <li>the minimum value of fetchInterval may not be smaller than MIN_INTERVAL * (default is 1 minute).</li> * <li>the maximum value of fetchInterval may not be bigger than MAX_INTERVAL * (default is 365 days).</li> * </ul> * <p> * NOTE: values of DEC_FACTOR and INC_FACTOR higher than 0.4f may destabilize * the algorithm, so that the fetch interval either increases or decreases * infinitely, with little relevance to the page changes. Please use * {@link #main(String[])} method to test the values before applying them in a * production system. * </p> * * @author Andrzej Bialecki */ public class AdaptiveFetchSchedule extends AbstractFetchSchedule { // Loggg public static final Logger LOG = LoggerFactory .getLogger(AbstractFetchSchedule.class); protected float INC_RATE; protected float DEC_RATE; private int MAX_INTERVAL; private int MIN_INTERVAL; private boolean SYNC_DELTA; private double SYNC_DELTA_RATE; public void setConf(Configuration conf) { super.setConf(conf); if (conf == null) return; INC_RATE = conf.getFloat("db.fetch.schedule.adaptive.inc_rate", 0.2f); DEC_RATE = conf.getFloat("db.fetch.schedule.adaptive.dec_rate", 0.2f); MIN_INTERVAL = conf.getInt("db.fetch.schedule.adaptive.min_interval", 60); MAX_INTERVAL = conf.getInt("db.fetch.schedule.adaptive.max_interval", SECONDS_PER_DAY * 365); // 1 year SYNC_DELTA = conf.getBoolean("db.fetch.schedule.adaptive.sync_delta", true); SYNC_DELTA_RATE = conf.getFloat( "db.fetch.schedule.adaptive.sync_delta_rate", 0.2f); } @Override public CrawlDatum setFetchSchedule(Text url, CrawlDatum datum, long prevFetchTime, long prevModifiedTime, long fetchTime, long modifiedTime, int state) { super.setFetchSchedule(url, datum, prevFetchTime, prevModifiedTime, fetchTime, modifiedTime, state); float 
interval = datum.getFetchInterval(); long refTime = fetchTime; // https://issues.apache.org/jira/browse/NUTCH-1430 interval = (interval == 0) ? defaultInterval : interval; if (datum.getMetaData().containsKey(Nutch.WRITABLE_FIXED_INTERVAL_KEY)) { // Is fetch interval preset in CrawlDatum MD? Then use preset interval FloatWritable customIntervalWritable = (FloatWritable) (datum .getMetaData().get(Nutch.WRITABLE_FIXED_INTERVAL_KEY)); interval = customIntervalWritable.get(); } else { if (modifiedTime <= 0) modifiedTime = fetchTime; switch (state) { case FetchSchedule.STATUS_MODIFIED: interval *= (1.0f - DEC_RATE); break; case FetchSchedule.STATUS_NOTMODIFIED: interval *= (1.0f + INC_RATE); break; case FetchSchedule.STATUS_UNKNOWN: break; } if (SYNC_DELTA) { // try to synchronize with the time of change long delta = (fetchTime - modifiedTime) / 1000L; if (delta > interval) interval = delta; refTime = fetchTime - Math.round(delta * SYNC_DELTA_RATE * 1000); } if (interval < MIN_INTERVAL) { interval = MIN_INTERVAL; } else if (interval > MAX_INTERVAL) { interval = MAX_INTERVAL; } } datum.setFetchInterval(interval); datum.setFetchTime(refTime + Math.round(interval * 1000.0)); datum.setModifiedTime(modifiedTime); return datum; } public static void main(String[] args) throws Exception { FetchSchedule fs = new AdaptiveFetchSchedule(); fs.setConf(NutchConfiguration.create()); // we start the time at 0, for simplicity long curTime = 0; long delta = 1000L * 3600L * 24L; // 2 hours // we trigger the update of the page every 30 days long update = 1000L * 3600L * 24L * 30L; // 30 days boolean changed = true; long lastModified = 0; int miss = 0; int totalMiss = 0; int maxMiss = 0; int fetchCnt = 0; int changeCnt = 0; // initial fetchInterval is 10 days CrawlDatum p = new CrawlDatum(1, 3600 * 24 * 30, 1.0f); p.setFetchTime(0); LOG.info(p.toString()); // let's move the timeline a couple of deltas for (int i = 0; i < 10000; i++) { if (lastModified + update < curTime) { // 
System.out.println("i=" + i + ", lastModified=" + lastModified + // ", update=" + update + ", curTime=" + curTime); changed = true; changeCnt++; lastModified = curTime; } LOG.info(i + ". " + changed + "\twill fetch at " + (p.getFetchTime() / delta) + "\tinterval " + (p.getFetchInterval() / SECONDS_PER_DAY) + " days" + "\t missed " + miss); if (p.getFetchTime() <= curTime) { fetchCnt++; fs.setFetchSchedule(new Text("http://www.example.com"), p, p .getFetchTime(), p.getModifiedTime(), curTime, lastModified, changed ? FetchSchedule.STATUS_MODIFIED : FetchSchedule.STATUS_NOTMODIFIED); LOG.info("\tfetched & adjusted: " + "\twill fetch at " + (p.getFetchTime() / delta) + "\tinterval " + (p.getFetchInterval() / SECONDS_PER_DAY) + " days"); if (!changed) miss++; if (miss > maxMiss) maxMiss = miss; changed = false; totalMiss += miss; miss = 0; } if (changed) miss++; curTime += delta; } LOG.info("Total missed: " + totalMiss + ", max miss: " + maxMiss); LOG.info("Page changed " + changeCnt + " times, fetched " + fetchCnt + " times."); } }
/**
 * Copyright 2011-2017 Asakusa Framework Team.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.asakusafw.vocabulary.flow.util;

import com.asakusafw.vocabulary.flow.Source;
import com.asakusafw.vocabulary.flow.util.CoreOperatorFactory.Checkpoint;
import com.asakusafw.vocabulary.flow.util.CoreOperatorFactory.Confluent;
import com.asakusafw.vocabulary.flow.util.CoreOperatorFactory.Empty;
import com.asakusafw.vocabulary.flow.util.CoreOperatorFactory.EmptyFragment;
import com.asakusafw.vocabulary.flow.util.CoreOperatorFactory.Extend;
import com.asakusafw.vocabulary.flow.util.CoreOperatorFactory.ExtendFragment;
import com.asakusafw.vocabulary.flow.util.CoreOperatorFactory.Project;
import com.asakusafw.vocabulary.flow.util.CoreOperatorFactory.ProjectFragment;
import com.asakusafw.vocabulary.flow.util.CoreOperatorFactory.Restructure;
import com.asakusafw.vocabulary.flow.util.CoreOperatorFactory.RestructureFragment;

/**
 * Provides static factory methods for core operators; every method delegates
 * to a shared {@link CoreOperatorFactory} instance.
 * @since 0.2.6
 * @version 0.9.0
 */
public final class CoreOperators {

    /** The shared factory backing all static methods of this class. */
    private static final CoreOperatorFactory OPERATOR_FACTORY = new CoreOperatorFactory();

    private CoreOperators() {
        // utility class: no instances
    }

    /**
     * Returns a new <em>empty operator</em> instance.
     * The resulting operator acts like a dummy input which provides an empty data-set.
     * @param <T> the data model type
     * @param type the data model type
     * @return a new instance of <em>empty operator</em>
     * @throws IllegalArgumentException if the parameter is {@code null}
     * @see CoreOperatorFactory#empty(Class)
     */
    public static <T> Empty<T> empty(Class<T> type) {
        return OPERATOR_FACTORY.empty(type);
    }

    /**
     * Returns a new fragment which provides an <em>empty operator</em>.
     * The resulting object will require the target (downstream) data model type.
     * @return a new fragment of <em>empty operator</em>
     * @since 0.7.3
     * @see CoreOperatorFactory#empty()
     */
    public static EmptyFragment empty() {
        return OPERATOR_FACTORY.empty();
    }

    /**
     * Terminates the upstream source.
     * Operator outputs generally must be connected to at least one operator
     * input; this method connects the upstream source to a <em>stop</em>
     * operator input, which silently drops everything it receives.
     * @param in the upstream source
     * @throws IllegalArgumentException if the parameter is {@code null}
     * @see CoreOperatorFactory#stop(Source)
     */
    public static void stop(Source<?> in) {
        OPERATOR_FACTORY.stop(in);
    }

    /**
     * Returns a new <em>confluent operator</em> instance which merges the
     * data from each upstream source into a single output.
     * @param <T> the data model type
     * @param a the upstream source (1)
     * @param b the upstream source (2)
     * @return a new instance of <em>confluent operator</em>
     * @throws IllegalArgumentException if the parameters are {@code null}
     * @see CoreOperatorFactory#confluent(Source, Source)
     */
    public static <T> Confluent<T> confluent(Source<T> a, Source<T> b) {
        return OPERATOR_FACTORY.confluent(a, b);
    }

    /**
     * Returns a new <em>confluent operator</em> instance which merges the
     * data from each upstream source into a single output.
     * @param <T> the data model type
     * @param a the upstream source (1)
     * @param b the upstream source (2)
     * @param c the upstream source (3)
     * @return a new instance of <em>confluent operator</em>
     * @throws IllegalArgumentException if the parameters are {@code null}
     * @see CoreOperatorFactory#confluent(Source, Source, Source)
     */
    public static <T> Confluent<T> confluent(Source<T> a, Source<T> b, Source<T> c) {
        return OPERATOR_FACTORY.confluent(a, b, c);
    }

    /**
     * Returns a new <em>confluent operator</em> instance which merges the
     * data from each upstream source into a single output.
     * @param <T> the data model type
     * @param a the upstream source (1)
     * @param b the upstream source (2)
     * @param c the upstream source (3)
     * @param d the upstream source (4)
     * @return a new instance of <em>confluent operator</em>
     * @throws IllegalArgumentException if the parameters are {@code null}
     * @see CoreOperatorFactory#confluent(Source, Source, Source, Source)
     */
    public static <T> Confluent<T> confluent(Source<T> a, Source<T> b, Source<T> c, Source<T> d) {
        return OPERATOR_FACTORY.confluent(a, b, c, d);
    }

    /**
     * Returns a new <em>confluent operator</em> instance which merges the
     * data from each upstream source into a single output.
     * @param <T> the data model type
     * @param inputs the upstream sources
     * @return a new instance of <em>confluent operator</em>
     * @throws IllegalArgumentException if the parameter is {@code null}
     * @see CoreOperatorFactory#confluent(Iterable)
     */
    public static <T> Confluent<T> confluent(Iterable<? extends Source<T>> inputs) {
        return OPERATOR_FACTORY.confluent(inputs);
    }

    /**
     * Returns a new <em>confluent operator</em> instance which merges the
     * data from each upstream source into a single output.
     * @param <T> the data model type
     * @param inputs the upstream sources
     * @return a new instance of <em>confluent operator</em>
     * @throws IllegalArgumentException if the parameter is {@code null}
     * @see CoreOperatorFactory#confluent(Iterable)
     * @since 0.9.0
     */
    @SafeVarargs
    public static <T> Confluent<T> confluent(Source<T>... inputs) {
        return OPERATOR_FACTORY.confluent(inputs);
    }

    /**
     * Returns a new <em>checkpoint operator</em>, which provides a restarting
     * point in case of (partial) failure.
     * @param <T> the data model type
     * @param in the upstream source
     * @return a new instance of <em>checkpoint operator</em>
     * @throws IllegalArgumentException if the parameter is {@code null}
     * @see CoreOperatorFactory#checkpoint(Source)
     */
    public static <T> Checkpoint<T> checkpoint(Source<T> in) {
        return OPERATOR_FACTORY.checkpoint(in);
    }

    /**
     * Returns a new <em>project operator</em> instance.
     * The source (upstream) data type must have all properties declared in the target (downstream) data type;
     * those properties are copied from each upstream datum into the corresponding result.
     * If the target data model type has extra properties for the upstream data type, or if there are type
     * incompatible properties between the source and target data model, compiling this operator must fail.
     * @param <T> the target data model type
     * @param in the upstream source
     * @param targetType the target data model class
     * @return a new instance of <em>project operator</em>
     * @throws IllegalArgumentException if some parameters were {@code null}
     * @see CoreOperatorFactory#project(Source, Class)
     */
    public static <T> Project<T> project(Source<?> in, Class<T> targetType) {
        return OPERATOR_FACTORY.project(in, targetType);
    }

    /**
     * Returns a new fragment which provides a <em>project operator</em>.
     * The resulting fragment object will require the target (downstream) data model type.
     * @param in the upstream source
     * @return a new fragment of <em>project operator</em>
     * @throws IllegalArgumentException if the parameters are {@code null}
     * @since 0.7.3
     * @see CoreOperatorFactory#project(Source)
     */
    public static ProjectFragment project(Source<?> in) {
        return OPERATOR_FACTORY.project(in);
    }

    /**
     * Returns a new <em>extend operator</em> instance.
     * The target (downstream) data type must have all properties declared in the source (upstream) data type;
     * those properties are copied from each upstream datum into the corresponding result.
     * If the target data model type does not have some properties in the upstream data type, or if there are type
     * incompatible properties between the source and target data model, compiling this operator must fail.
     * @param <T> the target data model type
     * @param in the upstream source
     * @param targetType the target data model class
     * @return a new instance of <em>extend operator</em>
     * @throws IllegalArgumentException if the parameters are {@code null}
     * @see CoreOperatorFactory#extend(Source, Class)
     */
    public static <T> Extend<T> extend(Source<?> in, Class<T> targetType) {
        return OPERATOR_FACTORY.extend(in, targetType);
    }

    /**
     * Returns a new fragment which provides an <em>extend operator</em>.
     * The resulting fragment object will require the target (downstream) data model type.
     * @param in the upstream source
     * @return a new fragment of <em>extend operator</em>
     * @throws IllegalArgumentException if the parameters are {@code null}
     * @since 0.7.3
     * @see CoreOperatorFactory#extend(Source)
     */
    public static ExtendFragment extend(Source<?> in) {
        return OPERATOR_FACTORY.extend(in);
    }

    /**
     * Returns a new <em>restructure operator</em> instance.
     * The target (downstream) data type must have one or more properties declared in the source (upstream)
     * data type; those properties are copied from each upstream datum into the corresponding result.
     * If there are type incompatible properties between the source and target data model, compiling this
     * operator must fail.
     * @param <T> the target data model type
     * @param in the upstream source
     * @param targetType the target data model class
     * @return a new instance of <em>restructure operator</em>
     * @throws IllegalArgumentException if the parameters are {@code null}
     * @see CoreOperatorFactory#restructure(Source, Class)
     */
    public static <T> Restructure<T> restructure(Source<?> in, Class<T> targetType) {
        return OPERATOR_FACTORY.restructure(in, targetType);
    }

    /**
     * Returns a new fragment which provides a <em>restructure operator</em>.
     * The resulting fragment object will require the target (downstream) data model type.
     * @param in the upstream source
     * @return a new fragment of <em>restructure operator</em>
     * @throws IllegalArgumentException if the parameters are {@code null}
     * @since 0.7.3
     * @see CoreOperatorFactory#restructure(Source)
     */
    public static RestructureFragment restructure(Source<?> in) {
        return OPERATOR_FACTORY.restructure(in);
    }
}
package info.kapable.app.ComptesPerso.pojo; import java.util.Date; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import javax.persistence.Table; import javax.validation.constraints.Size; import com.fasterxml.jackson.annotation.JsonFormat; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; @Entity @Table(name = "OPERATION") @JsonIgnoreProperties(ignoreUnknown = false) /** * Operation entity, a financial operation on a account * * @author Mathieu GOULIN <mathieu.goulin@gadz.org> */ public class Operation extends Pojo { /* Private properties */ private Long id; private Double credit = 0.; private Double debit = 0.; @JsonFormat(shape=JsonFormat.Shape.STRING, pattern="yyyy-MM-dd'T'HH:mm:ss.SSSS") private Date date; @Size(min=2, max=255, message = "{errors.label.size}") private String description = ""; private Boolean pointedTransaction = false; /* Link */ private Account account; private Category category; private ThirdParty thirdParty; /* Optional */ private String transfertNumber; private String chequeNumber; private String slipNumber; /** * Getter and Setter */ @Id @GeneratedValue(strategy = GenerationType.IDENTITY) public Long getId() { return id; } public void setId(Long id) { this.id = id; } /** * @return the credit */ @Column(name = "CREDIT", nullable = false) public Double getCredit() { return credit; } /** * @param credit the credit to set */ public void setCredit(Double credit) { this.credit = credit; } /** * @return the debit */ @Column(name = "DEBIT", nullable = false) public Double getDebit() { return debit; } /** * @param debit the debit to set */ public void setDebit(Double debit) { this.debit = debit; } /** * @return the date */ @Column(name = "DATE_OPERATION", nullable = false) public Date getDate() { return date; } /** * @param date the date 
to set */ public void setDate(Date date) { this.date = date; } /** * @return the description */ @Column(name = "DESCRIPTION", nullable = false) public String getDescription() { return description; } /** * @param description the description to set */ public void setDescription(String description) { this.description = description; } /** * @return the pointedTransaction */ @Column(name = "OPERATION_POINTEE", nullable = false) public Boolean getPointedTransaction() { return pointedTransaction; } /** * @param pointedTransaction the pointedTransaction to set */ public void setPointedTransaction(Boolean pointedTransaction) { this.pointedTransaction = pointedTransaction; } /** * @return the transfertNumber */ @Column(name = "NUMERO_VIREMENT", nullable = true) public String getTransfertNumber() { return transfertNumber; } /** * @param transfertNumber the transfertNumber to set */ public void setTransfertNumber(String transfertNumber) { this.transfertNumber = transfertNumber; } /** * @return the chequeNumber */ @Column(name = "NUMERO_CHEQUE", nullable = true) public String getChequeNumber() { return chequeNumber; } /** * @param chequeNumber the chequeNumber to set */ public void setChequeNumber(String chequeNumber) { this.chequeNumber = chequeNumber; } /** * @return the slipNumber */ @Column(name = "NUMERO_BORDEREAU", nullable = true) public String getSlipNumber() { return slipNumber; } /** * @param slipNumber the slipNumber to set */ public void setSlipNumber(String slipNumber) { this.slipNumber = slipNumber; } /** * @return the account */ @ManyToOne @JoinColumn(name="COMPTE_ID", nullable = false) public Account getAccount() { return account; } /** * @param account the account to set */ public void setAccount(Account account) { this.account = account; } /** * @return the category */ @ManyToOne @JoinColumn(name="CATEGORY_ID", nullable = false) public Category getCategory() { return category; } /** * @param category the category to set */ public void setCategory(Category 
category) { this.category = category; } /** * @return the thirdParty */ @ManyToOne @JoinColumn(name="TIERS_ID", nullable = false) public ThirdParty getThirdParty() { return thirdParty; } /** * @param thirdParty the thirdParty to set */ public void setThirdParty(ThirdParty thirdParty) { this.thirdParty = thirdParty; } }
/** * Copyright CSIRO Australian e-Health Research Centre (http://aehrc.com). * All rights reserved. Use is subject to license terms and conditions. */ package au.csiro.snorocket.core; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.util.HashSet; import java.util.Set; import org.junit.Test; import au.csiro.ontology.Node; import au.csiro.ontology.model.Axiom; import au.csiro.ontology.model.Concept; import au.csiro.ontology.model.ConceptInclusion; import au.csiro.ontology.model.Conjunction; import au.csiro.ontology.model.Datatype; import au.csiro.ontology.model.Existential; import au.csiro.ontology.model.IntegerLiteral; import au.csiro.ontology.model.NamedConcept; import au.csiro.ontology.model.NamedFeature; import au.csiro.ontology.model.NamedRole; import au.csiro.ontology.model.Operator; import au.csiro.ontology.model.Role; import au.csiro.ontology.model.RoleInclusion; import au.csiro.snorocket.core.axioms.Inclusion; /** * Main unit tests for Snorocket. * * @author Alejandro Metke * */ public class TestNormalisedOntology { /** * Tests the simple example found in the paper "Efficient Reasoning in EL+". 
*/ @Test public void testEndocarditis() { // Create roles NamedRole contIn = new NamedRole("cont-in"); NamedRole partOf = new NamedRole("part-of"); NamedRole hasLoc = new NamedRole("has-loc"); NamedRole actsOn = new NamedRole("acts-on"); // Create concepts NamedConcept endocardium = new NamedConcept("Endocardium"); NamedConcept tissue = new NamedConcept("Tissue"); NamedConcept heartWall = new NamedConcept("HeartWall"); NamedConcept heartValve = new NamedConcept("HeartValve"); NamedConcept bodyWall = new NamedConcept("BodyWall"); NamedConcept heart = new NamedConcept("Heart"); NamedConcept bodyValve = new NamedConcept("BodyValve"); NamedConcept endocarditis = new NamedConcept("Endocarditis"); NamedConcept inflammation = new NamedConcept("Inflammation"); NamedConcept disease = new NamedConcept("Disease"); NamedConcept heartdisease = new NamedConcept("Heartdisease"); NamedConcept criticalDisease = new NamedConcept("CriticalDisease"); // Create axioms ConceptInclusion a1 = new ConceptInclusion(endocardium, new Conjunction(new Concept[] { tissue, new Existential(contIn, heartWall), new Existential(contIn, heartValve) })); ConceptInclusion a2 = new ConceptInclusion(heartWall, new Conjunction( new Concept[] { bodyWall, new Existential(partOf, heart) })); ConceptInclusion a3 = new ConceptInclusion(heartValve, new Conjunction( new Concept[] { bodyValve, new Existential(partOf, heart) })); ConceptInclusion a4 = new ConceptInclusion(endocarditis, new Conjunction(new Concept[] { inflammation, new Existential(hasLoc, endocardium) })); ConceptInclusion a5 = new ConceptInclusion(inflammation, new Conjunction(new Concept[] { disease, new Existential(actsOn, tissue) })); ConceptInclusion a6 = new ConceptInclusion( new Conjunction(new Concept[] { heartdisease, new Existential(hasLoc, heartValve) }), criticalDisease); ConceptInclusion a7 = new ConceptInclusion(heartdisease, new Conjunction(new Concept[] { disease, new Existential(hasLoc, heart) })); ConceptInclusion a8 = new 
ConceptInclusion( new Conjunction(new Concept[] { disease, new Existential(hasLoc, heart) }), heartdisease); RoleInclusion a9 = new RoleInclusion(new Role[] { partOf, partOf }, partOf); RoleInclusion a10 = new RoleInclusion(partOf, contIn); RoleInclusion a11 = new RoleInclusion(new Role[] { hasLoc, contIn }, hasLoc); Set<Axiom> axioms = new HashSet<Axiom>(); axioms.add(a1); axioms.add(a2); axioms.add(a3); axioms.add(a4); axioms.add(a5); axioms.add(a6); axioms.add(a7); axioms.add(a8); axioms.add(a9); axioms.add(a10); axioms.add(a11); // Classify IFactory factory = new CoreFactory(); NormalisedOntology o = new NormalisedOntology(factory, axioms); int total = factory.getTotalConcepts(); for(int i = 2; i < total; i++) { Object ob = factory.lookupConceptId(i); String str = ob.toString(); System.out.println(i+ "->"+str); } total = factory.getTotalRoles(); for(int i = 0; i < total; i++) { System.out.println(i+ "->"+factory.lookupRoleId(i).toString()); } total = factory.getTotalFeatures(); for(int i = 0; i < total; i++) { System.out.println(i+ "->"+factory.lookupFeatureId(i).toString()); } o.classify(); // Build taxonomy o.buildTaxonomy(); // Test results Node bottomNode = o.getBottomNode(); Set<Node> bottomRes = bottomNode.getParents(); //assertTrue(bottomRes.size() == 5); assertTrue(bottomRes.contains(o.getEquivalents(endocardium.getId()))); assertTrue(bottomRes.contains(o.getEquivalents(endocarditis.getId()))); assertTrue(bottomRes.contains(o.getEquivalents(heartWall.getId()))); assertTrue(bottomRes.contains(o.getEquivalents(heartValve.getId()))); assertTrue(bottomRes.contains(o.getEquivalents(heart.getId()))); Node endocarditisNode = o.getEquivalents(endocarditis.getId()); Set<Node> endocarditisRes = endocarditisNode.getParents(); assertTrue(endocarditisRes.size() == 3); assertTrue(endocarditisRes.contains(o.getEquivalents(inflammation.getId()))); assertTrue(endocarditisRes.contains(o.getEquivalents(heartdisease.getId()))); 
assertTrue(endocarditisRes.contains(o.getEquivalents(criticalDisease.getId()))); Node inflammationNode = o.getEquivalents(inflammation.getId()); Set<Node> inflammationRes = inflammationNode.getParents(); assertTrue(inflammationRes.size() == 1); assertTrue(inflammationRes.contains(o.getEquivalents(disease.getId()))); Node endocardiumNode = o.getEquivalents(endocardium.getId()); Set<Node> endocardiumRes = endocardiumNode.getParents(); assertTrue(endocardiumRes.size() == 1); assertTrue(endocardiumRes.contains(o.getEquivalents(tissue.getId()))); Node heartdiseaseNode = o.getEquivalents(heartdisease.getId()); Set<Node> heartdiseaseRes = heartdiseaseNode.getParents(); assertTrue(heartdiseaseRes.size() == 1); assertTrue(heartdiseaseRes.contains(o.getEquivalents(disease.getId()))); Node heartWallNode = o.getEquivalents(heartWall.getId()); Set<Node> heartWallRes = heartWallNode.getParents(); assertTrue(heartWallRes.size() == 1); assertTrue(heartWallRes.contains(o.getEquivalents(bodyWall.getId()))); Node heartValveNode = o.getEquivalents(heartValve.getId()); Set<Node> heartValveRes = heartValveNode.getParents(); assertTrue(heartValveRes.size() == 1); assertTrue(heartValveRes.contains(o.getEquivalents(bodyValve.getId()))); Node diseaseNode = o.getEquivalents(disease.getId()); Set<Node> diseaseRes = diseaseNode.getParents(); assertTrue(diseaseRes.size() == 1); assertTrue(diseaseRes.contains(o.getTopNode())); Node tissueNode = o.getEquivalents(tissue.getId()); Set<Node> tissueRes = tissueNode.getParents(); assertTrue(tissueRes.size() == 1); assertTrue(tissueRes.contains(o.getTopNode())); Node heartNode = o.getEquivalents(heart.getId()); Set<Node> heartRes = heartNode.getParents(); assertTrue(heartRes.size() == 1); assertTrue(heartRes.contains(o.getTopNode())); Node bodyValveNode = o.getEquivalents(bodyValve.getId()); Set<Node> bodyValveRes = bodyValveNode.getParents(); assertTrue(bodyValveRes.size() == 1); assertTrue(bodyValveRes.contains(o.getTopNode())); Node bodyWallNode = 
o.getEquivalents(bodyWall.getId()); Set<Node> bodyWallRes = bodyWallNode.getParents(); assertTrue(bodyWallRes.size() == 1); assertTrue(bodyWallRes.contains(o.getTopNode())); Node criticalDiseaseNode = o.getEquivalents(criticalDisease.getId()); Set<Node> criticalDiseaseRes = criticalDiseaseNode.getParents(); assertTrue(criticalDiseaseRes.size() == 1); assertTrue(criticalDiseaseRes.contains(o.getTopNode())); } @Test public void testNormalise() { IFactory factory = new CoreFactory(); // Add roles NamedRole container = new NamedRole("container"); NamedRole contains = new NamedRole("contains"); // Add features NamedFeature mgPerTablet = new NamedFeature("mgPerTablet"); // Add concepts NamedConcept panadol = new NamedConcept("Panadol"); NamedConcept panadol_250mg = new NamedConcept("Panadol_250mg"); NamedConcept panadol_500mg = new NamedConcept("Panadol_500mg"); NamedConcept panadol_pack_250mg = new NamedConcept("Panadol_pack_250mg"); NamedConcept paracetamol = new NamedConcept("Paracetamol"); NamedConcept bottle = new NamedConcept("Bottle"); // Add axioms ConceptInclusion a1 = new ConceptInclusion(panadol, new Existential(contains, paracetamol)); ConceptInclusion a2 = new ConceptInclusion(panadol_250mg, new Conjunction(new Concept[] { panadol, new Datatype(mgPerTablet, Operator.EQUALS, new IntegerLiteral(250)) })); ConceptInclusion a3 = new ConceptInclusion(new Conjunction( new Concept[] { panadol, new Datatype(mgPerTablet, Operator.EQUALS, new IntegerLiteral(250)) }), panadol_250mg); ConceptInclusion a4 = new ConceptInclusion(panadol_500mg, new Conjunction(new Concept[] { panadol, new Datatype(mgPerTablet, Operator.EQUALS, new IntegerLiteral(500)) })); ConceptInclusion a5 = new ConceptInclusion(new Conjunction( new Concept[] { panadol, new Datatype(mgPerTablet, Operator.EQUALS, new IntegerLiteral(500)) }), panadol_500mg); ConceptInclusion a6 = new ConceptInclusion(panadol_pack_250mg, new Conjunction(new Concept[] { panadol, new Datatype(mgPerTablet, Operator.EQUALS, 
new IntegerLiteral(250)), new Existential(container, bottle) })); Set<Axiom> axioms = new HashSet<Axiom>(); axioms.add(a1); axioms.add(a2); axioms.add(a3); axioms.add(a4); axioms.add(a5); axioms.add(a6); NormalisedOntology no = new NormalisedOntology(factory); Set<Inclusion> norms = no.normalise(axioms); for (Inclusion norm : norms) { System.out.println(norm.getNormalForm().toString()); } // Not much of a test ;) assertEquals(12, norms.size()); } /** * Tests incremental classification functionality for correctness by doing * the following: * * <ol> * <li>Two axioms are removed from the Endocarditis ontology (see axioms * below).</li> * <li>This ontology is classified.</li> * <li>The axioms that were removed are added programmatically to the * ontology.</li> * <li>The new ontology is reclassified.</li> * <li>The results are compared to the original ground truth.</li> * </ol> * * Declaration(Class(:Endocardium)) Declaration(Class(:Endocarditis)) * * SubClassOf( :Endocardium ObjectIntersectionOf( :Tissue * ObjectSomeValuesFrom(:cont-in :HeartWall) ObjectSomeValuesFrom(:cont-in * :HeartValve) ) ) * * SubClassOf( :Endocarditis ObjectIntersectionOf( :Inflammation * ObjectSomeValuesFrom(:has-loc :Endocardium) ) ) */ @Test public void testEndocarditisIncremental() { IFactory factory = new CoreFactory(); // Original Endocarditis ontology axioms NamedRole contIn = new NamedRole("cont-in"); NamedRole partOf = new NamedRole("part-of"); NamedRole hasLoc = new NamedRole("has-loc"); NamedRole actsOn = new NamedRole("acts-on"); NamedConcept tissue = new NamedConcept("Tissue"); NamedConcept heartWall = new NamedConcept("HeartWall"); NamedConcept heartValve = new NamedConcept("HeartValve"); NamedConcept bodyWall = new NamedConcept("BodyWall"); NamedConcept heart = new NamedConcept("Heart"); NamedConcept bodyValve = new NamedConcept("BodyValve"); NamedConcept inflammation = new NamedConcept("Inflammation"); NamedConcept disease = new NamedConcept("Disease"); NamedConcept heartdisease 
= new NamedConcept("Heartdisease"); NamedConcept criticalDisease = new NamedConcept("CriticalDisease"); ConceptInclusion a2 = new ConceptInclusion(heartWall, new Conjunction( new Concept[] { bodyWall, new Existential(partOf, heart) })); ConceptInclusion a3 = new ConceptInclusion(heartValve, new Conjunction( new Concept[] { bodyValve, new Existential(partOf, heart) })); ConceptInclusion a5 = new ConceptInclusion(inflammation, new Conjunction(new Concept[] { disease, new Existential(actsOn, tissue) })); ConceptInclusion a6 = new ConceptInclusion(new Conjunction( new Concept[] { heartdisease, new Existential(hasLoc, heartValve) }), criticalDisease); ConceptInclusion a7 = new ConceptInclusion(heartdisease, new Conjunction(new Concept[] { disease, new Existential(hasLoc, heart) })); ConceptInclusion a8 = new ConceptInclusion( new Conjunction(new Concept[] { disease, new Existential(hasLoc, heart) }), heartdisease); RoleInclusion a9 = new RoleInclusion(new Role[] { partOf, partOf }, partOf); RoleInclusion a10 = new RoleInclusion(partOf, contIn); RoleInclusion a11 = new RoleInclusion(new Role[] { hasLoc, contIn }, hasLoc); // Partial ontology Set<Axiom> axioms = new HashSet<Axiom>(); axioms.add(a2); axioms.add(a3); axioms.add(a5); axioms.add(a6); axioms.add(a7); axioms.add(a8); axioms.add(a9); axioms.add(a10); axioms.add(a11); NormalisedOntology o = new NormalisedOntology(factory, axioms); o.classify(); o.buildTaxonomy(); // Add delta axioms and classify incrementally NamedConcept endocardium = new NamedConcept("Endocardium"); NamedConcept endocarditis = new NamedConcept("Endocarditis"); ConceptInclusion a1 = new ConceptInclusion(endocardium, new Conjunction(new Concept[] { tissue, new Existential(contIn, heartWall), new Existential(contIn, heartValve) })); ConceptInclusion a4 = new ConceptInclusion(endocarditis, new Conjunction(new Concept[] { inflammation, new Existential(hasLoc, endocardium) })); Set<Axiom> incAxioms = new HashSet<Axiom>(); incAxioms.add(a1); 
incAxioms.add(a4); o.loadIncremental(incAxioms); o.classifyIncremental(); o.buildTaxonomy(); // Test results Node bottomNode = o.getBottomNode(); Set<Node> bottomRes = bottomNode.getParents(); assertTrue(bottomRes.size() == 5); assertTrue(bottomRes.contains(o.getEquivalents(endocardium.getId()))); assertTrue(bottomRes.contains(o.getEquivalents(endocarditis.getId()))); assertTrue(bottomRes.contains(o.getEquivalents(heartWall.getId()))); assertTrue(bottomRes.contains(o.getEquivalents(heartValve.getId()))); assertTrue(bottomRes.contains(o.getEquivalents(heart.getId()))); Node endocarditisNode = o.getEquivalents(endocarditis.getId()); Set<Node> endocarditisRes = endocarditisNode.getParents(); assertTrue(endocarditisRes.size() == 3); assertTrue(endocarditisRes.contains(o.getEquivalents(inflammation.getId()))); assertTrue(endocarditisRes.contains(o.getEquivalents(heartdisease.getId()))); assertTrue(endocarditisRes.contains(o.getEquivalents(criticalDisease.getId()))); Node inflammationNode = o.getEquivalents(inflammation.getId()); Set<Node> inflammationRes = inflammationNode.getParents(); assertTrue(inflammationRes.size() == 1); assertTrue(inflammationRes.contains(o.getEquivalents(disease.getId()))); Node endocardiumNode = o.getEquivalents(endocardium.getId()); Set<Node> endocardiumRes = endocardiumNode.getParents(); assertTrue(endocardiumRes.size() == 1); assertTrue(endocardiumRes.contains(o.getEquivalents(tissue.getId()))); Node heartdiseaseNode = o.getEquivalents(heartdisease.getId()); Set<Node> heartdiseaseRes = heartdiseaseNode.getParents(); assertTrue(heartdiseaseRes.size() == 1); assertTrue(heartdiseaseRes.contains(o.getEquivalents(disease.getId()))); Node heartWallNode = o.getEquivalents(heartWall.getId()); Set<Node> heartWallRes = heartWallNode.getParents(); assertTrue(heartWallRes.size() == 1); assertTrue(heartWallRes.contains(o.getEquivalents(bodyWall.getId()))); Node heartValveNode = o.getEquivalents(heartValve.getId()); Set<Node> heartValveRes = 
heartValveNode.getParents(); assertTrue(heartValveRes.size() == 1); assertTrue(heartValveRes.contains(o.getEquivalents(bodyValve.getId()))); Node diseaseNode = o.getEquivalents(disease.getId()); Set<Node> diseaseRes = diseaseNode.getParents(); assertTrue(diseaseRes.size() == 1); assertTrue(diseaseRes.contains(o.getTopNode())); Node tissueNode = o.getEquivalents(tissue.getId()); Set<Node> tissueRes = tissueNode.getParents(); assertTrue(tissueRes.size() == 1); assertTrue(tissueRes.contains(o.getTopNode())); Node heartNode = o.getEquivalents(heart.getId()); Set<Node> heartRes = heartNode.getParents(); assertTrue(heartRes.size() == 1); assertTrue(heartRes.contains(o.getTopNode())); Node bodyValveNode = o.getEquivalents(bodyValve.getId()); Set<Node> bodyValveRes = bodyValveNode.getParents(); assertTrue(bodyValveRes.size() == 1); assertTrue(bodyValveRes.contains(o.getTopNode())); Node bodyWallNode = o.getEquivalents(bodyWall.getId()); Set<Node> bodyWallRes = bodyWallNode.getParents(); assertTrue(bodyWallRes.size() == 1); assertTrue(bodyWallRes.contains(o.getTopNode())); Node criticalDiseaseNode = o.getEquivalents(criticalDisease.getId()); Set<Node> criticalDiseaseRes = criticalDiseaseNode.getParents(); assertTrue(criticalDiseaseRes.size() == 1); assertTrue(criticalDiseaseRes.contains(o.getTopNode())); } }
package com.thunderwarn.thunderwarn.common.slider; import android.content.Context; import android.support.v4.view.PagerAdapter; import android.support.v4.view.ViewPager; import android.util.AttributeSet; import android.view.Gravity; import android.view.View; import android.widget.FrameLayout; import android.widget.HorizontalScrollView; import android.widget.ImageView; import android.widget.LinearLayout; import com.thunderwarn.thunderwarn.R; import com.thunderwarn.thunderwarn.common.SharedResources; import com.thunderwarn.thunderwarn.common.configuration.LayoutManager; public class SlidingBulletLayout extends HorizontalScrollView { private static final int SIZE_NORMAL = 12; private static final int SIZE_SELECTED = 36; private LayoutManager layoutManager = LayoutManager.getInstance(); private SharedResources sharedResources = SharedResources.getInstance(); private int position = 0; /** * Allows complete control over the colors drawn in the tab layout. Set with */ public interface TabColorizer { /** * @return return the color of the indicator used when {@code position} is selected. */ int getIndicatorColor(int position); /** * @return return the color of the divider drawn to the right of {@code position}. 
*/ int getDividerColor(int position); } private static final int TITLE_OFFSET_DIPS = 24; private static final int TAB_VIEW_PADDING_DIPS = 16; private static final int TAB_VIEW_TEXT_SIZE_SP = 12; private int mTitleOffset; private int mTabViewLayoutId; private int mTabViewTextViewId; private ViewPager mViewPager; private ViewPager.OnPageChangeListener mViewPagerPageChangeListener; private LinearLayout bullets; public SlidingBulletLayout(Context context) { this(context, null); } public SlidingBulletLayout(Context context, AttributeSet attrs) { this(context, attrs, 0); } public SlidingBulletLayout(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); // Disable the Scroll Bar setHorizontalScrollBarEnabled(false); // Make sure that the Tab Strips fills this View setFillViewport(true); mTitleOffset = (int) (TITLE_OFFSET_DIPS * getResources().getDisplayMetrics().density); this.bullets = new LinearLayout(context); this.addView(this.bullets, LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT); } public void setPosition(int position) { this.position = position; } /** * Sets the associated view pager. Note that the assumption here is that the pager content * (number of tabs and tab titles) does not change after this call has been made. 
*/ public void setViewPager(ViewPager viewPager) { this.bullets.removeAllViews(); mViewPager = viewPager; if (viewPager != null) { viewPager.setOnPageChangeListener(new InternalViewPagerListener()); populateTabStrip(); } } private void populateTabStrip() { final PagerAdapter adapter = mViewPager.getAdapter(); int count = adapter.getCount(); for (int i = 0; i < count ; i++) { ImageView tabView = createBullet(false); this.bullets.addView(tabView); } FrameLayout.LayoutParams params = (FrameLayout.LayoutParams) this.bullets.getLayoutParams(); if(params == null){ params = new FrameLayout.LayoutParams(FrameLayout.LayoutParams.WRAP_CONTENT, FrameLayout.LayoutParams.WRAP_CONTENT); } params.setMargins(0,20,0,0); this.bullets.setLayoutParams(params); this.bullets.setGravity(Gravity.CENTER); } private ImageView createBullet(boolean selected) { final View.OnClickListener tabClickListener = new TabClickListener(); ImageView tabView = new ImageView(sharedResources.getContext()); tabView.setScaleType(ImageView.ScaleType.FIT_XY); tabView.setMaxHeight(7); tabView.setMaxWidth(7); tabView.setBackgroundColor(LayoutManager.getInstance().getSmoothForegroundColor()); tabView.setOnClickListener(tabClickListener); LinearLayout.LayoutParams params = (LinearLayout.LayoutParams) tabView.getLayoutParams(); if(params == null){ params = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.WRAP_CONTENT, LinearLayout.LayoutParams.WRAP_CONTENT); } params.setMargins(5,5,5,5); tabView.setLayoutParams(params); // Choose the image file tabView.setImageResource(R.drawable.bb_slider); if(selected){ tabView.setImageResource(R.drawable.bb_slider_selected); } return tabView; } @Override protected void onAttachedToWindow() { super.onAttachedToWindow(); if (mViewPager != null) { scrollToTab(this.position, 0); } } private void scrollToTab(int tabIndex, int positionOffset) { final View.OnClickListener tabClickListener = new TabClickListener(); final int tabStripChildCount = this.bullets.getChildCount(); if 
(tabStripChildCount == 0 || tabIndex < 0 || tabIndex >= tabStripChildCount) { return; } // Clear the panel boolean isEmpty = this.bullets.getChildCount() == 0; // Put everything normal for (int i=0 ; i < tabStripChildCount ; i++){ boolean selected = false; if(i == tabIndex){ selected = true; } if(isEmpty) { ImageView tabView = createBullet(selected); this.bullets.addView(tabView); }else if(this.bullets.getChildCount() > i){ View view = this.bullets.getChildAt(i); if(view instanceof ImageView) { ImageView tabView = (ImageView) view; if(selected){ tabView.setImageResource(R.drawable.bb_slider_selected); }else{ tabView.setImageResource(R.drawable.bb_slider); } } } } } private class InternalViewPagerListener implements ViewPager.OnPageChangeListener { private int mScrollState; private boolean inited = false; @Override public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) { int tabStripChildCount = SlidingBulletLayout.this.bullets.getChildCount(); if ((tabStripChildCount == 0) || (position < 0) || (position >= tabStripChildCount)) { return; } View selectedTitle = SlidingBulletLayout.this.bullets.getChildAt(position); int extraOffset = (selectedTitle != null) ? 
(int) (positionOffset * selectedTitle.getWidth()) : 0; scrollToTab(position, extraOffset); inited = true; if (mViewPagerPageChangeListener != null) { mViewPagerPageChangeListener.onPageScrolled(position, positionOffset, positionOffsetPixels); } } @Override public void onPageScrollStateChanged(int state) { mScrollState = state; if (mViewPagerPageChangeListener != null) { mViewPagerPageChangeListener.onPageScrollStateChanged(state); } } @Override public void onPageSelected(int position) { if (mScrollState == ViewPager.SCROLL_STATE_IDLE) { scrollToTab(position, 0); } if (mViewPagerPageChangeListener != null) { mViewPagerPageChangeListener.onPageSelected(position); } } } private class TabClickListener implements View.OnClickListener { @Override public void onClick(View v) { for (int i = 0; i < bullets.getChildCount(); i++) { if (v == bullets.getChildAt(i)) { mViewPager.setCurrentItem(i); return; } } } } }
//@author A0112918H package com.epictodo.controller.worker; import com.epictodo.controller.logic.CRUDLogic; import com.epictodo.model.task.Task; import java.util.ArrayList; public class WorkDistributor { private static final String MSG_ENTER_COMMAND = "Please Enter Command"; private static CRUDLogic _logic = new CRUDLogic(); private final static String[] COMMAND_EXIT = {"exit", "quit"}; private final static String[] COMMAND_ADD = {"add", "create", "+"}; private final static String[] COMMAND_UPDATE = {"update", "change", "modify"}; private final static String[] COMMAND_DELETE = {"delete", "remove", "-"}; private final static String[] COMMAND_SEARCH = {"search", "find", "ls"}; private final static String[] COMMAND_DISPLAY = {"display", "upcoming"}; private final static String[] COMMAND_DISPLAYALL = {"all", "displayall", "showall"}; private final static String[] COMMAND_UNDO = {"undo", "revert"}; private final static String[] COMMAND_REDO = {"redo"}; private final static String[] COMMAND_DONE = {"done", "mark"}; private final static String[] COMMAND_HELP = {"help", "?"}; private static final String MSG_HELP = "insert [Command]+[Instruction]\nCommandTypes are:\n\t\t1.Add/Create\n\t\t2.Search/Find\n\t\t3.Update/Change/Modify\n\t\t4.Display/Upcoming\n\t\t5.Undo/Revert\n\t\t6.Redo\n\t\t7.Displayall"; private final static String MSG_INVALID_INPUT = "invalid input"; enum CommandType { DISPLAY, DISPLAYALL, ADD, DELETE, UPDATE, SEARCH, EXIT, INVALID, NULL, UNDO, REDO, DONE, HELP } enum KeywordType { WORD, TIME, OPTION } /** * Return true if the storage is detected and loaded * If error or files not found, false is returned. * * @param zone Zone of position. * @return Lateral location. * @throws IllegalArgumentException If zone is <= 0. */ public static boolean loadData() { try { return _logic.loadFromFile(); } catch (Exception ex) { return false; } } /** * Return message after every command is operated * If command is not operated successfully, "invalid input" is returned. 
* * @param input User input * @return Operation result Message. */ public static String processCommand(String input) { String result = MSG_ENTER_COMMAND; ArrayList<Task> list = null; Task t = null; // Clear expired timed tasks _logic.clearExpiredTask(); CommandType command = defineCommandType(input); input = getInstruction(input); switch (command) { case DISPLAY: return _logic.displayIncompleteTaskList(); case DISPLAYALL: return _logic.displayAllTaskList(); case ADD: t = CommandWorker.createTask(input); result = _logic.createTask(t); return result; case DELETE: case DONE: case UPDATE: case SEARCH: list = searchThroughKeywords(input); if (list.size() == 0) { return "Cannnot find '" + input + "'"; } result = selectItemProcess(list, command); return result; case EXIT: System.exit(0); break; case UNDO: result = _logic.undoMostRecent(); return result; case REDO: result = _logic.redoMostRecent(); return result; case HELP: result = MSG_HELP; return result; case INVALID: // todo: defined all invalid cases return MSG_INVALID_INPUT; default: break; } // todo handle invalid input here return result; } /** * Calls MenuWorker to prompt for user input * Return operation result message * * @param list list of possible option from the search result * @param commandType Defined user command type * @return result message. */ private static String selectItemProcess(ArrayList<Task> list, CommandType commandType) { String result = null; Task tempTask = null; try { tempTask = MenuWorker.selectItemFromList(commandType, list, _logic.convertListToString(list)); } catch (IndexOutOfBoundsException iobe) { return MSG_INVALID_INPUT; } result = processCommand(commandType, tempTask); return result; } /** * Calls CRUDLogic class to process the task * Return proper result message given from CRUDLogic * * @param command System defined command type * @param task Y coordinate of position. 
* @return Operation result message */ private static String processCommand(CommandType command, Task task) { String result = ""; switch (command) { case DELETE: result = _logic.deleteTask(task); break; case DONE: result = _logic.markAsDone(task); break; case UPDATE: Task updatedTask = MenuWorker.updateTask(task); if (updatedTask != null) { result = _logic.updateTask(task, updatedTask); } break; case SEARCH: result = _logic.searchDetail(task); break; default: break; } return result; } /** * Calls CRUDLogic search by using the keywords * Return a list of tasks from the searches * * @param keyword the key words from user input * @return list of possible tasks base on the search result. */ private static ArrayList<Task> searchThroughKeywords(String keyword) { ArrayList<Task> list = new ArrayList<Task>(); Task tempTask = _logic.translateWorkingListId(keyword); String date = null; if (keyword.length() != 2) { date = CommandWorker.getDateViaNlp(keyword); } KeywordType keywordType = getKeywordType(tempTask, date); switch (keywordType) { case WORD: list = _logic.getTasksByName(keyword); break; case TIME: list = _logic.getTasksByDate(date); break; case OPTION: list.add(tempTask); break; } return list; } /** * Return CommandType base on the command given from the input * * @param input user input * @return Command type */ private static CommandType defineCommandType(String input) { //retrieve command key from the user input String command = getCommand(input); //match them with proper command type if (compareString(command, "")) return CommandType.NULL; else if (matchCommand(command, COMMAND_ADD)) { return CommandType.ADD; } else if (matchCommand(command, COMMAND_DELETE)) { return CommandType.DELETE; } else if (matchCommand(command, COMMAND_UPDATE)) { return CommandType.UPDATE; } else if (matchCommand(command, COMMAND_SEARCH)) { return CommandType.SEARCH; } else if (matchCommand(command, COMMAND_DISPLAY)) { return CommandType.DISPLAY; } else if (matchCommand(command, 
COMMAND_DISPLAYALL)) { return CommandType.DISPLAYALL; } else if (matchCommand(command, COMMAND_EXIT)) { return CommandType.EXIT; } else if (matchCommand(command, COMMAND_UNDO)) { return CommandType.UNDO; } else if (matchCommand(command, COMMAND_REDO)) { return CommandType.REDO; } else if (matchCommand(command, COMMAND_DONE)) { return CommandType.DONE; } else if (matchCommand(command, COMMAND_HELP)) { return CommandType.HELP; } else { return CommandType.INVALID; } } /** * Return true when the command matches with the vocab or its synonyms * if it does not match with each other, false is returned * * @param command System define command. * @param vocabs command key and its synonyms * @return Boolean. */ private static boolean matchCommand(String command, final String[] vocabs) { for (int i = 0; i < vocabs.length; i++) { if (compareString(command, vocabs[i])) { return true; } } return false; } /** * Return true if both strings are the same * * @param text1 First String to be compared. * @param text2 Second String to be compared. * @return true/false */ private static boolean compareString(String text1, String text2) { return (text1.equalsIgnoreCase(text2)); } /** * Return instruction by removing command from user input * * @param input user input * @return instruction * @throws IllegalArgumentException If zone is <= 0. */ private static String getInstruction(String input) { return input.substring(getCommandLength(input), input.length()).trim(); } /** * Return command length. * * @param instruc user's input. * @return commands length. */ private static int getCommandLength(String instruc) { String commandTypeString = instruc.trim().split("\\s+")[0]; return commandTypeString.length(); } /** * Return command type. * * @param instruc user's input. * @return commands. 
*/ private static String getCommand(String instruc) { String commandTypeString = instruc.trim().split("\\s+")[0]; return commandTypeString; } /** * return keyword type * * @param task Task from the option given from user. * @param date date given from NLP. * @return Keyword Type. */ private static KeywordType getKeywordType(Task task, String date) { if (task != null) { return KeywordType.OPTION; } else if (date != null) { return KeywordType.TIME; } else { return KeywordType.WORD; } } }
package com.sporticus.domain.entities;

import com.sporticus.domain.interfaces.IEvent;
import com.sporticus.util.Utility;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.validation.constraints.NotNull;
import java.util.Date;

/**
 * JPA entity mapped to the "event" table.
 *
 * <p>Fix applied: {@code java.util.Date} is mutable, and previously only
 * {@link #setCreated(Date)} took a defensive copy while the other Date
 * setters stored the caller's instance and the getters returned the internal
 * instance. All Date values are now defensively copied on the way in and out
 * so external code cannot mutate the entity's state behind its back.
 * Persistence is unaffected: the JPA annotations are on the fields, so field
 * access is used by the provider.
 */
@Entity
@Table(name = "event")
public class Event implements IEvent {

    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private Long id;

    // Record-creation timestamp; defaults to "now" for new instances.
    @Temporal(TemporalType.TIMESTAMP)
    @NotNull
    private Date created = new Date();

    // Scheduled start of the event; defaults to "now" for new instances.
    @Temporal(TemporalType.TIMESTAMP)
    @NotNull
    private Date dateTime = new Date();

    // Scheduled end of the event; nullable in the database.
    @Temporal(TemporalType.TIMESTAMP)
    private Date dateTimeEnd = new Date();

    @Column(nullable = true)
    private STATUS status = STATUS.PROPOSED;

    @Column(nullable = false)
    private String name;

    @Column(nullable = true)
    private String type;

    @Column(nullable = true)
    private String description;

    @Column(nullable = true)
    private String metaDataType = "";

    @Column(nullable = true)
    private String metaData = "";

    @Column(nullable = false)
    private Long ownerId;

    /** No-arg constructor required by JPA. */
    public Event() {
    }

    /**
     * Copy constructor: copies all {@link IEvent} properties from the given
     * event via the interface's COPY helper.
     *
     * @param e the event to copy from.
     */
    public Event(IEvent e) {
        IEvent.COPY(e, this);
    }

    /** Returns a defensive copy of the given Date, or null if it is null. */
    private static Date copyOf(final Date date) {
        return (date == null) ? null : new Date(date.getTime());
    }

    @Override
    public Long getId() {
        return id;
    }

    public void setId(final Long id) {
        this.id = id;
    }

    @Override
    public String getType() {
        return type;
    }

    @Override
    public IEvent setType(String type) {
        this.type = type;
        return this;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public IEvent setName(final String name) {
        this.name = name;
        return this;
    }

    @Override
    public String getDescription() {
        return description;
    }

    @Override
    public IEvent setDescription(final String description) {
        this.description = description;
        return this;
    }

    /** @return a defensive copy of the creation timestamp. */
    @Override
    public Date getCreated() {
        return copyOf(created);
    }

    /**
     * Sets the creation timestamp, storing a defensive copy.
     * A null argument causes a NullPointerException (as before this change);
     * the column is {@code @NotNull}.
     */
    @Override
    public IEvent setCreated(final Date created) {
        this.created = new Date(created.getTime());
        return this;
    }

    @Override
    public String getCreatedString() {
        return Utility.format(created);
    }

    /** @return a defensive copy of the event's start timestamp. */
    @Override
    public Date getDateTime() {
        return copyOf(dateTime);
    }

    /**
     * Sets the event's start timestamp, storing a defensive copy for
     * consistency with {@link #setCreated(Date)}. Null is tolerated here, as
     * it was before the defensive copy was added.
     */
    @Override
    public IEvent setDateTime(Date dateTime) {
        this.dateTime = copyOf(dateTime);
        return this;
    }

    @Override
    public String getDateTimeString() {
        return Utility.format(dateTime);
    }

    /** @return a defensive copy of the event's end timestamp, or null. */
    @Override
    public Date getDateTimeEnd() {
        return copyOf(dateTimeEnd);
    }

    /**
     * Sets the event's end timestamp, storing a defensive copy.
     * Null is tolerated (the column is nullable).
     */
    @Override
    public IEvent setDateTimeEnd(Date dateTimeEnd) {
        this.dateTimeEnd = copyOf(dateTimeEnd);
        return this;
    }

    @Override
    public String getDateTimeEndString() {
        return Utility.format(dateTimeEnd);
    }

    @Override
    public STATUS getStatus() {
        return status;
    }

    @Override
    public IEvent setStatus(STATUS status) {
        this.status = status;
        return this;
    }

    @Override
    public Long getOwnerId() {
        return ownerId;
    }

    @Override
    public IEvent setOwnerId(final Long ownerId) {
        this.ownerId = ownerId;
        return this;
    }

    @Override
    public String getMetaDataType() {
        return metaDataType;
    }

    @Override
    public IEvent setMetaDataType(String metaDataType) {
        this.metaDataType = metaDataType;
        return this;
    }

    @Override
    public String getMetaData() {
        return metaData;
    }

    @Override
    public IEvent setMetaData(String metaData) {
        this.metaData = metaData;
        return this;
    }

    @Override
    public String toString() {
        return String.format("Event - ID [%d] Event Name [%s] Type [%s] Owner Id [%d] DateTime [%s]", id, name, type,
                ownerId, getDateTimeString());
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.lookup;

import org.apache.lucene.index.*;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.TermStatistics;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.lucene.search.EmptyScorer;

import java.io.IOException;
import java.util.Iterator;

/**
 * Holds all information on a particular term in a field.
 *
 * Wraps a Lucene posting list for one (field, term) pair and exposes
 * per-document statistics (tf) and collection-wide statistics (df, ttf),
 * plus iteration over the term's positions in the current document.
 */
public class IndexFieldTerm implements Iterable<TermPosition> {

    // The posting list for this term. Is null if the term or field does not
    // exist. After setNextReader() runs it is never null: a frequency-only or
    // empty stand-in is substituted when richer postings are unavailable.
    PostingsEnum postings;

    // Stores if positions, offsets and payloads are requested
    // (bit set of IndexLookup.FLAG_* values).
    private final int flags;

    private final String fieldName;

    private final String term;

    private final PositionIterator iterator;

    // for lucene calls
    private final Term identifier;

    private final TermStatistics termStats;

    // get the document frequency of the term
    public long df() throws IOException {
        return termStats.docFreq();
    }

    // get the total term frequency of the term, that is, how often does the
    // term appear in any document?
    public long ttf() throws IOException {
        return termStats.totalTermFreq();
    }

    // when the reader changes, we have to get the posting list for this term
    // and reader
    void setNextReader(LeafReader reader) {
        try {
            // Get the posting list for a specific term.
            // First try the full position/offset/payload enum if more than
            // frequencies were requested.
            if (!shouldRetrieveFrequenciesOnly()) {
                postings = getPostings(getLucenePositionsFlags(flags), reader);
            }

            if (postings == null) {
                // Fall back to a frequencies-only enum; wrap it so position
                // and offset accessors return the documented "not available"
                // sentinel (-1) instead of failing.
                postings = getPostings(getLuceneFrequencyFlag(flags), reader);
                if (postings != null) {
                    final PostingsEnum p = postings;
                    postings = new PostingsEnum() {
                        @Override
                        public int freq() throws IOException {
                            return p.freq();
                        }

                        @Override
                        public int nextPosition() throws IOException {
                            return -1;
                        }

                        @Override
                        public int startOffset() throws IOException {
                            return -1;
                        }

                        @Override
                        public int endOffset() throws IOException {
                            return -1;
                        }

                        @Override
                        public BytesRef getPayload() throws IOException {
                            return null;
                        }

                        @Override
                        public int docID() {
                            return p.docID();
                        }

                        @Override
                        public int nextDoc() throws IOException {
                            return p.nextDoc();
                        }

                        @Override
                        public int advance(int target) throws IOException {
                            return p.advance(target);
                        }

                        @Override
                        public long cost() {
                            return p.cost();
                        }
                    };
                }
            }

            if (postings == null) {
                // Term (or field) does not exist in this reader: substitute an
                // empty enum so callers never have to null-check `postings`.
                final DocIdSetIterator empty = DocIdSetIterator.empty();
                postings = new PostingsEnum() {
                    @Override
                    public int docID() {
                        return empty.docID();
                    }

                    @Override
                    public int nextDoc() throws IOException {
                        return empty.nextDoc();
                    }

                    @Override
                    public int advance(int target) throws IOException {
                        return empty.advance(target);
                    }

                    @Override
                    public long cost() {
                        return empty.cost();
                    }

                    @Override
                    public int freq() throws IOException {
                        return 1;
                    }

                    @Override
                    public int nextPosition() throws IOException {
                        return -1;
                    }

                    @Override
                    public int startOffset() throws IOException {
                        return -1;
                    }

                    @Override
                    public int endOffset() throws IOException {
                        return -1;
                    }

                    @Override
                    public BytesRef getPayload() throws IOException {
                        return null;
                    }
                };
            }
        } catch (IOException e) {
            throw new ElasticsearchException("Unable to get posting list for field " + fieldName + " and term " + term, e);
        }

    }

    // True when FLAG_FREQUENCIES is the only flag set (or no flags at all),
    // i.e. no positions/offsets/payloads were requested.
    private boolean shouldRetrieveFrequenciesOnly() {
        return (flags & ~IndexLookup.FLAG_FREQUENCIES) == 0;
    }

    // Translate our FLAG_FREQUENCIES bit into the Lucene PostingsEnum flag.
    private int getLuceneFrequencyFlag(int flags) {
        return (flags & IndexLookup.FLAG_FREQUENCIES) > 0 ? PostingsEnum.FREQS : PostingsEnum.NONE;
    }

    // Build the Lucene flags for a positions-capable enum; payloads and
    // offsets are OR-ed in only when requested.
    private int getLucenePositionsFlags(int flags) {
        int lucenePositionsFlags = PostingsEnum.POSITIONS;
        lucenePositionsFlags |= (flags & IndexLookup.FLAG_PAYLOADS) > 0 ? PostingsEnum.PAYLOADS : 0x0;
        lucenePositionsFlags |= (flags & IndexLookup.FLAG_OFFSETS) > 0 ? PostingsEnum.OFFSETS : 0x0;
        return lucenePositionsFlags;
    }

    // Get the posting list for this term from the given reader, or null if
    // the field or term does not exist there. The current `postings` is passed
    // to termsEnum.postings() for potential reuse.
    private PostingsEnum getPostings(int luceneFlags, LeafReader reader) throws IOException {
        assert identifier.field() != null;
        assert identifier.bytes() != null;
        final Fields fields = reader.fields();
        PostingsEnum newPostings = null;
        if (fields != null) {
            final Terms terms = fields.terms(identifier.field());
            if (terms != null) {
                TermsEnum termsEnum = terms.iterator(null);
                if (termsEnum.seekExact(identifier.bytes())) {
                    newPostings = termsEnum.postings(reader.getLiveDocs(), postings, luceneFlags);
                }
            }
        }
        return newPostings;
    }

    // Term frequency within the current document; 0 when the term does not
    // occur in it. Updated by setNextDoc().
    private int freq = 0;

    // Position the posting list on the given document and cache its term
    // frequency. Must only be called with non-decreasing docIds (postings can
    // only advance forward).
    public void setNextDoc(int docId) {
        assert (postings != null);
        try {
            // we try to advance to the current document.
            int currentDocPos = postings.docID();
            if (currentDocPos < docId) {
                currentDocPos = postings.advance(docId);
            }
            if (currentDocPos == docId) {
                freq = postings.freq();
            } else {
                // Term absent from this document.
                freq = 0;
            }
            iterator.nextDoc();
        } catch (IOException e) {
            throw new ElasticsearchException("While trying to initialize term positions in IndexFieldTerm.setNextDoc() ", e);
        }
    }

    /**
     * Creates the term wrapper, choosing a caching position iterator when
     * FLAG_CACHE is requested, then eagerly positions on the lookup's current
     * reader/doc and fetches collection statistics.
     *
     * NOTE(review): setNextReader/setNextDoc are called before termStats is
     * assigned — the constructor order matters, do not reorder.
     */
    public IndexFieldTerm(String term, String fieldName, IndexLookup indexLookup, int flags) {
        assert fieldName != null;
        this.fieldName = fieldName;
        assert term != null;
        this.term = term;
        assert indexLookup != null;
        identifier = new Term(fieldName, (String) term);
        this.flags = flags;
        boolean doRecord = ((flags & IndexLookup.FLAG_CACHE) > 0);
        if (withPositions()) {
            if (!doRecord) {
                iterator = new PositionIterator(this);
            } else {
                iterator = new CachedPositionIterator(this);
            }
        } else {
            iterator = new PositionIterator(this);
        }
        setNextReader(indexLookup.getReader());
        setNextDoc(indexLookup.getDocId());
        try {
            termStats = indexLookup.getIndexSearcher().termStatistics(identifier,
                    TermContext.build(indexLookup.getReaderContext(), identifier));
        } catch (IOException e) {
            throw new ElasticsearchException("Cannot get term statistics: ", e);
        }
    }

    // True if any of positions, offsets or payloads were requested.
    private boolean withPositions() {
        return shouldRetrievePositions() || shouldRetrieveOffsets() || shouldRetrievePayloads();
    }

    protected boolean shouldRetrievePositions() {
        return (flags & IndexLookup.FLAG_POSITIONS) > 0;
    }

    protected boolean shouldRetrieveOffsets() {
        return (flags & IndexLookup.FLAG_OFFSETS) > 0;
    }

    protected boolean shouldRetrievePayloads() {
        return (flags & IndexLookup.FLAG_PAYLOADS) > 0;
    }

    // Term frequency in the current document (set by setNextDoc).
    public int tf() throws IOException {
        return freq;
    }

    @Override
    public Iterator<TermPosition> iterator() {
        return iterator.reset();
    }

    /*
     * A user might decide inside a script to call get with _POSITIONS and then
     * a second time with _PAYLOADS. If the positions were recorded but the
     * payloads were not, the user will not have access to them. Therefore, throw
     * exception here explaining how to call get().
     */
    public void validateFlags(int flags2) {
        if ((this.flags & flags2) < flags2) {
            // The newly requested flags include bits this instance was not
            // created with; tell the user the single call that covers both.
            throw new ElasticsearchException("You must call get with all required flags! Instead of " + getCalledStatement(flags2)
                    + "call " + getCallStatement(flags2 | this.flags) + " once");
        }
    }

    // Render the two get() calls the user made (original flags + new flags)
    // for the error message.
    private String getCalledStatement(int flags2) {
        String calledFlagsCall1 = getFlagsString(flags);
        String calledFlagsCall2 = getFlagsString(flags2);
        String callStatement1 = getCallStatement(calledFlagsCall1);
        String callStatement2 = getCallStatement(calledFlagsCall2);
        return " " + callStatement1 + " and " + callStatement2 + " ";
    }

    // Render a single scripted get() call with the given flags string.
    private String getCallStatement(String calledFlags) {
        return "_index['" + this.fieldName + "'].get('" + this.term + "', " + calledFlags + ")";
    }

    // Render a flag bit set as the script-visible constant names joined
    // with " | "; returns null when no bits are set.
    private String getFlagsString(int flags2) {
        String flagsString = null;
        if ((flags2 & IndexLookup.FLAG_FREQUENCIES) != 0) {
            flagsString = anddToFlagsString(flagsString, "_FREQUENCIES");
        }
        if ((flags2 & IndexLookup.FLAG_POSITIONS) != 0) {
            flagsString = anddToFlagsString(flagsString, "_POSITIONS");
        }
        if ((flags2 & IndexLookup.FLAG_OFFSETS) != 0) {
            flagsString = anddToFlagsString(flagsString, "_OFFSETS");
        }
        if ((flags2 & IndexLookup.FLAG_PAYLOADS) != 0) {
            flagsString = anddToFlagsString(flagsString, "_PAYLOADS");
        }
        if ((flags2 & IndexLookup.FLAG_CACHE) != 0) {
            flagsString = anddToFlagsString(flagsString, "_CACHE");
        }
        return flagsString;
    }

    // Append a flag name, inserting " | " between entries.
    private String anddToFlagsString(String flagsString, String flag) {
        if (flagsString != null) {
            flagsString += " | ";
        } else {
            flagsString = "";
        }
        flagsString += flag;
        return flagsString;
    }

    // Render a single get() call from a flag bit set.
    private String getCallStatement(int flags2) {
        String calledFlags = getFlagsString(flags2);
        String callStatement = getCallStatement(calledFlags);
        return " " + callStatement + " ";
    }

}
/*
 * Copyright (c) 2000, 2008, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 */

package java.nio.channels.spi;

import java.io.IOException;
import java.nio.channels.*;


/**
 * Base implementation class for selectable channels.
 *
 * <p> This class defines methods that handle the mechanics of channel
 * registration, deregistration, and closing.  It maintains the current
 * blocking mode of this channel as well as its current set of selection keys.
 * It performs all of the synchronization required to implement the {@link
 * java.nio.channels.SelectableChannel} specification.  Implementations of the
 * abstract protected methods defined in this class need not synchronize
 * against other threads that might be engaged in the same operations.  </p>
 *
 *
 * @author Mark Reinhold
 * @author Mike McCloskey
 * @author JSR-51 Expert Group
 * @since 1.4
 */

public abstract class AbstractSelectableChannel
    extends SelectableChannel
{

    // The provider that created this channel
    private final SelectorProvider provider;

    // Keys that have been created by registering this channel with selectors.
    // They are saved because if this channel is closed the keys must be
    // deregistered.  Protected by keyLock.
    // The array may contain null holes left by removeKey(); keyCount tracks
    // the number of non-null entries.
    //
    private SelectionKey[] keys = null;
    private int keyCount = 0;

    // Lock for key set and count
    private final Object keyLock = new Object();

    // Lock for registration and configureBlocking operations
    private final Object regLock = new Object();

    // Blocking mode, protected by regLock
    boolean blocking = true;

    /**
     * Initializes a new instance of this class.
     */
    protected AbstractSelectableChannel(SelectorProvider provider) {
        this.provider = provider;
    }

    /**
     * Returns the provider that created this channel.
     *
     * @return  The provider that created this channel
     */
    public final SelectorProvider provider() {
        return provider;
    }


    // -- Utility methods for the key set --

    // Stores a key in the first free slot, growing the array (doubling) when
    // it is full. Caller must not already hold regLock after keyLock in the
    // opposite order elsewhere; this method takes only keyLock.
    private void addKey(SelectionKey k) {
        synchronized (keyLock) {
            int i = 0;
            if ((keys != null) && (keyCount < keys.length)) {
                // Find empty element of key array
                for (i = 0; i < keys.length; i++)
                    if (keys[i] == null)
                        break;
            } else if (keys == null) {
                keys =  new SelectionKey[3];
            } else {
                // Grow key array
                int n = keys.length * 2;
                SelectionKey[] ks =  new SelectionKey[n];
                for (i = 0; i < keys.length; i++)
                    ks[i] = keys[i];
                keys = ks;
                i = keyCount;
            }
            keys[i] = k;
            keyCount++;
        }
    }

    // Returns the key by which this channel is registered with the given
    // selector, or null if there is none.
    private SelectionKey findKey(Selector sel) {
        synchronized (keyLock) {
            if (keys == null)
                return null;
            for (int i = 0; i < keys.length; i++)
                if ((keys[i] != null) && (keys[i].selector() == sel))
                    return keys[i];
            return null;
        }
    }

    // Removes the key's slot (leaving a null hole) and invalidates it.
    // NOTE(review): assumes keys != null — callers only invoke this for keys
    // previously added via addKey().
    void removeKey(SelectionKey k) {                    // package-private
        synchronized (keyLock) {
            for (int i = 0; i < keys.length; i++)
                if (keys[i] == k) {
                    keys[i] = null;
                    keyCount--;
                }
            ((AbstractSelectionKey)k).invalidate();
        }
    }

    // True if at least one registered key is still valid (not cancelled).
    private boolean haveValidKeys() {
        synchronized (keyLock) {
            if (keyCount == 0)
                return false;
            for (int i = 0; i < keys.length; i++) {
                if ((keys[i] != null) && keys[i].isValid())
                    return true;
            }
            return false;
        }
    }


    // -- Registration --

    public final boolean isRegistered() {
        synchronized (keyLock) {
            return keyCount != 0;
        }
    }

    public final SelectionKey keyFor(Selector sel) {
        return findKey(sel);
    }

    /**
     * Registers this channel with the given selector, returning a selection key.
     *
     * <p>  This method first verifies that this channel is open and that the
     * given initial interest set is valid.
     *
     * <p> If this channel is already registered with the given selector then
     * the selection key representing that registration is returned after
     * setting its interest set to the given value.
     *
     * <p> Otherwise this channel has not yet been registered with the given
     * selector, so the {@link AbstractSelector#register register} method of
     * the selector is invoked while holding the appropriate locks.  The
     * resulting key is added to this channel's key set before being returned.
     * </p>
     *
     * @throws  ClosedSelectorException {@inheritDoc}
     *
     * @throws  IllegalBlockingModeException {@inheritDoc}
     *
     * @throws  IllegalSelectorException {@inheritDoc}
     *
     * @throws  CancelledKeyException {@inheritDoc}
     *
     * @throws  IllegalArgumentException {@inheritDoc}
     */
    public final SelectionKey register(Selector sel, int ops,
                                       Object att)
        throws ClosedChannelException
    {
        if (!isOpen())
            throw new ClosedChannelException();
        if ((ops & ~validOps()) != 0)
            throw new IllegalArgumentException();
        synchronized (regLock) {
            // regLock is held across the whole registration so that the
            // blocking mode cannot change concurrently (registration is only
            // legal in non-blocking mode).
            if (blocking)
                throw new IllegalBlockingModeException();
            SelectionKey k = findKey(sel);
            if (k != null) {
                // Already registered with this selector: just update the
                // interest set and attachment.
                k.interestOps(ops);
                k.attach(att);
            }
            if (k == null) {
                // New registration
                k = ((AbstractSelector)sel).register(this, ops, att);
                addKey(k);
            }
            return k;
        }
    }


    // -- Closing --

    /**
     * Closes this channel.
     *
     * <p> This method, which is specified in the {@link
     * AbstractInterruptibleChannel} class and is invoked by the {@link
     * java.nio.channels.Channel#close close} method, in turn invokes the
     * {@link #implCloseSelectableChannel implCloseSelectableChannel} method in
     * order to perform the actual work of closing this channel.  It then
     * cancels all of this channel's keys.  </p>
     */
    protected final void implCloseChannel() throws IOException {
        implCloseSelectableChannel();
        synchronized (keyLock) {
            int count = (keys == null) ? 0 : keys.length;
            for (int i = 0; i < count; i++) {
                SelectionKey k = keys[i];
                if (k != null)
                    k.cancel();
            }
        }
    }

    /**
     * Closes this selectable channel.
     *
     * <p> This method is invoked by the {@link java.nio.channels.Channel#close
     * close} method in order to perform the actual work of closing the
     * channel.  This method is only invoked if the channel has not yet been
     * closed, and it is never invoked more than once.
     *
     * <p> An implementation of this method must arrange for any other thread
     * that is blocked in an I/O operation upon this channel to return
     * immediately, either by throwing an exception or by returning normally.
     * </p>
     */
    protected abstract void implCloseSelectableChannel() throws IOException;


    // -- Blocking --

    public final boolean isBlocking() {
        synchronized (regLock) {
            return blocking;
        }
    }

    public final Object blockingLock() {
        return regLock;
    }

    /**
     * Adjusts this channel's blocking mode.
     *
     * <p> If the given blocking mode is different from the current blocking
     * mode then this method invokes the {@link #implConfigureBlocking
     * implConfigureBlocking} method, while holding the appropriate locks, in
     * order to change the mode.  </p>
     */
    public final SelectableChannel configureBlocking(boolean block)
        throws IOException
    {
        if (!isOpen())
            throw new ClosedChannelException();
        synchronized (regLock) {
            if (blocking == block)
                return this;
            // Cannot switch back to blocking mode while valid registrations
            // exist — selectors require non-blocking channels.
            if (block && haveValidKeys())
                throw new IllegalBlockingModeException();
            implConfigureBlocking(block);
            blocking = block;
        }
        return this;
    }

    /**
     * Adjusts this channel's blocking mode.
     *
     * <p> This method is invoked by the {@link #configureBlocking
     * configureBlocking} method in order to perform the actual work of
     * changing the blocking mode.  This method is only invoked if the new mode
     * is different from the current mode.  </p>
     *
     * @throws IOException
     *         If an I/O error occurs
     */
    protected abstract void implConfigureBlocking(boolean block)
        throws IOException;

}
/*
 * ModeShape (http://www.modeshape.org)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.modeshape.connector.git;

import java.io.ByteArrayOutputStream;
import java.io.File;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.ListBranchCommand;
import org.eclipse.jgit.api.ListBranchCommand.ListMode;
import org.eclipse.jgit.api.ListTagCommand;
import org.eclipse.jgit.api.LogCommand;
import org.eclipse.jgit.diff.DiffEntry;
import org.eclipse.jgit.diff.DiffEntry.ChangeType;
import org.eclipse.jgit.diff.DiffFormatter;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.lib.RepositoryBuilder;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.treewalk.TreeWalk;
import org.eclipse.jgit.treewalk.filter.PathFilter;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.modeshape.jcr.ExecutionContext;

/**
 * Functional tests that exercise JGit directly against this project's own
 * repository (located two directories above the module). Output is printed
 * only when a test sets {@code print = true}.
 *
 * Fix applied: {@link #printTreeContent(String, String, boolean)} previously
 * ignored its {@code tagOrBranchOrCommit} parameter and always resolved the
 * hard-coded ref "modeshape-3.0.0.Final"; it now resolves the parameter. All
 * existing callers pass that same literal, so test behavior is unchanged.
 */
public class GitFunctionalTest {

    private static Git git;
    private static Repository repository;
    private static ExecutionContext context;
    private static Values values;
    private boolean print = false;

    /** Opens this project's own .git directory once for all tests. */
    @BeforeClass
    public static void beforeAll() throws Exception {
        File gitDir = new File("../../.git");
        RepositoryBuilder builder = new RepositoryBuilder();
        repository = builder.setGitDir(gitDir).readEnvironment().findGitDir().build();
        git = new Git(repository);
        context = new ExecutionContext();
        values = new Values(context.getValueFactories(), context.getBinaryStore());
    }

    /** Resets verbose output before each test; tests opt in individually. */
    @Before
    public void beforeEach() {
        print = false;
    }

    /** Prints the message if verbose output is enabled. */
    protected void print( String message ) {
        if (!print) return;
        System.out.println(message);
    }

    /** Prints the prefix followed by the object if verbose output is enabled. */
    protected void print( String prefix,
                          Object obj ) {
        if (!print) return;
        System.out.println(prefix + obj.toString());
    }

    /** Prints the prefix, a colon, and all objects if verbose output is enabled. */
    protected void print( String prefix,
                          Object... objects ) {
        if (!print) return;
        System.out.print(prefix + ": ");
        for (Object obj : objects) {
            System.out.print(obj.toString());
        }
        System.out.println();
    }

    /** Prints the commit's id, identities, timestamps and messages if verbose. */
    protected void print( RevCommit commit ) {
        if (!print) return;
        System.out.println(commit.getId().name());
        PersonIdent committer = commit.getCommitterIdent();
        PersonIdent author = commit.getAuthorIdent();
        System.out.println("   Author    = " + author);
        System.out.println("   Committer = " + committer);
        System.out.println("   Committed = " + values.dateFrom(commit.getCommitTime()));
        System.out.println("   Title     = " + commit.getShortMessage());
        System.out.println("   Message   = " + commit.getFullMessage().trim());
        System.out.println("   Parents   = " + commit.getParents());
    }

    @Test
    public void shouldGetBranchesWithLocalMode() throws Exception {
        // print = true;
        // A null ListMode means "local branches only".
        ListMode mode = null;
        ListBranchCommand command = git.branchList();
        command.setListMode(mode);
        for (Ref ref : command.call()) {
            String fullName = ref.getName();
            String name = fullName.replaceFirst("refs/heads/", "");
            print(fullName + " \t--> " + name);
        }
    }

    @Test
    public void shouldGetBranchesWithAllMode() throws Exception {
        // print = true;
        ListMode mode = ListMode.ALL;
        ListBranchCommand command = git.branchList();
        command.setListMode(mode);
        for (Ref ref : command.call()) {
            print(ref.getName());
        }
    }

    @Test
    public void shouldGetTags() throws Exception {
        // print = true;
        ListTagCommand command = git.tagList();
        for (Ref ref : command.call()) {
            String fullName = ref.getName();
            String name = fullName.replaceFirst("refs/tags/", "");
            print(fullName + " \t--> " + name);
        }
    }

    @Test
    public void shouldGetFirstDozenCommitsInHistoryForTag() throws Exception {
        Ref ref = repository.getRef("modeshape-3.0.0.Final");
        // Peel annotated tags so getObjectId() yields the tagged commit.
        ref = repository.peel(ref);
        RevWalk walker = new RevWalk(repository);
        walker.setRetainBody(true);
        try {
            RevCommit commit = walker.parseCommit(ref.getObjectId());
            LogCommand command = git.log();
            command.add(commit.getId());
            command.setMaxCount(12);
            for (RevCommit rev : command.call()) {
                commit = walker.parseCommit(rev);
                print(commit);
            }
        } finally {
            walker.dispose();
        }
    }

    @Test
    public void shouldComputeTheDiffOfACommit() throws Exception {
        // print = true;
        // Find the commit ...
        Ref ref = repository.getRef("modeshape-3.0.0.Final");
        ref = repository.peel(ref);
        RevWalk walker = new RevWalk(repository);
        walker.setRetainBody(true);
        try {
            RevCommit commit = walker.parseCommit(ref.getObjectId());

            // Set up the tree walk to obtain the difference between the commit and it's parent(s) ...
            TreeWalk tw = new TreeWalk(repository);
            tw.setRecursive(true);
            tw.addTree(commit.getTree());
            for (RevCommit parent : commit.getParents()) {
                RevCommit parentCommit = walker.parseCommit(parent);
                tw.addTree(parentCommit.getTree());
            }

            // Now process the diff of each file ...
            for (DiffEntry fileDiff : DiffEntry.scan(tw)) {
                ChangeType type = fileDiff.getChangeType();
                switch (type) {
                    case ADD:
                        String newPath = fileDiff.getNewPath();
                        print("ADD    ", newPath);
                        break;
                    case COPY:
                        newPath = fileDiff.getNewPath();
                        String origPath = fileDiff.getOldPath();
                        print("COPY   ", origPath, " -> ", newPath);
                        break;
                    case DELETE:
                        origPath = fileDiff.getOldPath();
                        print("DELETE ", origPath);
                        break;
                    case MODIFY:
                        newPath = fileDiff.getNewPath();
                        print("MODIFY ", newPath);
                        break;
                    case RENAME:
                        newPath = fileDiff.getNewPath();
                        origPath = fileDiff.getOldPath();
                        print("RENAME ", origPath, " -> ", newPath);
                        break;
                    default:
                        // skip
                        break;
                }
                // Render the unified diff for this file into memory.
                ByteArrayOutputStream output = new ByteArrayOutputStream();
                DiffFormatter formatter = new DiffFormatter(output);
                formatter.setRepository(repository);
                formatter.format(fileDiff);
                String diff = output.toString("UTF-8");
                print(diff);
            }
        } finally {
            walker.dispose();
        }
    }

    @Test
    public void shouldGetTopLevelDirectoryContentForCommit() throws Exception {
        printTreeContent("modeshape-3.0.0.Final", "", true);
    }

    @Test
    public void shouldGetDirectoryContentsAtPathForCommit() throws Exception {
        printTreeContent("modeshape-3.0.0.Final", "modeshape-jcr/src", true);
    }

    @Test
    public void shouldGetFileInfoAtPathInContent() throws Exception {
        printTreeContent("modeshape-3.0.0.Final", "modeshape-jcr/src/main/java/org/modeshape/jcr/XmlNodeTypeReader.java", false);
    }

    /**
     * Walks the tree of the commit identified by the given revision string and
     * prints either the directory contents at {@code parentPath} or, when the
     * path names a file, the file's last-commit information.
     *
     * @param tagOrBranchOrCommit a revision string (tag, branch, or commit id)
     *        understood by {@link Repository#resolve(String)}
     * @param parentPath the path within the tree; "" or "/" means the top level
     * @param showCommitInfo whether to print the commit's details first
     */
    protected void printTreeContent( String tagOrBranchOrCommit,
                                     String parentPath,
                                     boolean showCommitInfo ) throws Exception {
        // Find the commit ...
        // FIX: resolve the caller-supplied revision instead of a hard-coded tag.
        ObjectId objId = repository.resolve(tagOrBranchOrCommit);
        RevWalk walker = new RevWalk(repository);
        if (showCommitInfo) {
            walker.setRetainBody(true);
        }
        try {
            RevCommit commit = walker.parseCommit(objId);
            if (showCommitInfo) print(commit);
            final TreeWalk tw = new TreeWalk(repository);
            tw.addTree(commit.getTree());
            if ("".equals(parentPath) || "/".equals(parentPath)) {
                // We're already at the top-level
                tw.setRecursive(false);
                print("Getting contents of path ...");
                while (tw.next()) {
                    print(tw.getPathString());
                }
            } else {
                // Walk down to the requested path first ...
                PathFilter filter = PathFilter.create(parentPath);
                tw.setFilter(filter);
                print("Finding path ...");
                while (tw.next()) {
                    print(tw.getPathString());
                    if (filter.isDone(tw)) {
                        break;
                    } else if (tw.isSubtree()) {
                        tw.enterSubtree();
                    }
                }
                if (tw.isSubtree()) {
                    print("Getting contents of path ...");
                    tw.enterSubtree();
                    while (tw.next()) {
                        print(tw.getPathString());
                    }
                } else {
                    print("File: " + tw.getPathString());
                    // Find the commit that last modified this file ...
                    // Is this the most efficient way to do this, 'cuz it's expensive?
                    RevCommit lastCommit = git.log().addPath(parentPath).call().iterator().next();
                    print("commitMessage", lastCommit.getShortMessage());
                    PersonIdent authorIdent = lastCommit.getAuthorIdent();
                    if (authorIdent != null) {
                        print("commiter", authorIdent.getName());
                    }
                }
            }
        } finally {
            walker.dispose();
        }
    }
}
/* * ============================================================================= * Simplified BSD License, see http://www.opensource.org/licenses/ * ----------------------------------------------------------------------------- * Copyright (c) 2008-2009, Marco Terzer, Zurich, Switzerland * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the Swiss Federal Institute of Technology Zurich * nor the names of its contributors may be used to endorse or promote * products derived from this software without specific prior written * permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
 * =============================================================================
 */
package ch.javasoft.metabolic.parse;

import ch.javasoft.metabolic.MetabolicNetwork;
import ch.javasoft.metabolic.impl.DefaultMetabolicNetwork;
import ch.javasoft.metabolic.parse.junit.AbstractParseTestCase;

/**
 * A collection of small artificial test networks, mostly taken from literature
 * samples, being hard-coded in java.
 * <p>
 * Every test builds a {@link DefaultMetabolicNetwork} from a stoichiometric
 * matrix (rows = metabolites, columns = reactions — the name arrays match
 * these dimensions) plus a per-reaction reversibility flag, and hands the
 * network to {@code internalTestOrDelegate(net, null)} inherited from
 * {@link AbstractParseTestCase}.
 */
abstract public class SmallTest extends AbstractParseTestCase {

	/**
	 * The sample of the "binary approach" paper, pre-compressed.
	 */
	public void testBinSampleCompact() throws Exception {
		// 2 metabolites (B, C) x 5 reactions
		double[][] values = new double[][] {
			{ 1, 0, -1, -1, -1},
			{ 0, 1, 0, 1, -1}
		};
		// only R4c is reversible
		boolean[] reversible = new boolean[] {
			false, false, false, true, false
		};
		String[] metaNames = new String[] {"B", "C"};
		String[] reacNames = new String[] {"R1c", "R2c", "R3c", "R4c", "R5c"};
		MetabolicNetwork net = new DefaultMetabolicNetwork(metaNames, reacNames, values, reversible);
		internalTestOrDelegate(net, null);
	}

	/**
	 * The sample of the "binary approach" paper.
	 */
	public void testBinSample() throws Exception {
		// 4 metabolites (A-D) x 7 reactions
		double[][] values = new double[][] {
			{ 1, -1, -1, 0, 0, 0, 0},
			{ 0, 1, 0, -1, -1, -1, 0},
			{ 0, 0, 1, 0, 1, -1, 0},
			{ 0, 0, 0, 0, 0, 1, -1}
		};
		String[] metaNames = new String[] {"A", "B", "C", "D"};
		String[] reacNames = new String[] {"R1", "R2", "R3", "R4", "R5", "R6", "R7"};
		// only R5 is reversible
		boolean[] reversible = new boolean[] {
			false, false, false, false, true, false, false
		};
		MetabolicNetwork net = new DefaultMetabolicNetwork(metaNames, reacNames, values, reversible);
		internalTestOrDelegate(net, null);
	}

	/**
	 * The sample of the "geometry of the flux cone" paper.
	 */
	public void testGeneratingSample() throws Exception {
		double[][] values = new double[][] {
			{ 1, -1, 1, 0, 0, 0},	//A
			{ 0, 1, 0, 1, 0, -1},	//B
			{ 0, 0, -1, -1, 1, 0}	//C
		};
		// J1 and J2 are reversible
		boolean[] reversible = new boolean[] {
			true, true, false, false, false, false
		};
		String[] metaNames = new String[] {"A", "B", "C"};
		String[] reacNames = new String[] {"J1", "J2", "J3", "J4", "J5", "J6"};
		MetabolicNetwork net = new DefaultMetabolicNetwork(metaNames, reacNames, values, reversible);
		internalTestOrDelegate(net, null);
	}

	/**
	 * The tiny sample of the "System Modeling in Cellular Biology" book.
	 * No names given — uses the constructor taking only matrix and flags.
	 */
	public void testBookTiny() throws Exception {
		double[][] values = new double[][] {
			{ 1, -1, -1, 0},
			{ 0, 1, 1, -1}
		};
		boolean[] reversible = new boolean[] {
			false, false, false, false
		};
		MetabolicNetwork net = new DefaultMetabolicNetwork(values, reversible);
		internalTestOrDelegate(net, null);
	}

	/**
	 * A simple test case that cause illegal EFMs. The sign of certain flux
	 * values for irreversible reactions was wrong.
	 * <p>
	 * The network is as follows:
	 * <pre>
	 * '--> S'
	 * '--> E'
	 * 'S + E <--> ES'
	 * 'ES --> E + P'
	 * 'E -->'
	 * 'P -->'
	 * </pre>
	 * <p>
	 * The following illegal EFMs were produced
	 * <pre>
	 * 0 1 0 0 1 0
	 * 1 0 -1 1 0 1
	 * </pre>
	 * <p>
	 * Note that the problem disappears if the reversible reaction is made
	 * irreversible. It yields the correct EFMs:
	 * <pre>
	 * 0 1 0 0 1 0
	 * 1 0 1 1 0 1
	 * </pre>
	 * <p>
	 * The problem was brought up by Markus Uhr.
	 */
	public void testSignProblem() throws Exception {
		// 4 metabolites (S, E, ES, P) x 6 reactions
		double[][] stoich = new double[][] {
			{ 1, 0, -1, 0, 0, 0 },
			{ 0, 1, -1, 1, -1, 0 },
			{ 0, 0, 1, -1, 0, 0 },
			{ 0, 0, 0, 1, 0, -1 }
		};
		String[] mnames = new String[] {"S", "E", "ES", "P"};
		String[] rnames = new String[] {"S_up", "E_up", "R_ES", "R_P", "E_ex", "P_ex"};
		// E_up and the enzyme-substrate binding R_ES are reversible
		boolean[] reversible = new boolean[] {
			false, true, true, false, false, false
		};
		MetabolicNetwork net = new DefaultMetabolicNetwork(mnames, rnames, stoich, reversible);
		internalTestOrDelegate(net, null);
	}

	/**
	 * The sample of the "System Modeling in Cellular Biology" book.
	 */
	public void testBookSample() throws Exception {
		// 6 metabolites (A, B, C, D, E, P) x 10 reactions
		double[][] values = new double[][] {
			{ 1.0 , 0.0 , 0.0 , 0.0 , -1.0 , -1.0 , -1.0 , 0.0 , 0.0 , 0.0 },//A
			{ 0.0 , 1.0 , 0.0 , 0.0 , 1.0 , 0.0 , 0.0 , -1.0 , -1.0 , 0.0 },//B
			{ 0.0 , 0.0 , 0.0 , 0.0 , 0.0 , 1.0 , 0.0 , 1.0 , 0.0 , -1.0 },//C
			{ 0.0 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 , 1.0 , 0.0 , 0.0 , -1.0 },//D
			{ 0.0 , 0.0 , 0.0 , -1.0 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 , 1.0 },//E
			{ 0.0 , 0.0 , -1.0 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 , 1.0 , 1.0 } //P
		};
		// reactions 2 and 8 are reversible
		boolean[] reversible = new boolean[] {
			false, true, false, false, false, false, false, true, false, false
		};
		MetabolicNetwork net = new DefaultMetabolicNetwork(values, reversible);
		internalTestOrDelegate(net, null);
	}

	/**
	 * The sample of the "System Modeling in Cellular Biology" book, but all
	 * reactions are reversible.
	 */
	public void testBookSampleRev() throws Exception {
		// same matrix as testBookSample, all 10 reactions reversible
		double[][] values = new double[][] {
			{ 1.0 , 0.0 , 0.0 , 0.0 , -1.0 , -1.0 , -1.0 , 0.0 , 0.0 , 0.0 },//A
			{ 0.0 , 1.0 , 0.0 , 0.0 , 1.0 , 0.0 , 0.0 , -1.0 , -1.0 , 0.0 },//B
			{ 0.0 , 0.0 , 0.0 , 0.0 , 0.0 , 1.0 , 0.0 , 1.0 , 0.0 , -1.0 },//C
			{ 0.0 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 , 1.0 , 0.0 , 0.0 , -1.0 },//D
			{ 0.0 , 0.0 , 0.0 , -1.0 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 , 1.0 },//E
			{ 0.0 , 0.0 , -1.0 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 , 1.0 , 1.0 } //P
		};
		boolean[] reversible = new boolean[] {
			true, true, true, true, true, true, true, true, true, true
		};
		MetabolicNetwork net = new DefaultMetabolicNetwork(values, reversible);
		internalTestOrDelegate(net, null);
	}

	/**
	 * The sample of the "System Modeling in Cellular Biology" book, the
	 * internal reversible reaction was removed.
	 * The commented-out column/row entries mark the removed reaction and the
	 * metabolite B it connected; B's fluxes were folded into the C row.
	 */
	public void testBookSampleRemRev() throws Exception {
		double[][] values = new double[][] {
			{ 1.0 , 0.0 , 0.0 , 0.0 , -1.0 , -1.0 , -1.0 ,/* 0.0 ,*/ 0.0 , 0.0 },//A
/*			{ 0.0 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 , -1.0 , 0.0 , 0.0 },//B */
			{ 0.0 , 1.0 , 0.0 , 0.0 , 1.0 , 1.0 , 0.0 ,/* 1.0 ,*/ -1.0 , -1.0 },//C
			{ 0.0 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 , 1.0 ,/* 0.0 ,*/ 0.0 , -1.0 },//D
			{ 0.0 , 0.0 , 0.0 , -1.0 , 0.0 , 0.0 , 0.0 ,/* 0.0 ,*/ 0.0 , 1.0 },//E
			{ 0.0 , 0.0 , -1.0 , 0.0 , 0.0 , 0.0 , 0.0 ,/* 0.0 ,*/ 1.0 , 1.0 } //P
		};
		boolean[] reversible = new boolean[] {
			false, true, false, false, false, false, false, /*true,*/ false, false
		};
		MetabolicNetwork net = new DefaultMetabolicNetwork(values, reversible);
		internalTestOrDelegate(net, null);
	}

	/**
	 * The sample of the "System Modeling in Cellular Biology" book,
	 * pre-compressed.
	 */
	public void testBookMatrixCompressionA() throws Exception {
		// compressed 2x4 remainder of the book sample (metabolites C, D)
		double[][] values = new double[][] {
			{ 1.0 , -1.0 , 1.0 , 0.0},
			{ 0.0 , -1.0 , 0.0 , 1.0}
		};
		boolean[] reversible = new boolean[] {
			true, false, false, false
		};
		// original (uncompressed) names kept for reference:
//		String[] metaNames = new String[] {"A", "B", "C", "D", "E", "P"};
//		String[] reacNames = new String[] {"R1", "R2", "R3", "R4", "R5", "R6", "R7", "R8", "R9", "R10"};
		String[] metaNames = new String[] {"C", "D"};
		String[] reacNames = new String[] {"R8", "R10", "R6", "R7"};
		MetabolicNetwork net = new DefaultMetabolicNetwork(metaNames, reacNames, values, reversible);
		internalTestOrDelegate(net, null);
	}

	/**
	 * The sample of the "System Modeling in Cellular Biology" book,
	 * pre-compressed.
	 */
	public void testBookMatrixCompressionB() throws Exception {
		// compressed 2x4 remainder of the book sample (metabolites E, P)
		double[][] values = new double[][] {
			{ 1.0 , -1.0 , 0.0 , 0.0},
			{ 1.0 , 0.0 , 1.0 , -1.0}
		};
		// original (uncompressed) names kept for reference:
//		String[] metaNames = new String[] {"A", "B", "C", "D", "E", "P"};
//		String[] reacNames = new String[] {"R1", "R2", "R3", "R4", "R5", "R6", "R7", "R8", "R9", "R10"};
		String[] metaNames = new String[] {"E", "P"};
		String[] reacNames = new String[] {"R10", "R4", "R9", "R3"};
		boolean[] reversible = new boolean[] {
			false, false, false, false
		};
		MetabolicNetwork net = new DefaultMetabolicNetwork(metaNames, reacNames, values, reversible);
		internalTestOrDelegate(net, null);
	}

	/**
	 * The irreversible (already extended) example of the paper
	 * "Nullspace Approach to Determine the Elementary Modes of Chemical Reaction Systems"
	 * by C. Wagner, J. Phys. Chem., 2004
	 */
	public void testNullspaceSampleIrrev() throws Exception {
		// 5 metabolites x 13 reactions, all irreversible (already extended)
		double[][] values = new double[][] {
			{ 0, 0, -1, -2, 0, 1, 0, 0, 0, 0, 0, 1, 2},
			{ 0, -1, 0, 0, 0, 0, 1, -1, 0, 0, 1, 0, 0},
			{ 1, 0, 0, 2, 0, 0, -1, 1, 0, -1, 0, 0, -2},
			{ 0, 2, 0, 1, -1, 0, 0, 0, 0, 0, -2, 0, -1},
			{ 0, 0, 1, -1, 0, 0, 0, 0, -1, 0, 0, -1, 1},
		};
		boolean[] reversible = new boolean[] {
			false, false, false, false, false, false, false,
			false, false, false, false, false, false
		};
		MetabolicNetwork net = new DefaultMetabolicNetwork(values, reversible);
		internalTestOrDelegate(net, null);
	}
}
package us.kbase.nextgen.dapi;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;
import java.io.Writer;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;

import org.apache.commons.collections15.Transformer;
import org.apache.commons.io.FileUtils;

import us.kbase.jkidl.IncludeProvider;
import us.kbase.jkidl.StaticIncludeProvider;
import us.kbase.kidl.KbFuncdef;
import us.kbase.kidl.KbList;
import us.kbase.kidl.KbMapping;
import us.kbase.kidl.KbModule;
import us.kbase.kidl.KbModuleComp;
import us.kbase.kidl.KbParameter;
import us.kbase.kidl.KbScalar;
import us.kbase.kidl.KbService;
import us.kbase.kidl.KbStruct;
import us.kbase.kidl.KbStructItem;
import us.kbase.kidl.KbType;
import us.kbase.kidl.KbTypeInfo;
import us.kbase.kidl.KbTypedef;
import us.kbase.kidl.KidlParseException;
import us.kbase.kidl.KidlParser;

import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.databind.JsonMappingException;

import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.SparseMultigraph;
import edu.uci.ics.jung.graph.util.EdgeType;
import edu.uci.ics.jung.io.GraphMLWriter;
import edu.uci.ics.jung.io.PajekNetWriter;

/**
 * Class to generate type-type and type-method graph for a given specfile or
 * collection of specfiles.
 *
 * @version 1.0
 * @author Pavel Novichkov
 */
public class TypeMethodGraphGenerator {

	/**
	 * Whether the type-type edges should be taken into account.
	 */
	private static boolean useType2TypeEdges;

	/**
	 * Whether the method-type edges should be taken into account.
	 */
	private static boolean useType2MethodEdges;

	/**
	 * Map to store registered nodes, keyed by the typedef/function name.
	 * NOTE(review): types and functions share one namespace here — a typedef
	 * and a funcdef with the same name would collide; verify this cannot
	 * happen in the processed specs.
	 */
	private Map<String, Node> name2node = new Hashtable<String, Node>();

	/**
	 * Include provider that provides specDocuments to be included in a given
	 * spec via "include" statement.
	 */
	private StaticIncludeProvider sip;

	/**
	 * Represents a node in a graph: either a data type (typedef) or a method
	 * (funcdef).
	 */
	static class Node {
		static String TYPE_DATA = "D";
		static String TYPE_METHOD = "F";

		String name;
		String type;
		KbTypedef typedef;
		KbFuncdef func;

		public Node(KbTypedef typedef) {
			this.typedef = typedef;
			name = typedef.getName();
			type = TYPE_DATA;
		}

		public Node(KbFuncdef func) {
			this.func = func;
			name = func.getName();
			type = TYPE_METHOD;
		}

		public String getName() {
			return name;
		}

		public String getType() {
			return type;
		}

		public String toString() {
			return type + "." + name;
		}
	};

	/**
	 * Represents an edge in a graph. Each factory method returns a fresh
	 * instance, so parallel edges remain distinct in the multigraph.
	 */
	static class Edge {
		public static Edge METHOD_PARAM() { return new Edge("METHOD_PARAM"); }
		public static Edge METHOD_RETURN() { return new Edge("METHOD_RETURN"); }
		public static Edge SUBTYPE() { return new Edge("SUBTYPE"); }
		public static Edge LIST_OF() { return new Edge("LIST_OF"); }
		public static Edge HASH_KEY() { return new Edge("HASH_KEY"); }
		public static Edge HASH_VALUE() { return new Edge("HASH_VALUE"); }

		private String name;

		private Edge(String name) {
			this.name = name;
		}

		public String getName() {
			return name;
		}
	}

	/**
	 * Run TypeMethodGraphGenerator.
	 *
	 * @param specFileDir directory holding the *.spec files
	 * @param graphFileDir directory the graph files are written to
	 * @throws Exception on parse or I/O failures
	 */
	private void run(File specFileDir, File graphFileDir) throws Exception {
		buildIncludeProvider(specFileDir);
//		processIndividualFiles(specFileDir, graphFileDir);
		buildSingleGraph(specFileDir, graphFileDir);
	}

	/**
	 * Builds include provider: registers every *.spec file in the directory
	 * under its module name (file name up to the first dot).
	 *
	 * @param cleanFileDir directory with spec files
	 * @throws IOException if a spec file cannot be read
	 */
	private void buildIncludeProvider(File cleanFileDir) throws IOException {
		sip = new StaticIncludeProvider();
		for (File specFile : cleanFileDir.listFiles()) {
			if (specFile.getName().endsWith(".spec")) {
				String fileName = specFile.getName();
				String moduleName = fileName.substring(0, fileName.indexOf("."));
				String specDocument = FileUtils.readFileToString(specFile);
				sip.addSpecFile(moduleName, specDocument);
			}
		}
	}

	/**
	 * Builds individual graphs for spec files (one graph file per spec).
	 *
	 * @param specFileDir directory with spec files
	 * @param graphFileDir output directory
	 */
	private void processIndividualFiles(File specFileDir, File graphFileDir) {
		for (File specFile : specFileDir.listFiles()) {
			if (specFile.getName().endsWith(".spec")) {
				processSpecFile(specFile, graphFileDir);
			}
		}
	}

	/**
	 * Builds a combined graph for all spec files and exports it as
	 * "_combined.graphml".
	 *
	 * @param specFileDir directory with spec files
	 * @param graphFileDir output directory
	 */
	private void buildSingleGraph(File specFileDir, File graphFileDir) {
		init();
		Graph<Node, Edge> graph = new SparseMultigraph<Node, Edge>();

		// Build graph
		for (File specFile : specFileDir.listFiles()) {
			if (!specFile.getName().endsWith(".spec")) continue;
			System.out.print("Doing spec file: " + specFile.getName() + "...");
			try {
				String specDocument = FileUtils.readFileToString(specFile);
				populateGraph(graph, specDocument);
				System.out.println(" Done!");
			} catch (Exception e) {
				System.out.println(e.getMessage());
			}
		}

		// Export graph; try-with-resources closes the writer even on failure
		try {
			File graphFile = new File(graphFileDir, "_combined.graphml");
			try (FileWriter fw = new FileWriter(graphFile)) {
				exportGraphML(graph, fw);
				fw.flush();
			}
		} catch (Exception e) {
			System.out.println(e.getMessage());
		}
	}

	/**
	 * Processes one spec file: parses it, builds its graph and exports it as
	 * "&lt;specName&gt;.graphml".
	 *
	 * @param specFile the spec file to process
	 * @param exportDir output directory
	 */
	private void processSpecFile(File specFile, File exportDir) {
		try {
			init();
			System.out.print("Doing spec file: " + specFile.getName() + "...");
			File graphFile = new File(exportDir, specFile.getName() + ".graphml");

			Graph<Node, Edge> graph = new SparseMultigraph<Node, Edge>();
			String specDocument = FileUtils.readFileToString(specFile);
			populateGraph(graph, specDocument);

			// try-with-resources closes the writer even on failure
			try (FileWriter fw = new FileWriter(graphFile)) {
				exportGraphML(graph, fw);
				fw.flush();
			}
			System.out.println(" Done!");
		} catch (Exception e) {
			System.out.println(e.getMessage());
		}
	}

	/**
	 * Initialize before graph building.
	 */
	private void init() {
		// Clear map that stores registered nodes
		name2node.clear();
	}

	/**
	 * Build a new node for a given typedef, or return the existing one if it
	 * was built already.
	 *
	 * @param typedef typedef to wrap
	 * @return the cached or freshly created node
	 */
	private Node buildNode(KbTypedef typedef) {
		String name = typedef.getName();
		Node node = name2node.get(name);
		if (node == null) {
			node = new Node(typedef);
			name2node.put(name, node);
		}
		return node;
	}

	/**
	 * Process a given type and all subtypes recursively, and add corresponding
	 * edges to the graph if needed.
	 *
	 * @param graph graph being populated
	 * @param rootNode node the discovered edge points to (may be null at the top level)
	 * @param edge edge label to use for the rootNode link
	 * @param type type to process
	 * @throws Exception if a KbStruct is encountered without an enclosing typedef
	 */
	private void processTypedef(Graph<Node, Edge> graph, Node rootNode, Edge edge, KbType type) throws Exception {
		// Do not create type-type edges if not needed
		if (!useType2TypeEdges) return;

		KbTypedef typedef = null;

		// If the type is either "typedef" or "typedef structure"
		if (type instanceof KbTypedef) {
			typedef = (KbTypedef) type;
			type = typedef.getAliasType();

			// Check whether typedef was processed before
			boolean wasProcessed = name2node.containsKey(typedef.getName());

			// Create a new node for this typedef and add edge between this node and the rootNode
			Node node = buildNode(typedef);
			if (rootNode != null && node != null) {
				graph.addEdge(edge, node, rootNode, EdgeType.DIRECTED);
			}

			// Do not process typedef further if it was processed before
			if (wasProcessed) {
				return;
			}

			// Now the new node is the root
			rootNode = node;
		}

		if (type instanceof KbScalar) {
			// nothing to be done
		} else if (type instanceof KbList) {
			KbList kbList = (KbList) type;
			processTypedef(graph, rootNode, Edge.LIST_OF(), kbList.getElementType());
		} else if (type instanceof KbMapping) {
			KbMapping kbMapping = (KbMapping) type;
			processTypedef(graph, rootNode, Edge.HASH_KEY(), kbMapping.getKeyType());
			processTypedef(graph, rootNode, Edge.HASH_VALUE(), kbMapping.getValueType());
		} else if (type instanceof KbStruct) {
			KbStruct kbStruct = (KbStruct) type;
			if (typedef == null) {
				throw new Exception("If the type is KbStruct, then typedef should exist");
			}
			// Try add edges for all subtypes
			for (KbStructItem item : kbStruct.getItems()) {
				processTypedef(graph, rootNode, Edge.SUBTYPE(), item.getItemType());
			}
		}
	}

	/**
	 * Build a new node for a given function, or return the existing one if it
	 * was built already.
	 *
	 * @param func function definition to wrap
	 * @return the cached or freshly created node
	 */
	private Node buildNode(KbFuncdef func) {
		String name = func.getName();
		Node node = name2node.get(name);
		if (node == null) {
			node = new Node(func);
			// BUGFIX: register the node, mirroring buildNode(KbTypedef).
			// Previously the node was never put into name2node, so every call
			// created a fresh Node and "return the existing one" never happened.
			name2node.put(name, node);
		}
		return node;
	}

	/**
	 * Populate a graph for a given spec file.
	 *
	 * @param graph graph to populate
	 * @param specDocument raw text of the spec file
	 * @throws Exception on parse failures
	 */
	private void populateGraph(Graph<Node, Edge> graph, String specDocument) throws Exception {
		StringReader r = new StringReader(specDocument);
		Map<String, Map<String, String>> moduleToTypeToSchema = null;
		Map<?, ?> parseMap = KidlParser.parseSpecInt(r, moduleToTypeToSchema, sip);
		List<KbService> services = KidlParser.parseSpec(parseMap);

		for (KbService service : services) {
			for (KbModule module : service.getModules()) {
				for (KbModuleComp comp : module.getModuleComponents()) {
					if (comp instanceof KbTypedef) {
						KbTypedef typedef = (KbTypedef) comp;
						processTypedef(graph, null, null, typedef);
					} else if (comp instanceof KbFuncdef) {
						KbFuncdef func = (KbFuncdef) comp;
						Node funcNode = buildNode(func);

						for (KbParameter param : func.getParameters()) {
							KbType paramType = param.getType();
							if (paramType instanceof KbTypedef) {
								// try to add edges for subtypes
								processTypedef(graph, null, null, paramType);

								// Add type-method edge only if it was requested
								if (useType2MethodEdges) {
									// add edge for datatype-method connection
									Node paramNode = buildNode((KbTypedef) paramType);
									graph.addEdge(Edge.METHOD_PARAM(), paramNode, funcNode, EdgeType.DIRECTED);
								}
							}
						}
						for (KbParameter param : func.getReturnType()) {
							KbType returnType = param.getType();
							if (returnType instanceof KbTypedef) {
								// try to add edges for subtypes
								processTypedef(graph, null, null, returnType);

								// Add type-method edge only if it was requested
								if (useType2MethodEdges) {
									// add edge for datatype-method connection
									Node returnNode = buildNode((KbTypedef) returnType);
									graph.addEdge(Edge.METHOD_RETURN(), funcNode, returnNode, EdgeType.DIRECTED);
								}
							}
						}
					}
				}
			}
		}
	}

	/**
	 * Export a graph in the GraphML format, decorated with yEd-specific
	 * node/edge graphics and post-processed by {@link #adoptForYEd}.
	 *
	 * @param graph graph to export
	 * @param writer destination writer
	 * @throws IOException on write failures
	 */
	private void exportGraphML(Graph<Node, Edge> graph, Writer writer) throws IOException {
		/* Example of a node in yEd
		 <data key="d0">
		 <y:ShapeNode>
		 <y:Geometry x="165.0" y="178.0" width="30.0" height="30.0"/>
		 <y:Fill color="#CCCCFF" transparent="false"/>
		 <y:BorderStyle type="line" width="1.0" color="#000000"/>
		 <y:NodeLabel x="9.5" y="5.6494140625" width="11.0" height="18.701171875" visible="true" alignment="center" fontFamily="Dialog" fontSize="12" fontStyle="plain" textColor="#000000" modelName="internal" modelPosition="c" autoSizePolicy="content">1</y:NodeLabel>
		 <y:Shape type="rectangle"/>
		 </y:ShapeNode>
		 </data>
		 */
		GraphMLWriter<Node, Edge> graphWriter = new GraphMLWriter<Node, Edge>();
		graphWriter.addVertexData("d0", null, null,
			new Transformer<Node, String>() {
				public String transform(Node node) {
					// methods are red, data types blue
					return "<y:ShapeNode>"
						+ "<y:Shape type='rectangle'/>"
						+ "<y:Fill color='"
						+ (node.getType().equals(Node.TYPE_METHOD) ? "#FF5555" : "#CCCCFF")
						+ "' transparent='false'/>"
						+ "<y:NodeLabel>" + node.getName() + "</y:NodeLabel>"
						+ "</y:ShapeNode>";
				}
			}
		);

		/* Example of an edge in yEd
		 <edge id="e2" source="n1" target="n0">
		 <data key="d2">
		 <y:PolyLineEdge>
		 <y:Path sx="0.0" sy="0.0" tx="0.0" ty="0.0"/>
		 <y:LineStyle type="line" width="1.0" color="#000000"/>
		 <y:Arrows source="none" target="none"/>
		 <y:BendStyle smoothed="false"/>
		 </y:PolyLineEdge>
		 </data>
		 <data key="d3">222</data>
		 </edge>
		 */
		graphWriter.addEdgeData("d2", null, null,
			new Transformer<Edge, String>() {
				public String transform(Edge node) {
					return "<y:PolyLineEdge>"
						+ "<y:Arrows source='none' target='delta'/>"
						+ "</y:PolyLineEdge>";
				}
			}
		);

		StringWriter tmpWriter = new StringWriter();
		graphWriter.save(graph, tmpWriter);
		adoptForYEd(tmpWriter.toString(), writer);
	}

	/**
	 * Hack, to adopt graphML to be visualized in yEd: replaces everything
	 * before the &lt;graph&gt; element with a yEd-compatible header declaring
	 * the d0/d2 graphics keys.
	 *
	 * @param draphDoc GraphML document produced by JUNG
	 * @param writer destination writer
	 * @throws IOException on write failures
	 */
	private void adoptForYEd(String draphDoc, Writer writer) throws IOException {
		BufferedReader br = new BufferedReader(new StringReader(draphDoc));
		BufferedWriter bw = new BufferedWriter(writer);
		bw.write(
			"<?xml version='1.0' encoding='UTF-8'?>"
			+ "\n<graphml xmlns='http://graphml.graphdrawing.org/xmlns/graphml' xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance'"
			+ "\nxsi:schemaLocation='http://graphml.graphdrawing.org/xmlns/graphml http://www.yworks.com/xml/schema/graphml/1.0/ygraphml.xsd'"
			+ "\nxmlns:y='http://www.yworks.com/xml/graphml'>"
			+ "\n<key id='d0' for='node' yfiles.type='nodegraphics'/>"
			+ "\n<key id='d2' for='edge' yfiles.type='edgegraphics'/>"
		);
		boolean startFound = false;
		for (String line = br.readLine(); line != null; line = br.readLine()) {
			if (!startFound) {
				startFound = line.startsWith("<graph ");
			}
			if (startFound) {
				bw.append("\n");
				bw.append(line);
			}
		}
		bw.flush();
	}

	/**
	 * Export graph in Pajek format.
	 *
	 * @param graph graph to export
	 * @param writer destination writer
	 * @throws IOException on write failures
	 */
	private void exportPajek(Graph<Node, Edge> graph, Writer writer) throws IOException {
		PajekNetWriter<Node, Edge> graphWriter = new PajekNetWriter<Node, Edge>();
		graphWriter.save(graph, writer,
			new Transformer<Node, String>() {
				@Override
				public String transform(Node node) {
					return node.toString();
				}
			},
			new Transformer<Edge, Number>() {
				@Override
				public Number transform(Edge edge) {
					return 1;
				}
			}
		);
	}

	/**
	 * Debug/learning method to explore the kidl object model; dumps the parsed
	 * structure of a hard-coded spec file to stdout. Not called from run().
	 *
	 * @throws JsonGenerationException never in practice; declared by kidl API
	 * @throws JsonMappingException never in practice; declared by kidl API
	 * @throws KidlParseException if the spec cannot be parsed
	 * @throws IOException if the spec file cannot be read
	 */
	private void _learn() throws JsonGenerationException, JsonMappingException, KidlParseException, IOException {
		Map<KbType, String> type2name = new Hashtable<KbType, String>();

		String specDocument = FileUtils.readFileToString(new File("/kb/dev_container/modules/expression/KBaseExpression.spec"));
		StringReader r = new StringReader(specDocument);

		IncludeProvider sip = new StaticIncludeProvider();
		/*
		List<String> includedModuleNames = null;
		for (String includedModuleName : includedModuleNames) {
			String includedModuleSpec = "";
			sip.addSpecFile(includedModuleName, includedModuleSpec);
		}
		*/

		Map<String, Map<String, String>> moduleToTypeToSchema = null;
		Map<?, ?> parseMap = KidlParser.parseSpecInt(r, moduleToTypeToSchema, sip);
		List<KbService> services = KidlParser.parseSpec(parseMap);

		for (KbService service : services) {
			System.out.println("service: " + service.getName());
			for (KbModule module : service.getModules()) {
				System.out.println("\t module: " + module.getModuleName());

				System.out.println("//----- Info {{");
				for (KbTypeInfo typeInfo : module.getTypeInfoList()) {
					System.out.println("\t typeInfo:" + typeInfo.getName() + "; type: " + typeInfo.getRef());
				}
				System.out.println("//----- Info }}");

				System.out.println("//----- NameToType {{");
				for (String name : module.getNameToType().keySet()) {
					KbType type = module.getNameToType().get(name);
					System.out.println("\t name:" + name + "; type: " + type
						+ "; class: " + type.getClass().getName()
						+ "; hashcode: " + Integer.toHexString(type.hashCode()));
//					type2name.put(type, name);
				}
				System.out.println("//----- NameToType }}");

				for (KbModuleComp comp : module.getModuleComponents()) {
					if (comp instanceof KbTypedef) {
						KbTypedef typedef = (KbTypedef) comp;
						System.out.println("\t\t type: " + typedef.getName()
							+ "; class: " + typedef.getClass().getName()
							+ "; alias type: " + typedef.getAliasType());
						type2name.put(typedef, typedef.getName());

						KbType alias = typedef.getAliasType();
						if (alias instanceof KbScalar) {
							KbScalar kbScalar = (KbScalar) alias;
							System.out.println("\t\t\t scalar IdReference: " + kbScalar.getIdReference()
								+ "; scalarType: " + kbScalar.getScalarType()
								+ "; specName: " + kbScalar.getSpecName());
						} else if (alias instanceof KbList) {
							KbList kbList = (KbList) alias;
							System.out.println("\t\t\t list element type: " + kbList.getElementType()
								+ "; class: " + kbList.getElementType().getClass().getName());
						} else if (alias instanceof KbMapping) {
							KbMapping kbMapping = (KbMapping) alias;
							System.out.println("\t\t\t mapping key type: " + kbMapping.getKeyType()
								+ "; value type: " + kbMapping.getValueType());
						} else if (alias instanceof KbStruct) {
							KbStruct kbStruct = (KbStruct) alias;
							System.out.println("\t\t\t struct name: " + kbStruct.getName() + ", items: ");
							for (KbStructItem item : kbStruct.getItems()) {
								System.out.println("\t\t\t\t item name: " + item.getName()
									+ "; class: " + item.getClass().getName()
									+ "; itemType: " + item.getItemType()
									+ "; itemType class: " + item.getItemType().getClass().getName());
							}
						}
					} else if (comp instanceof KbFuncdef) {
						KbFuncdef func = (KbFuncdef) comp;
						System.out.println("\t\t function: " + func.getName());
						for (KbParameter param : func.getParameters()) {
							KbType paramType = param.getType();
							System.out.println("\t\t\t Func param: " + param.getName()
								+ "; type: " + paramType
								+ "; class: " + paramType.getClass().getName()
								+ "; hashcode: " + Integer.toHexString(paramType.hashCode())
								+ "; derived name: " + type2name.get(paramType));
						}
						for (KbParameter param : func.getReturnType()) {
							KbType returnType = param.getType();
							System.out.println("\t\t\t Return param: " + param.getName()
								+ "; type: " + returnType
								+ "; class: " + returnType.getClass().getName()
								+ "; hashcode: " + Integer.toHexString(returnType.hashCode())
								+ "; derived name: " + type2name.get(returnType));
						}
					}
				}
			}
		}

		System.out.println("//------------ check type2name");
		for (KbType type : type2name.keySet()) {
			String name = type2name.get(type);
			System.out.println("name: " + name + "; type: " + type
				+ "; class: " + type.getClass().getName()
				+ "; hashcode: " + Integer.toHexString(type.hashCode())
				+ "; derived name: " + type2name.get(type));
		}
	}

	public static void main(String[] args) throws Exception {
		useType2TypeEdges = false;
		useType2MethodEdges = true;

		File specFileDir = new File("/kb/dev_container/modules/nextgen/diagrams/typespecs/specs_clean/");
		File graphFileDir = new File("/kb/dev_container/modules/nextgen/diagrams/typespecs/graphs_type_method/");
		new TypeMethodGraphGenerator().run(specFileDir, graphFileDir);
	}
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.server;

import com.facebook.presto.OutputBuffers;
import com.facebook.presto.TaskSource;
import com.facebook.presto.execution.BufferResult;
import com.facebook.presto.execution.SharedBuffer;
import com.facebook.presto.execution.TaskId;
import com.facebook.presto.execution.TaskInfo;
import com.facebook.presto.execution.TaskManager;
import com.facebook.presto.execution.TaskState;
import com.facebook.presto.execution.TaskStateMachine;
import com.facebook.presto.operator.Page;
import com.facebook.presto.operator.TaskContext;
import com.facebook.presto.sql.analyzer.Session;
import com.facebook.presto.sql.planner.PlanFragment;
import com.facebook.presto.sql.planner.plan.PlanNodeId;
import com.facebook.presto.execution.ExecutionFailureInfo;
import com.facebook.presto.util.Threads;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import io.airlift.http.server.HttpServerInfo;
import io.airlift.units.DataSize;
import io.airlift.units.DataSize.Unit;
import io.airlift.units.Duration;
import org.joda.time.DateTime;

import javax.inject.Inject;

import java.net.URI;
import java.util.Collections;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicLong;

import static com.facebook.presto.block.BlockAssertions.createStringsBlock;
import static com.facebook.presto.util.Failures.toFailures;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static io.airlift.http.client.HttpUriBuilder.uriBuilderFrom;
import static io.airlift.units.DataSize.Unit.MEGABYTE;

/**
 * In-memory {@link TaskManager} stub for tests. Each task it creates
 * ({@link MockTask}) pre-loads a fixed set of string pages into a
 * {@link SharedBuffer} and finishes immediately, so result-fetching paths can
 * be exercised without executing a real plan.
 */
public class MockTaskManager
        implements TaskManager
{
    // daemon threads so a test JVM can exit without explicit shutdown
    private final Executor executor = Executors.newCachedThreadPool(Threads.daemonThreadsNamed("test-%d"));

    // used only to fabricate each task's self URI in updateTask()
    private final HttpServerInfo httpServerInfo;
    private final DataSize maxBufferSize;
    // number of pages pre-loaded into every new task's shared buffer
    private final int initialPages;

    private final ConcurrentMap<TaskId, MockTask> tasks = new ConcurrentHashMap<>();

    /** Injectable default: 100 MB buffer, 12 initial pages. */
    @Inject
    public MockTaskManager(HttpServerInfo httpServerInfo)
    {
        this(httpServerInfo, new DataSize(100, Unit.MEGABYTE), 12);
    }

    /**
     * @param httpServerInfo source of the base HTTP URI for task locations
     * @param maxBufferSize capacity of each task's shared buffer; must be positive
     * @param initialPages pages to enqueue on task creation; 0 <= initialPages <= maxBufferSize bytes
     */
    public MockTaskManager(HttpServerInfo httpServerInfo, DataSize maxBufferSize, int initialPages)
    {
        checkNotNull(httpServerInfo, "httpServerInfo is null");
        Preconditions.checkArgument(maxBufferSize.toBytes() > 0, "pageBufferMax must be at least 1");
        Preconditions.checkArgument(initialPages >= 0, "initialPages is negative");
        Preconditions.checkArgument(initialPages <= maxBufferSize.toBytes(), "initialPages is greater than maxBufferSize");
        this.httpServerInfo = httpServerInfo;
        this.maxBufferSize = maxBufferSize;
        this.initialPages = initialPages;
    }

    // NOTE(review): the 'full' flag is ignored by this mock — every call
    // returns complete TaskInfo snapshots.
    @Override
    public synchronized List<TaskInfo> getAllTaskInfo(boolean full)
    {
        ImmutableList.Builder<TaskInfo> builder = ImmutableList.builder();
        for (MockTask task : tasks.values()) {
            builder.add(task.getTaskInfo());
        }
        return builder.build();
    }

    // Intentional no-op: the mock never blocks waiting for state transitions.
    @Override
    public void waitForStateChange(TaskId taskId, TaskState currentState, Duration maxWait)
            throws InterruptedException
    {
    }

    /**
     * @throws NoSuchElementException if no task with the given id exists
     */
    @Override
    public synchronized TaskInfo getTaskInfo(TaskId taskId, boolean full)
    {
        checkNotNull(taskId, "taskId is null");

        MockTask task = tasks.get(taskId);
        if (task == null) {
            throw new NoSuchElementException();
        }
        return task.getTaskInfo();
    }

    /**
     * Creates the task lazily on first call (the PlanFragment is ignored —
     * hence the parameter name) and forwards the output-buffer update to it.
     */
    @Override
    public synchronized TaskInfo updateTask(Session session, TaskId taskId, PlanFragment ignored, List<TaskSource> sources, OutputBuffers outputBuffers)
    {
        checkNotNull(session, "session is null");
        checkNotNull(taskId, "taskId is null");
        checkNotNull(sources, "sources is null");
        checkNotNull(outputBuffers, "outputBuffers is null");

        MockTask task = tasks.get(taskId);
        if (task == null) {
            task = new MockTask(session,
                    taskId,
                    // self URI: <httpBase>/v1/task/<taskId>
                    uriBuilderFrom(httpServerInfo.getHttpUri()).appendPath("v1/task").appendPath(taskId.toString()).build(),
                    outputBuffers,
                    maxBufferSize,
                    initialPages,
                    executor);
            tasks.put(taskId, task);
        }
        task.addOutputBuffers(outputBuffers);
        return task.getTaskInfo();
    }

    /**
     * Fetches buffered results. Only the map lookup is synchronized so the
     * (potentially blocking) buffer read happens outside the manager lock.
     *
     * @throws NoSuchElementException if no task with the given id exists
     */
    @Override
    public BufferResult getTaskResults(TaskId taskId, String outputId, long startingSequenceId, DataSize maxSize, Duration maxWaitTime)
            throws InterruptedException
    {
        checkNotNull(taskId, "taskId is null");
        checkNotNull(outputId, "outputId is null");

        MockTask task;
        synchronized (this) {
            task = tasks.get(taskId);
        }
        if (task == null) {
            throw new NoSuchElementException();
        }
        return task.getResults(outputId, startingSequenceId, maxSize, maxWaitTime);
    }

    /**
     * @throws NoSuchElementException if no task with the given id exists
     */
    @Override
    public synchronized TaskInfo abortTaskResults(TaskId taskId, String outputId)
    {
        checkNotNull(taskId, "taskId is null");
        checkNotNull(outputId, "outputId is null");

        MockTask task = tasks.get(taskId);
        if (task == null) {
            throw new NoSuchElementException();
        }
        task.abortResults(outputId);
        return task.getTaskInfo();
    }

    // Returns null (rather than throwing) for an unknown task id.
    @Override
    public synchronized TaskInfo cancelTask(TaskId taskId)
    {
        checkNotNull(taskId, "taskId is null");

        MockTask task = tasks.get(taskId);
        if (task == null) {
            return null;
        }
        task.cancel();
        return task.getTaskInfo();
    }

    /**
     * A single mock task: a state machine plus a pre-filled, already-finished
     * shared output buffer.
     */
    public static class MockTask
    {
        // monotonically increasing TaskInfo version, bumped on every getTaskInfo()
        private final AtomicLong nextTaskInfoVersion = new AtomicLong(TaskInfo.STARTING_VERSION);

        private final URI location;
        private final TaskStateMachine taskStateMachine;
        private final TaskContext taskContext;
        private final SharedBuffer sharedBuffer;

        /**
         * Builds the task and immediately fills + finishes its shared buffer:
         * page i contains (i + 1) repetitions of the 4-string sample list.
         */
        public MockTask(Session session,
                TaskId taskId,
                URI location,
                OutputBuffers outputBuffers,
                DataSize maxBufferSize,
                int initialPages,
                Executor executor)
        {
            this.taskStateMachine = new TaskStateMachine(checkNotNull(taskId, "taskId is null"), checkNotNull(executor, "executor is null"));
            // 256 MB / 1 MB limits; final flag enables cpu timing (NOTE(review): confirm against TaskContext signature)
            this.taskContext = new TaskContext(taskStateMachine, executor, session, new DataSize(256, MEGABYTE), new DataSize(1, MEGABYTE), true);

            this.location = checkNotNull(location, "location is null");
            this.sharedBuffer = new SharedBuffer(taskId, executor, checkNotNull(maxBufferSize, "maxBufferSize is null"), outputBuffers);

            List<String> data = ImmutableList.of("apple", "banana", "cherry", "date");

            // load initial pages; enqueue must complete immediately since the
            // buffer starts empty and initialPages fits within maxBufferSize
            for (int i = 0; i < initialPages; i++) {
                checkState(sharedBuffer.enqueue(new Page(createStringsBlock(Iterables.concat(Collections.nCopies(i + 1, data))))).isDone(), "Unable to add page to buffer");
            }
            sharedBuffer.finish();
        }

        public void abortResults(String outputId)
        {
            sharedBuffer.abort(outputId);
        }

        public void addOutputBuffers(OutputBuffers outputBuffers)
        {
            sharedBuffer.setOutputBuffers(outputBuffers);
        }

        public void cancel()
        {
            taskStateMachine.cancel();
        }

        public BufferResult getResults(String outputId, long startingSequenceId, DataSize maxSize, Duration maxWaitTime)
                throws InterruptedException
        {
            return sharedBuffer.get(outputId, startingSequenceId, maxSize, maxWaitTime);
        }

        /** Snapshot of the task's state; failure causes are included only when FAILED. */
        public TaskInfo getTaskInfo()
        {
            TaskState state = taskStateMachine.getState();
            List<ExecutionFailureInfo> failures = ImmutableList.of();
            if (state == TaskState.FAILED) {
                failures = toFailures(taskStateMachine.getFailureCauses());
            }

            return new TaskInfo(
                    taskStateMachine.getTaskId(),
                    nextTaskInfoVersion.getAndIncrement(),
                    state,
                    location,
                    DateTime.now(),
                    sharedBuffer.getInfo(),
                    ImmutableSet.<PlanNodeId>of(),
                    taskContext.getTaskStats(),
                    failures);
        }
    }
}
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package dev.flutter.scenariosui; import android.graphics.Bitmap; import android.os.Bundle; import android.os.Handler; import android.os.Looper; import android.util.Xml; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.test.InstrumentationRegistry; import androidx.test.runner.AndroidJUnitRunner; import com.facebook.testing.screenshot.ScreenshotRunner; import com.facebook.testing.screenshot.internal.AlbumImpl; import com.facebook.testing.screenshot.internal.Registry; import com.facebook.testing.screenshot.internal.TestNameDetector; import dev.flutter.scenarios.TestableFlutterActivity; import java.io.BufferedOutputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import org.xmlpull.v1.XmlSerializer; /** * Adapter for {@code com.facebook.testing.screenshot.Screenshot} that supports Flutter apps. * * <p>{@code com.facebook.testing.screenshot.Screenshot} relies on {@code View#draw(canvas)}, which * doesn't draw Flutter's Surface or SurfaceTexture. * * <p>The workaround takes a full screenshot of the device and removes the status and action bars. */ public class ScreenshotUtil { private XmlSerializer serializer; private AlbumImpl album; private OutputStream streamOutput; private static ScreenshotUtil instance; private static int BUFFER_SIZE = 1 << 16; // 64K @NonNull protected static ScreenshotUtil getInstance() { synchronized (ScreenshotUtil.class) { if (instance == null) { instance = new ScreenshotUtil(); } return instance; } } /** Starts the album, which contains the screenshots in a zip file, and a metadata.xml file. 
*/ void init() { if (serializer != null) { return; } album = AlbumImpl.create(Registry.getRegistry().instrumentation.getContext(), "default"); // Delete all screenshots in the device associated with this album. album.cleanup(); serializer = Xml.newSerializer(); try { streamOutput = new BufferedOutputStream(new FileOutputStream(album.getMetadataFile()), BUFFER_SIZE); } catch (FileNotFoundException e) { throw new RuntimeException(e); } try { serializer.setOutput(streamOutput, "utf-8"); serializer.startDocument("utf-8", null); // Start tag <screenshots>. serializer.startTag(null, "screenshots"); } catch (IOException e) { throw new RuntimeException(e); } } void writeText(String tagName, String value) throws IOException { if (serializer == null) { throw new RuntimeException("ScreenshotUtil must be initialized. Call init()."); } serializer.startTag(null, tagName); serializer.text(value); serializer.endTag(null, tagName); } void writeBitmap(Bitmap bitmap, String name, String testClass, String testName) throws IOException { if (serializer == null) { throw new RuntimeException("ScreenshotUtil must be initialized. Call init()."); } album.writeBitmap(name, 0, 0, bitmap); serializer.startTag(null, "screenshot"); writeText("name", name); writeText("test_class", testClass); writeText("test_name", testName); writeText("tile_width", "1"); writeText("tile_height", "1"); serializer.endTag(null, "screenshot"); } /** Finishes metadata.xml. */ void flush() { if (serializer == null) { throw new RuntimeException("ScreenshotUtil must be initialized. Call init()."); } try { // End tag </screenshots> serializer.endTag(null, "screenshots"); serializer.endDocument(); serializer.flush(); } catch (IOException e) { throw new RuntimeException(e); } try { streamOutput.close(); } catch (IOException e) { throw new RuntimeException(e); } album.flush(); serializer = null; streamOutput = null; album = null; } /** * Captures a screenshot of {@code TestableFlutterActivity}. 
* * <p>The activity must be already launched. */ public static void capture(@NonNull TestableFlutterActivity activity) throws InterruptedException, ExecutionException, IOException { // Yield and wait for the engine to render the first Flutter frame. activity.waitUntilFlutterRendered(); // This method is called from the runner thread, // so block the UI thread while taking the screenshot. // Screenshot.capture(view or activity) does not capture the Flutter UI. // Unfortunately, it doesn't work with Android's `Surface` or `TextureSurface`. // // As a result, capture a screenshot of the entire device and then clip // the status and action bars. // // Under the hood, this call is similar to `adb screencap`, which is used // to capture screenshots. final String testClass = TestNameDetector.getTestClass(); final String testName = TestNameDetector.getTestName(); runCallableOnUiThread( new Callable<Void>() { @Override public Void call() { Bitmap bitmap = InstrumentationRegistry.getInstrumentation().getUiAutomation().takeScreenshot(); // Remove the status and action bars from the screenshot capture. bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight()); final String screenshotName = String.format("%s__%s", testClass, testName); // Write bitmap to the album. try { ScreenshotUtil.getInstance().writeBitmap(bitmap, screenshotName, testClass, testName); } catch (IOException e) { throw new RuntimeException(e); } return null; } }); } /** * Initializes the {@code com.facebook.testing.screenshot.internal.Album}. * * <p>Call this method from {@code AndroidJUnitRunner#onCreate}. */ public static void onCreate(@NonNull AndroidJUnitRunner runner, @Nullable Bundle arguments) { ScreenshotRunner.onCreate(runner, arguments); ScreenshotUtil.getInstance().init(); } /** * Flushes the {@code com.facebook.testing.screenshot.internal.Album}. * * <p>Call this method from {@code AndroidJUnitRunner#onDestroy}. 
*/ public static void onDestroy() { ScreenshotRunner.onDestroy(); ScreenshotUtil.getInstance().flush(); } private static void runCallableOnUiThread(final Callable<Void> callable) { if (Looper.getMainLooper().getThread() == Thread.currentThread()) { try { callable.call(); } catch (Exception e) { e.printStackTrace(); } return; } Handler handler = new Handler(Looper.getMainLooper()); final Object lock = new Object(); synchronized (lock) { handler.post( new Runnable() { @Override public void run() { try { callable.call(); } catch (Exception e) { e.printStackTrace(); } synchronized (lock) { lock.notifyAll(); } } }); try { lock.wait(); } catch (InterruptedException e) { throw new RuntimeException(e); } } } }