gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.hl7; import java.io.ByteArrayInputStream; import java.nio.charset.Charset; import ca.uhn.hl7v2.model.Message; import ca.uhn.hl7v2.model.v24.message.ADR_A19; import ca.uhn.hl7v2.model.v24.segment.MSA; import ca.uhn.hl7v2.model.v24.segment.MSH; import ca.uhn.hl7v2.model.v24.segment.QRD; import org.apache.camel.Exchange; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.mock.MockEndpoint; import org.apache.camel.test.junit4.CamelTestSupport; import org.junit.Test; /** * Unit test for HL7 DataFormat. 
*/
public class HL7DataFormatTest extends CamelTestSupport {

    // Characters outside ISO-8859-1; used to prove which charset the marshaller applied.
    private static final String NONE_ISO_8859_1 = "\u221a\u00c4\u221a\u00e0\u221a\u00e5\u221a\u00ed\u221a\u00f4\u2248\u00ea";

    // Default data format: charset taken from exchange property / MSH-18.
    private HL7DataFormat hl7 = new HL7DataFormat();

    // Variant whose charset "guess" is forced to Big5, to exercise the
    // implicit-charset code path without a real detection heuristic.
    private HL7DataFormat hl7big5 = new HL7DataFormat() {
        @Override
        protected String guessCharsetName(byte[] b, Exchange exchange) {
            return "Big5";
        }
    };

    /** Marshalling a HAPI Message yields a byte[] containing the expected segments. */
    @Test
    public void testMarshal() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:marshal");
        mock.expectedMessageCount(1);
        mock.message(0).body().isInstanceOf(byte[].class);
        mock.message(0).body(String.class).contains("MSA|AA|123");
        mock.message(0).body(String.class).contains("QRD|20080805120000");

        Message message = createHL7AsMessage();
        template.sendBody("direct:marshal", message);
        assertMockEndpointsSatisfied();
    }

    /** With an ISO-8859-1 charset property the non-Latin-1 characters must not survive. */
    @Test
    public void testMarshalISO8859() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:marshal");
        mock.expectedMessageCount(1);
        mock.message(0).body().isInstanceOf(byte[].class);
        mock.message(0).body(String.class).contains("MSA|AA|123");
        mock.message(0).body(String.class).contains("QRD|20080805120000");
        mock.message(0).body(String.class).not().contains(NONE_ISO_8859_1);

        Message message = createHL7AsMessage();
        template.sendBodyAndProperty("direct:marshal", message, Exchange.CHARSET_NAME, "ISO-8859-1");
        assertMockEndpointsSatisfied();
    }

    /** Charset declared both in MSH-18 and as exchange property; output must decode as UTF-16. */
    @Test
    public void testMarshalUTF16InMessage() throws Exception {
        String charsetName = "UTF-16";
        MockEndpoint mock = getMockEndpoint("mock:marshal");
        mock.expectedMessageCount(1);

        Message message = createHL7WithCharsetAsMessage(HL7Charset.getHL7Charset(charsetName));
        template.sendBodyAndProperty("direct:marshal", message, Exchange.CHARSET_NAME, charsetName);
        assertMockEndpointsSatisfied();

        // Decode with the same charset that was requested and check segment content.
        byte[] body = (byte[])mock.getExchanges().get(0).getIn().getBody();
        String msg = new String(body, Charset.forName(charsetName));
        assertTrue(msg.contains("MSA|AA|123"));
        assertTrue(msg.contains("QRD|20080805120000"));
    }

    /** With UTF-8 the non-Latin-1 characters must survive the round trip. */
    @Test
    public void testMarshalUTF8() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:marshal");
        mock.expectedMessageCount(1);
        mock.message(0).body().isInstanceOf(byte[].class);
        mock.message(0).body(String.class).contains("MSA|AA|123");
        mock.message(0).body(String.class).contains("QRD|20080805120000");
        mock.message(0).body(String.class).contains(NONE_ISO_8859_1);

        Message message = createHL7AsMessage();
        template.sendBodyAndProperty("direct:marshal", message, Exchange.CHARSET_NAME, "UTF-8");
        assertMockEndpointsSatisfied();
    }

    /** Unmarshalling populates the full set of HL7 MSH-derived headers. */
    @Test
    public void testUnmarshal() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:unmarshal");
        mock.expectedMessageCount(1);
        mock.message(0).body().isInstanceOf(Message.class);
        mock.expectedHeaderReceived(HL7Constants.HL7_SENDING_APPLICATION, "MYSENDER");
        mock.expectedHeaderReceived(HL7Constants.HL7_SENDING_FACILITY, "MYSENDERAPP");
        mock.expectedHeaderReceived(HL7Constants.HL7_RECEIVING_APPLICATION, "MYCLIENT");
        mock.expectedHeaderReceived(HL7Constants.HL7_RECEIVING_FACILITY, "MYCLIENTAPP");
        mock.expectedHeaderReceived(HL7Constants.HL7_TIMESTAMP, "200612211200");
        mock.expectedHeaderReceived(HL7Constants.HL7_SECURITY, null);
        mock.expectedHeaderReceived(HL7Constants.HL7_MESSAGE_TYPE, "QRY");
        mock.expectedHeaderReceived(HL7Constants.HL7_TRIGGER_EVENT, "A19");
        mock.expectedHeaderReceived(HL7Constants.HL7_MESSAGE_CONTROL, "1234");
        mock.expectedHeaderReceived(HL7Constants.HL7_PROCESSING_ID, "P");
        mock.expectedHeaderReceived(HL7Constants.HL7_VERSION_ID, "2.4");
        mock.expectedHeaderReceived(HL7Constants.HL7_CONTEXT, hl7.getHapiContext());
        // No MSH-18 charset in the test message, so the header is null and UTF-8 is the default.
        mock.expectedHeaderReceived(HL7Constants.HL7_CHARSET, null);
        mock.expectedHeaderReceived(Exchange.CHARSET_NAME, "UTF-8");

        String body = createHL7AsString();
        template.sendBody("direct:unmarshal", body);
        assertMockEndpointsSatisfied();

        Message msg = mock.getExchanges().get(0).getIn().getBody(Message.class);
        assertEquals("2.4", msg.getVersion());
        QRD qrd = (QRD) msg.get("QRD");
        assertEquals("0101701234", qrd.getWhoSubjectFilter(0).getIDNumber().getValue());
    }

    /** Unmarshalling a UTF-16 stream whose charset is declared explicitly in MSH-18. */
    @Test
    public void testUnmarshalWithExplicitUTF16Charset() throws Exception {
        String charset = "UTF-16";
        MockEndpoint mock = getMockEndpoint("mock:unmarshal");
        mock.expectedMessageCount(1);
        mock.message(0).body().isInstanceOf(Message.class);
        mock.expectedHeaderReceived(HL7Constants.HL7_CHARSET, HL7Charset.getHL7Charset(charset).getHL7CharsetName());
        mock.expectedHeaderReceived(Exchange.CHARSET_NAME, charset);

        // Message with explicit encoding in MSH-18
        byte[] body = createHL7WithCharsetAsString(HL7Charset.UTF_16).getBytes(Charset.forName(charset));
        template.sendBodyAndHeader("direct:unmarshal", new ByteArrayInputStream(body), Exchange.CHARSET_NAME, charset);
        assertMockEndpointsSatisfied();

        Message msg = mock.getExchanges().get(0).getIn().getBody(Message.class);
        assertEquals("2.4", msg.getVersion());
        QRD qrd = (QRD) msg.get("QRD");
        assertEquals("0101701234", qrd.getWhoSubjectFilter(0).getIDNumber().getValue());
    }

    /** Unmarshalling a Big5 stream with no MSH-18 charset; the hl7big5 format "guesses" it. */
    @Test
    public void testUnmarshalWithImplicitBig5Charset() throws Exception {
        String charset = "Big5";
        MockEndpoint mock = getMockEndpoint("mock:unmarshalBig5");
        mock.expectedMessageCount(1);
        mock.message(0).body().isInstanceOf(Message.class);
        mock.expectedHeaderReceived(HL7Constants.HL7_CHARSET, null);
        mock.expectedHeaderReceived(Exchange.CHARSET_NAME, charset);

        // Message without explicit encoding in MSH-18, but the unmarshaller "guesses"
        // this time that it is Big5
        byte[] body = createHL7AsString().getBytes(Charset.forName(charset));
        template.sendBody("direct:unmarshalBig5", new ByteArrayInputStream(body));
        assertMockEndpointsSatisfied();

        Message msg = mock.getExchanges().get(0).getIn().getBody(Message.class);
        assertEquals("2.4", msg.getVersion());
        QRD qrd = (QRD) msg.get("QRD");
        assertEquals("0101701234", qrd.getWhoSubjectFilter(0).getIDNumber().getValue());
    }

    // Routes under test: one marshal route and one unmarshal route per data format.
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            public void configure() throws Exception {
                from("direct:marshal").marshal().hl7().to("mock:marshal");
                from("direct:unmarshal").unmarshal(hl7).to("mock:unmarshal");
                from("direct:unmarshalBig5").unmarshal(hl7big5).to("mock:unmarshalBig5");
            }
        };
    }

    /** QRY^A19 message as a raw ER7 string without an MSH-18 charset. */
    private static String createHL7AsString() {
        return createHL7WithCharsetAsString(null);
    }

    /** QRY^A19 message as a raw ER7 string; charset (may be null) is placed in MSH-18. */
    private static String createHL7WithCharsetAsString(HL7Charset charset) {
        String hl7Charset = charset == null ? "" : charset.getHL7CharsetName();
        String line1 = String.format("MSH|^~\\&|MYSENDER|MYSENDERAPP|MYCLIENT|MYCLIENTAPP|200612211200||QRY^A19|1234|P|2.4||||||%s", hl7Charset);
        String line2 = "QRD|200612211200|R|I|GetPatient|||1^RD|0101701234|DEM||";

        StringBuilder body = new StringBuilder();
        body.append(line1);
        // HL7 segments are separated by carriage return.
        body.append("\r");
        body.append(line2);
        return body.toString();
    }

    /** ADR^A19 response as a HAPI message, carrying the non-ISO-8859-1 text in MSA-3. */
    private static ADR_A19 createHL7AsMessage() throws Exception {
        ADR_A19 adr = new ADR_A19();

        // Populate the MSH Segment
        MSH mshSegment = adr.getMSH();
        mshSegment.getFieldSeparator().setValue("|");
        mshSegment.getEncodingCharacters().setValue("^~\\&");
        mshSegment.getDateTimeOfMessage().getTimeOfAnEvent().setValue("200701011539");
        mshSegment.getSendingApplication().getNamespaceID().setValue("MYSENDER");
        mshSegment.getSequenceNumber().setValue("123");
        mshSegment.getMessageType().getMessageType().setValue("ADR");
        mshSegment.getMessageType().getTriggerEvent().setValue("A19");

        // Populate the MSA Segment (original comment said PID, but this is the MSA)
        MSA msa = adr.getMSA();
        msa.getAcknowledgementCode().setValue("AA");
        msa.getMessageControlID().setValue("123");
        msa.getMsa3_TextMessage().setValue(NONE_ISO_8859_1);

        QRD qrd = adr.getQRD();
        qrd.getQueryDateTime().getTimeOfAnEvent().setValue("20080805120000");

        return adr;
    }

    /** Same as createHL7AsMessage() but with the given charset written into MSH-18. */
    private static ADR_A19 createHL7WithCharsetAsMessage(HL7Charset charset) throws Exception {
        ADR_A19 adr = createHL7AsMessage();
        adr.getMSH().getCharacterSet(0).setValue(charset.getHL7CharsetName());
        return adr;
    }
}
/*
 * Copyright 2013 Zakhar Prykhoda
 *
 *    midao.org
 *    Licensed under the Apache License, Version 2.0 (the "License");
 *    you may not use this file except in compliance with the License.
 *    You may obtain a copy of the License at
 *
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 *    Unless required by applicable law or agreed to in writing, software
 *    distributed under the License is distributed on an "AS IS" BASIS,
 *    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *    See the License for the specific language governing permissions and
 *    limitations under the License.
 */
package org.midao.jdbc.core.handlers.type;

import junit.framework.Assert;
import org.junit.Before;
import org.junit.Test;
import org.midao.jdbc.core.MjdbcConstants;
import org.midao.jdbc.core.Overrider;
import org.midao.jdbc.core.exception.MjdbcException;
import org.midao.jdbc.core.handlers.utils.MappingUtils;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;

import java.io.*;
import java.sql.*;
import java.util.Arrays;

import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.*;

/**
 * Tests for {@code TypeHandlerUtils}: conversion of Java values into JDBC
 * Array/Blob/Clob/SQLXML types and reading them back.
 *
 * NOTE(review): JDBC4-only API (Connection.createBlob/createClob/createSQLXML,
 * SQLXML streams) is invoked reflectively via MappingUtils.invokeFunction —
 * presumably to keep the module compiling against pre-JDBC4 class libraries;
 * confirm against MappingUtils.
 */
public class TypeHandlerUtilsTest {
    @Mock Connection conn;
    @Mock Array array;
    @Mock Blob blob;
    @Mock Clob clob;
    //@Mock SQLXML sqlXml;
    @Mock OutputStream output;
    @Mock InputStream input;

    // Typed as Object (not SQLXML) so the test class loads even when
    // java.sql.SQLXML is absent; the mock is created reflectively in setUp().
    Object sqlXml;

    @Before
    public void setUp() throws ClassNotFoundException {
        MockitoAnnotations.initMocks(this);
        sqlXml = Mockito.mock(Class.forName("java.sql.SQLXML"));
    }

    /** convertArray(Connection, Object[]) delegates to Connection.createArrayOf. */
    @Test
    public void testConvertArrayArray() throws Exception {
        Object[] array = new String[]{"Sun"};

        TypeHandlerUtils.convertArray(conn, array);

        //verify(conn, times(1)).createArrayOf("VARCHAR", array);
        MappingUtils.invokeFunction(verify(conn, times(1)), "createArrayOf", new Class[]{String.class, Object[].class}, new Object[]{"VARCHAR", array});
    }

    /** convertArray(Connection, Collection) behaves like the array overload. */
    @Test
    public void testConvertArrayCollection() throws Exception {
        Object[] array = new String[]{"Venus"};

        TypeHandlerUtils.convertArray(conn, Arrays.asList(array));

        //verify(conn, times(1)).createArrayOf("VARCHAR", array);
        MappingUtils.invokeFunction(verify(conn, times(1)), "createArrayOf", new Class[]{String.class, Object[].class}, new Object[]{"VARCHAR", array});
    }

    /** convertBlob(Connection, byte[]) creates a Blob and streams the bytes into it. */
    @Test
    public void testConvertBlobConnByte() throws Exception {
        testConvertBlobPrepare();

        TypeHandlerUtils.convertBlob(conn, "GIF".getBytes());

        //verify(conn, times(1)).createBlob();
        MappingUtils.invokeFunction(verify(conn, times(1)), "createBlob", new Class[]{}, new Object[]{});
        testConvertBlobCheck();
    }

    /** convertBlob(Connection, InputStream) creates a Blob and copies the stream. */
    @Test
    public void testConvertBlobConnInputStream() throws Exception {
        testConvertBlobPrepare();

        TypeHandlerUtils.convertBlob(conn, new ByteArrayInputStream("BMP".getBytes()));

        //verify(conn, times(1)).createBlob();
        MappingUtils.invokeFunction(verify(conn, times(1)), "createBlob", new Class[]{}, new Object[]{});
        testConvertBlobCheck();
    }

    /** convertBlob(Connection, String) creates a Blob from string content. */
    @Test
    public void testConvertBlobConnString() throws Exception {
        testConvertBlobPrepare();

        TypeHandlerUtils.convertBlob(conn, "PCX");

        //verify(conn, times(1)).createBlob();
        MappingUtils.invokeFunction(verify(conn, times(1)), "createBlob", new Class[]{}, new Object[]{});
        testConvertBlobCheck();
    }

    /** convertBlob(Blob, byte[]) writes into an existing Blob. */
    @Test
    public void testConvertBlobByte() throws Exception {
        testConvertBlobPrepare();

        TypeHandlerUtils.convertBlob(blob, "JPEG".getBytes());

        testConvertBlobCheck();
    }

    /** convertBlob(Blob, InputStream) writes into an existing Blob. */
    @Test
    public void testConvertBlobInputStream() throws Exception {
        testConvertBlobPrepare();

        TypeHandlerUtils.convertBlob(blob, new ByteArrayInputStream("PNG".getBytes()));

        testConvertBlobCheck();
    }

    /** convertBlob(Blob, String) writes into an existing Blob. */
    @Test
    public void testConvertBlobString() throws Exception {
        testConvertBlobPrepare();

        TypeHandlerUtils.convertBlob(blob, "TIFF");

        testConvertBlobCheck();
    }

    // Stubs Connection.createBlob() -> blob and blob.setBinaryStream(1) -> output.
    private void testConvertBlobPrepare() throws SQLException, MjdbcException {
        //when(conn.createBlob()).thenReturn(blob);
        when(MappingUtils.invokeFunction(conn, "createBlob", new Class[]{}, new Object[]{})).thenReturn(blob);
        when(blob.setBinaryStream(1)).thenReturn(output);
    }

    // Shared post-conditions: the Blob stream was opened at position 1, written,
    // flushed, and closed exactly once.
    private void testConvertBlobCheck() throws Exception {
        verify(blob, times(1)).setBinaryStream(1);
        verify(output).write(any(byte[].class), eq(0), any(int.class));
        verify(output, times(1)).flush();
        verify(output, times(1)).close();
    }

    /** convertClob(Connection, byte[]) creates a Clob and streams the bytes into it. */
    @Test
    public void testConvertClobConnByte() throws Exception {
        testConvertClobPrepare();

        TypeHandlerUtils.convertClob(conn, "GIF".getBytes());

        //verify(conn, times(1)).createClob();
        MappingUtils.invokeFunction(verify(conn, times(1)), "createClob", new Class[]{}, new Object[]{});
        testConvertClobCheck();
    }

    /** convertClob(Connection, InputStream) creates a Clob and copies the stream. */
    @Test
    public void testConvertClobConnInputStream() throws Exception {
        testConvertClobPrepare();

        TypeHandlerUtils.convertClob(conn, new ByteArrayInputStream("BMP".getBytes()));

        //verify(conn, times(1)).createClob();
        MappingUtils.invokeFunction(verify(conn, times(1)), "createClob", new Class[]{}, new Object[]{});
        testConvertClobCheck();
    }

    /** convertClob(Connection, String) creates a Clob from string content. */
    @Test
    public void testConvertClobConnString() throws Exception {
        testConvertClobPrepare();

        TypeHandlerUtils.convertClob(conn, "PCX");

        //verify(conn, times(1)).createClob();
        MappingUtils.invokeFunction(verify(conn, times(1)), "createClob", new Class[]{}, new Object[]{});
        testConvertClobCheck();
    }

    /** convertClob(Clob, byte[]) writes into an existing Clob. */
    @Test
    public void testConvertClobByte() throws Exception {
        testConvertClobPrepare();

        TypeHandlerUtils.convertClob(clob, "JPEG".getBytes());

        testConvertClobCheck();
    }

    /** convertClob(Clob, InputStream) writes into an existing Clob. */
    @Test
    public void testConvertClobInputStream() throws Exception {
        testConvertClobPrepare();

        TypeHandlerUtils.convertClob(clob, new ByteArrayInputStream("PNG".getBytes()));

        testConvertClobCheck();
    }

    /** convertClob(Clob, String) writes into an existing Clob. */
    @Test
    public void testConvertClobString() throws Exception {
        testConvertClobPrepare();

        TypeHandlerUtils.convertClob(clob, "TIFF");

        testConvertClobCheck();
    }

    // Stubs Connection.createClob() -> clob and clob.setAsciiStream(1) -> output.
    private void testConvertClobPrepare() throws SQLException, MjdbcException {
        //when(conn.createClob()).thenReturn(clob);
        when(MappingUtils.invokeFunction(conn, "createClob", new Class[]{}, new Object[]{})).thenReturn(clob);
        when(clob.setAsciiStream(1)).thenReturn(output);
    }

    // Shared post-conditions for the Clob conversions (mirror of testConvertBlobCheck).
    private void testConvertClobCheck() throws Exception {
        verify(clob, times(1)).setAsciiStream(1);
        verify(output).write(any(byte[].class), eq(0), any(int.class));
        verify(output, times(1)).flush();
        verify(output, times(1)).close();
    }

    /** convertSqlXml(Connection, byte[]) creates a SQLXML and streams the bytes into it. */
    @Test
    public void testConvertSqlXmlConnByte() throws Exception {
        testConvertSqlXmlPrepare();

        TypeHandlerUtils.convertSqlXml(conn, "GIF".getBytes());

        //verify(conn, times(1)).createSQLXML();
        MappingUtils.invokeFunction(verify(conn, times(1)), "createSQLXML", new Class[]{}, new Object[]{});
        testConvertSqlXmlCheck();
    }

    /** convertSqlXml(Connection, InputStream) creates a SQLXML and copies the stream. */
    @Test
    public void testConvertSqlXmlConnInputStream() throws Exception {
        testConvertSqlXmlPrepare();

        TypeHandlerUtils.convertSqlXml(conn, new ByteArrayInputStream("BMP".getBytes()));

        //verify(conn, times(1)).createSQLXML();
        MappingUtils.invokeFunction(verify(conn, times(1)), "createSQLXML", new Class[]{}, new Object[]{});
        testConvertSqlXmlCheck();
    }

    /** convertSqlXml(Connection, String) creates a SQLXML from string content. */
    @Test
    public void testConvertSqlXmlConnString() throws Exception {
        testConvertSqlXmlPrepare();

        TypeHandlerUtils.convertSqlXml(conn, "PCX");

        //verify(conn, times(1)).createSQLXML();
        MappingUtils.invokeFunction(verify(conn, times(1)), "createSQLXML", new Class[]{}, new Object[]{});
        testConvertSqlXmlCheck();
    }

    /** convertSqlXml(SQLXML, byte[]) writes into an existing SQLXML. */
    @Test
    public void testConvertSqlXmlByte() throws Exception {
        testConvertSqlXmlPrepare();

        TypeHandlerUtils.convertSqlXml(sqlXml, "JPEG".getBytes());

        testConvertSqlXmlCheck();
    }

    /** convertSqlXml(SQLXML, InputStream) writes into an existing SQLXML. */
    @Test
    public void testConvertSqlXmlInputStream() throws Exception {
        testConvertSqlXmlPrepare();

        TypeHandlerUtils.convertSqlXml(sqlXml, new ByteArrayInputStream("PNG".getBytes()));

        testConvertSqlXmlCheck();
    }

    /** convertSqlXml(SQLXML, String) writes into an existing SQLXML. */
    @Test
    public void testConvertSqlXmlString() throws Exception {
        testConvertSqlXmlPrepare();

        TypeHandlerUtils.convertSqlXml(sqlXml, "TIFF");

        testConvertSqlXmlCheck();
    }

    // Stubs Connection.createSQLXML() -> sqlXml and sqlXml.setBinaryStream() -> output
    // (both reflectively, since SQLXML is loaded by name).
    private void testConvertSqlXmlPrepare() throws SQLException, MjdbcException {
        //when(conn.createSQLXML()).thenReturn(sqlXml);
        when(MappingUtils.invokeFunction(conn, "createSQLXML", new Class[]{}, new Object[]{})).thenReturn(sqlXml);
        //when(sqlXml.setBinaryStream()).thenReturn(output);
        when(MappingUtils.invokeFunction(sqlXml, "setBinaryStream", new Class[]{}, new Object[]{})).thenReturn(output);
    }

    // Shared post-conditions for the SQLXML conversions.
    private void testConvertSqlXmlCheck() throws Exception {
        //verify(sqlXml, times(1)).setBinaryStream();
        MappingUtils.invokeFunction(verify(sqlXml, times(1)), "setBinaryStream", new Class[]{}, new Object[]{});
        verify(output).write(any(byte[].class), eq(0), any(int.class));
        verify(output, times(1)).flush();
        verify(output, times(1)).close();
    }

    /** Java class name to SQL type-name mapping. */
    @Test
    public void testConvertJavaClassToSqlType() throws Exception {
        Assert.assertEquals("VARCHAR", TypeHandlerUtils.convertJavaClassToSqlType("String"));
    }

    @Test
    public void testReadBlobClose() throws Exception {
        testReadBlob(true);
    }

    @Test
    public void testReadBlob() throws Exception {
        testReadBlob(false);
    }

    // readBlob returns the Blob's binary content; 'close' selects the
    // overload that closes the Blob afterwards.
    private void testReadBlob(boolean close) throws Exception {
        byte[] result = null;
        String data = "LZW";

        when(blob.getBinaryStream()).thenReturn(new ByteArrayInputStream(data.getBytes()));

        if (close == true) {
            result = TypeHandlerUtils.readBlob(blob);
        } else {
            result = TypeHandlerUtils.readBlob(blob, false);
        }

        Assert.assertEquals(data, new String(result));
    }

    @Test
    public void testReadClobClose() throws Exception {
        testReadClob(true);
    }

    @Test
    public void testReadClob() throws Exception {
        testReadClob(false);
    }

    // readClob returns the Clob's ASCII content; 'close' selects the overload.
    private void testReadClob(boolean close) throws Exception {
        byte[] result = null;
        String data = "ZIP";

        when(clob.getAsciiStream()).thenReturn(new ByteArrayInputStream(data.getBytes()));

        if (close == true) {
            result = TypeHandlerUtils.readClob(clob);
        } else {
            result = TypeHandlerUtils.readClob(clob, false);
        }

        Assert.assertEquals(data, new String(result));
    }

    @Test
    public void testReadSqlXmlClose() throws Exception {
        testReadSqlXml(true);
    }

    @Test
    public void testReadSqlXml() throws Exception {
        testReadSqlXml(false);
    }

    // readSqlXml returns the SQLXML's binary content; 'close' selects the overload.
    private void testReadSqlXml(boolean close) throws Exception {
        byte[] result = null;
        String data = "tar.gz";

        //when(sqlXml.getBinaryStream()).thenReturn(new ByteArrayInputStream(data.getBytes()));
        when(MappingUtils.invokeFunction(sqlXml, "getBinaryStream", new Class[]{}, new Object[]{})).thenReturn(new ByteArrayInputStream(data.getBytes()));

        if (close == true) {
            result = TypeHandlerUtils.readSqlXml(sqlXml);
        } else {
            result = TypeHandlerUtils.readSqlXml(sqlXml, false);
        }

        Assert.assertEquals(data, new String(result));
    }

    /** toByteArray drains an InputStream into a byte[]. */
    @Test
    public void testToByteArray() throws Exception {
        String data = "ace";
        byte[] result = null;

        result = TypeHandlerUtils.toByteArray(new ByteArrayInputStream(data.getBytes()));

        Assert.assertEquals(data, new String(result));
    }

    /** toString drains a Reader into a String. */
    @Test
    public void testToString() throws Exception {
        String data = "tar.gz";
        String result = "";

        result = TypeHandlerUtils.toString(new StringReader(data));

        Assert.assertEquals(data, result);
    }

    /** closeQuietly must swallow IOException from InputStream.close(). */
    @Test
    public void testCloseQuietlyInput() throws Exception {
        Mockito.doThrow(new IOException()).when(input).close();

        try {
            TypeHandlerUtils.closeQuietly(input);
        } catch (Exception ex) {
            fail();
        }
    }

    /** closeQuietly must swallow IOException from OutputStream.close(). */
    @Test
    public void testCloseQuietlyOutput() throws Exception {
        Mockito.doThrow(new IOException()).when(output).close();

        try {
            TypeHandlerUtils.closeQuietly(output);
        } catch (Exception ex) {
            fail();
        }
    }

    /** isJDBC3 reflects the OVERRIDE_INT_JDBC3 override flag (default false). */
    @Test
    public void testIsJDBC3() {
        Overrider overrider = new Overrider();

        Assert.assertEquals(false, TypeHandlerUtils.isJDBC3(overrider));

        overrider.override(MjdbcConstants.OVERRIDE_INT_JDBC3, true);
        Assert.assertEquals(true, TypeHandlerUtils.isJDBC3(overrider));

        overrider.override(MjdbcConstants.OVERRIDE_INT_JDBC3, false);
        Assert.assertEquals(false, TypeHandlerUtils.isJDBC3(overrider));
    }

    /** copy transfers all bytes from an InputStream to an OutputStream. */
    @Test
    public void testCopy() throws Exception {
        String data = "rar";
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        InputStream inputStream = new ByteArrayInputStream(data.getBytes());

        TypeHandlerUtils.copy(inputStream, outputStream);

        Assert.assertEquals(data, new String(outputStream.toByteArray()));
    }
}
/**
 * 
 *    Copyright 2017 Florian Erhard
 * 
 *   Licensed under the Apache License, Version 2.0 (the "License");
 *   you may not use this file except in compliance with the License.
 *   You may obtain a copy of the License at
 * 
 *       http://www.apache.org/licenses/LICENSE-2.0
 * 
 *   Unless required by applicable law or agreed to in writing, software
 *   distributed under the License is distributed on an "AS IS" BASIS,
 *   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *   See the License for the specific language governing permissions and
 *   limitations under the License.
 * 
 */
package gedi.util.math.stat.factor;

import gedi.util.ArrayUtils;
import gedi.util.datastructure.array.IntegerArray;
import gedi.util.datastructure.array.NumericArray;
import gedi.util.functions.EI;
import gedi.util.io.randomaccess.BinaryReader;
import gedi.util.io.randomaccess.BinaryWriter;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.WeakHashMap;
import java.util.function.UnaryOperator;

/**
 * An R-style factor: a fixed set of named levels. Each level is itself a
 * {@code Factor} object; all levels of one factor share the same {@code names},
 * {@code nameToIndex} and {@code levels} arrays, so levels are singletons and
 * identity-equality ({@link #equals(Object)}) is consistent with
 * {@link #compareTo(Factor)} within one factor.
 * <p>
 * Two sentinel levels exist outside the valid index range: {@code below}
 * (index -1, name "&lt;") and {@code above} (index length, name "&gt;"),
 * returned by {@link #get(int)} for out-of-range indices.
 */
public class Factor implements Comparable<Factor> {

	private String[] names;
	private HashMap<String,Integer> nameToIndex;

	// Sentinels shared by all levels of this factor.
	Factor below;
	Factor above;
	// levels[i] is the level with index i; filled in by the package ctor.
	Factor[] levels;
	int index;

	// Prototype constructor; fields are filled in by create(...).
	private Factor() {}

	/**
	 * Creates one level of the factor described by {@code proto}, registering
	 * it in the shared levels array when {@code index} is in range (sentinels
	 * pass -1 or length and are not registered).
	 */
	Factor(Factor proto, int index, Factor below, Factor above) {
		this.names = proto.names;
		this.index = index;
		this.nameToIndex = proto.nameToIndex;
		this.levels = proto.levels;
		if (index>=0 && index<levels.length)
			this.levels[index] = this;
		this.above = above;
		this.below = below;
	}

	/**
	 * Returns the index of the level with the given name.
	 * Note: throws NullPointerException (from unboxing) for an unknown name.
	 */
	public int getIndex(String name) {
		return nameToIndex.get(name);
	}

	/** Returns the level with the given name (NPE/AIOOBE for unknown names). */
	public Factor get(String name) {
		return levels[nameToIndex.get(name)];
	}

	/**
	 * Returns the level at the given index; out-of-range indices yield the
	 * {@code below} / {@code above} sentinels instead of throwing.
	 */
	public Factor get(int index) {
		if (index<0) return below;
		if (index>=levels.length) return above;
		return levels[index];
	}

	/** Index of this level (-1 for the below sentinel, length() for above). */
	public int getIndex() {
		return index;
	}

	/** Number of (regular) levels of this factor. */
	public int length() {
		return levels.length;
	}

	/** The shared level-name array; do not mutate. */
	public String[] getNames() {
		return names;
	}

	/** The shared level array; do not mutate. */
	public Factor[] getLevels() {
		return levels;
	}

	/** Name of this level; "&lt;" / "&gt;" for the sentinels. */
	public String name() {
		if (index<0) return "<";
		if (index>=names.length) return ">";
		return names[index];
	}

	/**
	 * Orders levels by index; only levels of the same factor are comparable.
	 * @throws RuntimeException if {@code o} belongs to a different factor
	 */
	@Override
	public int compareTo(Factor o) {
		if (o.levels!=levels) throw new RuntimeException("Do not compare different factors!");
		return Integer.compare(this.index, o.index);
	}

	@Override
	public int hashCode() {
		return Integer.hashCode(index);
	}

	// Identity equality is correct: levels are singletons per factor.
	@Override
	public boolean equals(Object obj) {
		return this==obj;
	}

	@Override
	public String toString() {
		return name();
	}

	/**
	 * Creates a factor for the given names (in the given order).
	 * @param names the level names
	 * @return the first level
	 */
	public static Factor create(String... names) {
		return create(names,null,t->t);
	}

	/**
	 * Creates a factor for the given names, with level order determined by the
	 * given comparator.
	 * @param sorting comparator defining the level order
	 * @param names the level names
	 * @return the first level
	 */
	public static Factor create(Comparator<String> sorting, String... names) {
		return create(names,sorting,t->t);
	}

	/**
	 * Creates a factor for the given names with naturally sorted level order.
	 * @param names the level names
	 * @return the first level
	 */
	public static Factor createSorted(String... names) {
		return create(names,(a,b)->a.compareTo(b),t->t);
	}

	/** Creates a factor, passing each level through {@code fun} (e.g. to wrap levels). */
	public static Factor create(String[] names, UnaryOperator<Factor> fun) {
		return create(names,null,fun);
	}

	/**
	 * Core factory: builds the shared structures, the two sentinels and all levels.
	 * @param names the level names (must be distinct)
	 * @param sorting optional level order (null keeps array order)
	 * @param fun applied to every created level before registering it
	 * @return the first level
	 */
	public static Factor create(String[] names, Comparator<String> sorting, UnaryOperator<Factor> fun) {
		HashMap<String,Integer> nameToIndex = ArrayUtils.createIndexMap(names);
		if (sorting!=null) {
			// Reassign indices according to the requested order.
			String[] k = nameToIndex.keySet().toArray(new String[0]);
			Arrays.sort(k,sorting);
			for (int i=0; i<k.length; i++)
				nameToIndex.put(k[i], i);
		}
		Factor[] a = new Factor[names.length];
		Factor proto = new Factor();
		proto.names = names;
		proto.nameToIndex = nameToIndex;
		proto.levels = a;
		Factor below = new Factor(proto,-1, null, null);
		Factor above = new Factor(proto,a.length, null, null);
		// The sentinels reference themselves/each other so get(int) chains stay safe.
		below.below = above.below = below;
		below.above = above.above = above;
		for (int i=0; i<a.length; i++)
			a[i] = fun.apply(new Factor(proto,i,below, above));
		return a[0];
	}

	public static Factor FALSE = create("false","true");
	public static Factor TRUE = FALSE.get(1);

	/**
	 * Converts a list of strings into the corresponding list of factor levels
	 * (levels are created in first-occurrence order).
	 * @param list the strings
	 * @return one level per input string
	 */
	public static List<Factor> fromStrings(Collection<String> list) {
		Factor proto = create(new LinkedHashSet<>(list).toArray(new String[0]));
		return EI.wrap(list).map(proto::get).toCollection(new ArrayList<>());
	}

	/** Array variant of {@link #fromStrings(Collection)}. */
	public static Factor[] fromStrings(String[] list) {
		Factor proto = create(list);
		return EI.wrap(list).map(proto::get).toArray(Factor.class);
	}

	/** Like {@link #fromStrings(String[])} but with naturally sorted level order. */
	public static Factor[] fromStringsSorted(String[] list) {
		Factor proto = createSorted(list);
		return EI.wrap(list).map(proto::get).toArray(Factor.class);
	}

	// Weak cache so repeated conversions of the same array reuse one factor
	// without pinning the array in memory.
	private static WeakHashMap<NumericArray, Factor> numericArrayCache = new WeakHashMap<>();

	/** Returns (and caches) a factor whose levels are the distinct values of the array. */
	public static Factor fromNumericArray(NumericArray array) {
		return numericArrayCache.computeIfAbsent(array, Factor::createNumericArrayFactor);
	}

	private static Factor createNumericArrayFactor(NumericArray array) {
		// FIX: unique(true) was missing in the integral branch (the double
		// branch already had it). Without it, duplicate values produce
		// duplicate level names, which corrupts the name->index map.
		if (array.isIntegral())
			return create(array.intIterator().sort().map(i->i+"").unique(true).toArray(String.class));
		return create(array.doubleIterator().sort().map(i->i+"").unique(true).toArray(String.class));
	}

	/**
	 * Writes the level names once, then one index per element. All elements
	 * must belong to the same factor (only data[0]'s names are written).
	 * Wire format: count, [namesCount, names..., indices...] (nothing after
	 * the count for an empty array).
	 */
	public static void serialize(BinaryWriter out, Factor[] data) throws IOException {
		out.putCInt(data.length);
		if (data.length==0) return;
		Factor p = data[0];
		out.putCInt(p.names.length);
		for (String s : p.names)
			out.putString(s);
		for (Factor f : data)
			out.putCInt(f.getIndex());
	}

	/**
	 * Inverse of {@link #serialize(BinaryWriter, Factor[])}.
	 */
	public static Factor[] deserialize(BinaryReader in) throws IOException {
		Factor[] re = new Factor[in.getCInt()];
		// FIX: serialize() writes nothing beyond the count for an empty array,
		// so reading the names count here would consume bytes that belong to
		// whatever follows in the stream.
		if (re.length==0) return re;
		String[] names = new String[in.getCInt()];
		for (int i = 0; i < names.length; i++)
			names[i] = in.getString();
		Factor p = create(names);
		for (int i = 0; i < re.length; i++)
			re[i] = p.get(in.getCInt());
		return re;
	}
}
package io.grpc.testing.integration;

import static io.grpc.MethodDescriptor.generateFullMethodName;
import static io.grpc.stub.ClientCalls.asyncBidiStreamingCall;
import static io.grpc.stub.ClientCalls.asyncClientStreamingCall;
import static io.grpc.stub.ClientCalls.asyncServerStreamingCall;
import static io.grpc.stub.ClientCalls.asyncUnaryCall;
import static io.grpc.stub.ClientCalls.blockingServerStreamingCall;
import static io.grpc.stub.ClientCalls.blockingUnaryCall;
import static io.grpc.stub.ClientCalls.futureUnaryCall;
import static io.grpc.stub.ServerCalls.asyncBidiStreamingCall;
import static io.grpc.stub.ServerCalls.asyncClientStreamingCall;
import static io.grpc.stub.ServerCalls.asyncServerStreamingCall;
import static io.grpc.stub.ServerCalls.asyncUnaryCall;
import static io.grpc.stub.ServerCalls.asyncUnimplementedStreamingCall;
import static io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall;

/**
 * <pre>
 * A service used to control reconnect server.
 * </pre>
 */
@javax.annotation.Generated(
    value = "by gRPC proto compiler",
    comments = "Source: io/grpc/testing/integration/test.proto")
public final class ReconnectServiceGrpc {

  // NOTE(review): this class is generated by the gRPC proto compiler (see the
  // @Generated annotation above). Do not edit it by hand; regenerate from
  // io/grpc/testing/integration/test.proto instead.

  private ReconnectServiceGrpc() {}

  public static final String SERVICE_NAME = "grpc.testing.ReconnectService";

  // Static method descriptors that strictly reflect the proto.
  @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901")
  @java.lang.Deprecated // Use {@link #getStartMethod()} instead.
  public static final io.grpc.MethodDescriptor<com.google.protobuf.EmptyProtos.Empty,
      com.google.protobuf.EmptyProtos.Empty> METHOD_START = getStartMethod();

  private static volatile io.grpc.MethodDescriptor<com.google.protobuf.EmptyProtos.Empty,
      com.google.protobuf.EmptyProtos.Empty> getStartMethod;

  /**
   * Returns the descriptor for the unary {@code Start} method, creating and caching it on
   * first use (double-checked locking on the class object; the cache field is volatile).
   */
  @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901")
  public static io.grpc.MethodDescriptor<com.google.protobuf.EmptyProtos.Empty,
      com.google.protobuf.EmptyProtos.Empty> getStartMethod() {
    io.grpc.MethodDescriptor<com.google.protobuf.EmptyProtos.Empty, com.google.protobuf.EmptyProtos.Empty> getStartMethod;
    if ((getStartMethod = ReconnectServiceGrpc.getStartMethod) == null) {
      synchronized (ReconnectServiceGrpc.class) {
        if ((getStartMethod = ReconnectServiceGrpc.getStartMethod) == null) {
          ReconnectServiceGrpc.getStartMethod = getStartMethod =
              io.grpc.MethodDescriptor.<com.google.protobuf.EmptyProtos.Empty, com.google.protobuf.EmptyProtos.Empty>newBuilder()
              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(generateFullMethodName(
                  "grpc.testing.ReconnectService", "Start"))
              .setSampledToLocalTracing(true)
              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.protobuf.EmptyProtos.Empty.getDefaultInstance()))
              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.protobuf.EmptyProtos.Empty.getDefaultInstance()))
              .setSchemaDescriptor(new ReconnectServiceMethodDescriptorSupplier("Start"))
              .build();
        }
      }
    }
    return getStartMethod;
  }

  @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901")
  @java.lang.Deprecated // Use {@link #getStopMethod()} instead.
  public static final io.grpc.MethodDescriptor<com.google.protobuf.EmptyProtos.Empty,
      io.grpc.testing.integration.Messages.ReconnectInfo> METHOD_STOP = getStopMethod();

  private static volatile io.grpc.MethodDescriptor<com.google.protobuf.EmptyProtos.Empty,
      io.grpc.testing.integration.Messages.ReconnectInfo> getStopMethod;

  /**
   * Returns the descriptor for the unary {@code Stop} method, creating and caching it on
   * first use (same double-checked-locking pattern as {@link #getStartMethod()}).
   */
  @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901")
  public static io.grpc.MethodDescriptor<com.google.protobuf.EmptyProtos.Empty,
      io.grpc.testing.integration.Messages.ReconnectInfo> getStopMethod() {
    io.grpc.MethodDescriptor<com.google.protobuf.EmptyProtos.Empty, io.grpc.testing.integration.Messages.ReconnectInfo> getStopMethod;
    if ((getStopMethod = ReconnectServiceGrpc.getStopMethod) == null) {
      synchronized (ReconnectServiceGrpc.class) {
        if ((getStopMethod = ReconnectServiceGrpc.getStopMethod) == null) {
          ReconnectServiceGrpc.getStopMethod = getStopMethod =
              io.grpc.MethodDescriptor.<com.google.protobuf.EmptyProtos.Empty, io.grpc.testing.integration.Messages.ReconnectInfo>newBuilder()
              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(generateFullMethodName(
                  "grpc.testing.ReconnectService", "Stop"))
              .setSampledToLocalTracing(true)
              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  com.google.protobuf.EmptyProtos.Empty.getDefaultInstance()))
              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                  io.grpc.testing.integration.Messages.ReconnectInfo.getDefaultInstance()))
              .setSchemaDescriptor(new ReconnectServiceMethodDescriptorSupplier("Stop"))
              .build();
        }
      }
    }
    return getStopMethod;
  }

  /**
   * Creates a new async stub that supports all call types for the service
   */
  public static ReconnectServiceStub newStub(io.grpc.Channel channel) {
    return new ReconnectServiceStub(channel);
  }

  /**
   * Creates a new blocking-style stub that supports unary and streaming output calls on the service
   */
  public static ReconnectServiceBlockingStub newBlockingStub(
      io.grpc.Channel channel) {
    return new ReconnectServiceBlockingStub(channel);
  }

  /**
   * Creates a new ListenableFuture-style stub that supports unary calls on the service
   */
  public static ReconnectServiceFutureStub newFutureStub(
      io.grpc.Channel channel) {
    return new ReconnectServiceFutureStub(channel);
  }

  /**
   * <pre>
   * A service used to control reconnect server.
   * </pre>
   */
  public static abstract class ReconnectServiceImplBase implements io.grpc.BindableService {

    /** Default implementation replies UNIMPLEMENTED; override to handle {@code Start}. */
    public void start(com.google.protobuf.EmptyProtos.Empty request,
        io.grpc.stub.StreamObserver<com.google.protobuf.EmptyProtos.Empty> responseObserver) {
      asyncUnimplementedUnaryCall(getStartMethod(), responseObserver);
    }

    /** Default implementation replies UNIMPLEMENTED; override to handle {@code Stop}. */
    public void stop(com.google.protobuf.EmptyProtos.Empty request,
        io.grpc.stub.StreamObserver<io.grpc.testing.integration.Messages.ReconnectInfo> responseObserver) {
      asyncUnimplementedUnaryCall(getStopMethod(), responseObserver);
    }

    @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() {
      return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
          .addMethod(
            getStartMethod(),
            asyncUnaryCall(
              new MethodHandlers<
                com.google.protobuf.EmptyProtos.Empty,
                com.google.protobuf.EmptyProtos.Empty>(
                  this, METHODID_START)))
          .addMethod(
            getStopMethod(),
            asyncUnaryCall(
              new MethodHandlers<
                com.google.protobuf.EmptyProtos.Empty,
                io.grpc.testing.integration.Messages.ReconnectInfo>(
                  this, METHODID_STOP)))
          .build();
    }
  }

  /**
   * <pre>
   * A service used to control reconnect server.
   * </pre>
   */
  public static final class ReconnectServiceStub extends io.grpc.stub.AbstractStub<ReconnectServiceStub> {
    private ReconnectServiceStub(io.grpc.Channel channel) {
      super(channel);
    }

    private ReconnectServiceStub(io.grpc.Channel channel,
        io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected ReconnectServiceStub build(io.grpc.Channel channel,
        io.grpc.CallOptions callOptions) {
      return new ReconnectServiceStub(channel, callOptions);
    }

    /** Asynchronous unary {@code Start} call. */
    public void start(com.google.protobuf.EmptyProtos.Empty request,
        io.grpc.stub.StreamObserver<com.google.protobuf.EmptyProtos.Empty> responseObserver) {
      asyncUnaryCall(
          getChannel().newCall(getStartMethod(), getCallOptions()), request, responseObserver);
    }

    /** Asynchronous unary {@code Stop} call. */
    public void stop(com.google.protobuf.EmptyProtos.Empty request,
        io.grpc.stub.StreamObserver<io.grpc.testing.integration.Messages.ReconnectInfo> responseObserver) {
      asyncUnaryCall(
          getChannel().newCall(getStopMethod(), getCallOptions()), request, responseObserver);
    }
  }

  /**
   * <pre>
   * A service used to control reconnect server.
   * </pre>
   */
  public static final class ReconnectServiceBlockingStub extends io.grpc.stub.AbstractStub<ReconnectServiceBlockingStub> {
    private ReconnectServiceBlockingStub(io.grpc.Channel channel) {
      super(channel);
    }

    private ReconnectServiceBlockingStub(io.grpc.Channel channel,
        io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected ReconnectServiceBlockingStub build(io.grpc.Channel channel,
        io.grpc.CallOptions callOptions) {
      return new ReconnectServiceBlockingStub(channel, callOptions);
    }

    /** Blocking unary {@code Start} call. */
    public com.google.protobuf.EmptyProtos.Empty start(com.google.protobuf.EmptyProtos.Empty request) {
      return blockingUnaryCall(
          getChannel(), getStartMethod(), getCallOptions(), request);
    }

    /** Blocking unary {@code Stop} call. */
    public io.grpc.testing.integration.Messages.ReconnectInfo stop(com.google.protobuf.EmptyProtos.Empty request) {
      return blockingUnaryCall(
          getChannel(), getStopMethod(), getCallOptions(), request);
    }
  }

  /**
   * <pre>
   * A service used to control reconnect server.
   * </pre>
   */
  public static final class ReconnectServiceFutureStub extends io.grpc.stub.AbstractStub<ReconnectServiceFutureStub> {
    private ReconnectServiceFutureStub(io.grpc.Channel channel) {
      super(channel);
    }

    private ReconnectServiceFutureStub(io.grpc.Channel channel,
        io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected ReconnectServiceFutureStub build(io.grpc.Channel channel,
        io.grpc.CallOptions callOptions) {
      return new ReconnectServiceFutureStub(channel, callOptions);
    }

    /** Future-returning unary {@code Start} call. */
    public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.EmptyProtos.Empty> start(
        com.google.protobuf.EmptyProtos.Empty request) {
      return futureUnaryCall(
          getChannel().newCall(getStartMethod(), getCallOptions()), request);
    }

    /** Future-returning unary {@code Stop} call. */
    public com.google.common.util.concurrent.ListenableFuture<io.grpc.testing.integration.Messages.ReconnectInfo> stop(
        com.google.protobuf.EmptyProtos.Empty request) {
      return futureUnaryCall(
          getChannel().newCall(getStopMethod(), getCallOptions()), request);
    }
  }

  private static final int METHODID_START = 0;
  private static final int METHODID_STOP = 1;

  // Dispatches incoming server calls to the user's ReconnectServiceImplBase by method id.
  private static final class MethodHandlers<Req, Resp> implements
      io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final ReconnectServiceImplBase serviceImpl;
    private final int methodId;

    MethodHandlers(ReconnectServiceImplBase serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_START:
          serviceImpl.start((com.google.protobuf.EmptyProtos.Empty) request,
              (io.grpc.stub.StreamObserver<com.google.protobuf.EmptyProtos.Empty>) responseObserver);
          break;
        case METHODID_STOP:
          serviceImpl.stop((com.google.protobuf.EmptyProtos.Empty) request,
              (io.grpc.stub.StreamObserver<io.grpc.testing.integration.Messages.ReconnectInfo>) responseObserver);
          break;
        default:
          // Unreachable: only the two unary method ids above are ever constructed.
          throw new AssertionError();
      }
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        default:
          // No streaming methods on this service, so this overload is never dispatched.
          throw new AssertionError();
      }
    }
  }

  // Supplies proto schema descriptors for reflection/debugging support.
  private static abstract class ReconnectServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    ReconnectServiceBaseDescriptorSupplier() {}

    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return io.grpc.testing.integration.Test.getDescriptor();
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("ReconnectService");
    }
  }

  private static final class ReconnectServiceFileDescriptorSupplier
      extends ReconnectServiceBaseDescriptorSupplier {
    ReconnectServiceFileDescriptorSupplier() {}
  }

  private static final class ReconnectServiceMethodDescriptorSupplier
      extends ReconnectServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final String methodName;

    ReconnectServiceMethodDescriptorSupplier(String methodName) {
      this.methodName = methodName;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }

  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;

  /**
   * Returns the service descriptor, lazily built on first use with double-checked
   * locking (same pattern as the per-method descriptor accessors above).
   */
  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (ReconnectServiceGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
              .setSchemaDescriptor(new ReconnectServiceFileDescriptorSupplier())
              .addMethod(getStartMethod())
              .addMethod(getStopMethod())
              .build();
        }
      }
    }
    return result;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.portable;

import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.Arrays;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.internal.MarshallerContextAdapter;
import org.apache.ignite.internal.util.IgniteUtils;
import org.apache.ignite.marshaller.portable.PortableMarshaller;
import org.apache.ignite.portable.PortableException;
import org.apache.ignite.portable.PortableMarshalAware;
import org.apache.ignite.portable.PortableMetadata;
import org.apache.ignite.portable.PortableReader;
import org.apache.ignite.portable.PortableWriter;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;

/**
 * Checks that the portable marshaller keeps working when the marshaller context has no
 * class-name storage: round-trips plain, {@link PortableMarshalAware} and
 * {@link Externalizable} objects through marshal/unmarshal.
 */
public class GridPortableMarshallerCtxDisabledSelfTest extends GridCommonAbstractTest {
    /** Metadata handler that stores nothing and resolves nothing. */
    protected static final PortableMetaDataHandler META_HND = new PortableMetaDataHandler() {
        @Override public void addMeta(int typeId, PortableMetadata meta) {
            // No-op.
        }

        @Override public PortableMetadata metadata(int typeId) {
            return null;
        }
    };

    /**
     * Marshals and unmarshals three flavours of object (plain, portable-aware,
     * externalizable) and asserts each survives the round trip unchanged.
     *
     * @throws Exception If failed.
     */
    public void testObjectExchange() throws Exception {
        PortableMarshaller marsh = new PortableMarshaller();
        marsh.setContext(new MarshallerContextWithNoStorage());

        PortableContext context = new PortableContext(META_HND, null);

        // setPortableContext is not public API here, so inject it reflectively.
        IgniteUtils.invoke(PortableMarshaller.class, marsh, "setPortableContext", context);

        SimpleObject simpleObj = new SimpleObject();

        simpleObj.b = 2;
        simpleObj.bArr = new byte[] {2, 3, 4, 5, 5};
        simpleObj.c = 'A';
        simpleObj.enumVal = TestEnum.D;
        simpleObj.objArr = new Object[] {"hello", "world", "from", "me"};
        simpleObj.enumArr = new TestEnum[] {TestEnum.C, TestEnum.B};

        SimpleObject otherObj = new SimpleObject();

        otherObj.b = 3;
        otherObj.bArr = new byte[] {5, 3, 4};

        simpleObj.otherObj = otherObj;

        assertEquals(simpleObj, marsh.unmarshal(marsh.marshal(simpleObj), null));

        SimplePortable simplePortable = new SimplePortable();

        simplePortable.str = "portable";
        simplePortable.arr = new long[] {100, 200, 300};

        assertEquals(simplePortable, marsh.unmarshal(marsh.marshal(simplePortable), null));

        SimpleExternalizable simpleExtr = new SimpleExternalizable();

        simpleExtr.str = "externalizable";
        simpleExtr.arr = new long[] {20000, 300000, 400000};

        assertEquals(simpleExtr, marsh.unmarshal(marsh.marshal(simpleExtr), null));
    }

    /**
     * Marshaller context with no storage. Platform has to work in such environment as well by marshalling class name of
     * a portable object.
     */
    private static class MarshallerContextWithNoStorage extends MarshallerContextAdapter {
        /** */
        public MarshallerContextWithNoStorage() {
            super(null);
        }

        /** {@inheritDoc} */
        @Override protected boolean registerClassName(int id, String clsName) throws IgniteCheckedException {
            // Pretend registration always fails, forcing class names into the payload.
            return false;
        }

        /** {@inheritDoc} */
        @Override protected String className(int id) throws IgniteCheckedException {
            return null;
        }
    }

    /** */
    private enum TestEnum {
        A, B, C, D, E
    }

    /** Plain fixture object covering primitives, arrays, enums and nesting. */
    private static class SimpleObject {
        /** */
        private byte b;

        /** */
        private char c;

        /** */
        private byte[] bArr;

        /** */
        private Object[] objArr;

        /** */
        private TestEnum enumVal;

        /** */
        private TestEnum[] enumArr;

        /** Nested instance to exercise object graphs. */
        private SimpleObject otherObj;

        /** {@inheritDoc} */
        @Override public boolean equals(Object o) {
            if (this == o)
                return true;

            if (o == null || getClass() != o.getClass())
                return false;

            SimpleObject object = (SimpleObject)o;

            if (b != object.b)
                return false;

            if (c != object.c)
                return false;

            if (!Arrays.equals(bArr, object.bArr))
                return false;

            // Element-wise comparison is intended here: the fixtures only store flat arrays.
            if (!Arrays.equals(objArr, object.objArr))
                return false;

            if (enumVal != object.enumVal)
                return false;

            if (!Arrays.equals(enumArr, object.enumArr))
                return false;

            return !(otherObj != null ? !otherObj.equals(object.otherObj) : object.otherObj != null);
        }

        /** {@inheritDoc} */
        @Override public int hashCode() {
            // Added to honor the equals/hashCode contract (equals was overridden without hashCode).
            int res = b;

            res = 31 * res + c;
            res = 31 * res + Arrays.hashCode(bArr);
            res = 31 * res + Arrays.hashCode(objArr);
            res = 31 * res + (enumVal != null ? enumVal.hashCode() : 0);
            res = 31 * res + Arrays.hashCode(enumArr);
            res = 31 * res + (otherObj != null ? otherObj.hashCode() : 0);

            return res;
        }
    }

    /**
     * Fixture exercising the {@link PortableMarshalAware} read/write path.
     */
    private static class SimplePortable implements PortableMarshalAware {
        /** */
        private String str;

        /** */
        private long[] arr;

        /** {@inheritDoc} */
        @Override public void writePortable(PortableWriter writer) throws PortableException {
            writer.writeString("str", str);
            writer.writeLongArray("longArr", arr);
        }

        /** {@inheritDoc} */
        @Override public void readPortable(PortableReader reader) throws PortableException {
            str = reader.readString("str");
            arr = reader.readLongArray("longArr");
        }

        /** {@inheritDoc} */
        @Override public boolean equals(Object o) {
            if (this == o)
                return true;

            if (o == null || getClass() != o.getClass())
                return false;

            SimplePortable that = (SimplePortable)o;

            if (str != null ? !str.equals(that.str) : that.str != null)
                return false;

            return Arrays.equals(arr, that.arr);
        }

        /** {@inheritDoc} */
        @Override public int hashCode() {
            // Added to honor the equals/hashCode contract.
            int res = str != null ? str.hashCode() : 0;

            res = 31 * res + Arrays.hashCode(arr);

            return res;
        }
    }

    /**
     * Fixture exercising the {@link Externalizable} read/write path.
     * NOTE(review): writeExternal NPEs if {@code str} is null — acceptable for this test,
     * which always assigns it before marshalling.
     */
    private static class SimpleExternalizable implements Externalizable {
        /** */
        private String str;

        /** */
        private long[] arr;

        /** {@inheritDoc} */
        @Override public void writeExternal(ObjectOutput out) throws IOException {
            out.writeUTF(str);
            out.writeObject(arr);
        }

        /** {@inheritDoc} */
        @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
            str = in.readUTF();
            arr = (long[])in.readObject();
        }

        /** {@inheritDoc} */
        @Override public boolean equals(Object o) {
            if (this == o)
                return true;

            if (o == null || getClass() != o.getClass())
                return false;

            SimpleExternalizable that = (SimpleExternalizable)o;

            if (str != null ? !str.equals(that.str) : that.str != null)
                return false;

            return Arrays.equals(arr, that.arr);
        }

        /** {@inheritDoc} */
        @Override public int hashCode() {
            // Added to honor the equals/hashCode contract.
            int res = str != null ? str.hashCode() : 0;

            res = 31 * res + Arrays.hashCode(arr);

            return res;
        }
    }
}
/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.exoplayer2.extractor.ogg;

import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.extractor.ExtractorInput;
import com.google.android.exoplayer2.extractor.SeekMap;
import com.google.android.exoplayer2.extractor.SeekPoint;
import com.google.android.exoplayer2.util.FlacStreamMetadata;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.ParsableByteArray;
import com.google.android.exoplayer2.util.Util;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

/**
 * {@link StreamReader} to extract Flac data out of Ogg byte stream.
 */
/* package */ final class FlacReader extends StreamReader {

  // First byte of a FLAC audio frame packet (sync-code high byte).
  private static final byte AUDIO_PACKET_TYPE = (byte) 0xFF;
  // Metadata block type 3 = SEEKTABLE in the FLAC format.
  private static final byte SEEKTABLE_PACKET_TYPE = 0x03;

  // Byte offset within a frame header at which the UTF-8 coded sample/frame number starts.
  private static final int FRAME_HEADER_SAMPLE_NUMBER_OFFSET = 4;

  // Parsed STREAMINFO; null until the first header packet has been read.
  private FlacStreamMetadata streamMetadata;
  // Seeker built from the optional SEEKTABLE packet; null if none was seen.
  private FlacOggSeeker flacOggSeeker;

  /**
   * Returns whether {@code data} starts with the FLAC-in-Ogg identification header:
   * packet type 0x7F followed by the ASCII signature "FLAC". Consumes 5 bytes from
   * {@code data}'s position.
   */
  public static boolean verifyBitstreamType(ParsableByteArray data) {
    return data.bytesLeft() >= 5 && data.readUnsignedByte() == 0x7F && // packet type
        data.readUnsignedInt() == 0x464C4143; // ASCII signature "FLAC"
  }

  @Override
  protected void reset(boolean headerData) {
    super.reset(headerData);
    if (headerData) {
      // Full reset: drop parsed headers so they are re-read from the new stream.
      streamMetadata = null;
      flacOggSeeker = null;
    }
  }

  /** Audio frame packets start with the sync byte 0xFF; header packets never do. */
  private static boolean isAudioPacket(byte[] data) {
    return data[0] == AUDIO_PACKET_TYPE;
  }

  @Override
  protected long preparePayload(ParsableByteArray packet) {
    // Only audio packets carry samples; -1 tells the caller there is no payload here.
    if (!isAudioPacket(packet.data)) {
      return -1;
    }
    return getFlacFrameBlockSize(packet);
  }

  @Override
  protected boolean readHeaders(ParsableByteArray packet, long position, SetupData setupData) {
    byte[] data = packet.data;
    if (streamMetadata == null) {
      // First header packet: STREAMINFO. Offset 17 skips the Ogg-FLAC mapping preamble.
      streamMetadata = new FlacStreamMetadata(data, 17);
      // maxFrameSize == 0 means "unknown" in STREAMINFO.
      int maxInputSize = streamMetadata.maxFrameSize == 0 ? Format.NO_VALUE
          : streamMetadata.maxFrameSize;
      // Copy from offset 9 to strip the Ogg mapping bytes, leaving a bare FLAC
      // metadata block for the decoder's initialization data.
      byte[] metadata = Arrays.copyOfRange(data, 9, packet.limit());
      metadata[4] = (byte) 0x80; // Set the last metadata block flag, ignore the other blocks
      List<byte[]> initializationData = Collections.singletonList(metadata);
      setupData.format = Format.createAudioSampleFormat(/* id= */ null, MimeTypes.AUDIO_FLAC,
          /* codecs= */ null, streamMetadata.bitRate(), maxInputSize, streamMetadata.channels,
          streamMetadata.sampleRate, initializationData, /* drmInitData= */ null,
          /* selectionFlags= */ 0, /* language= */ null);
    } else if ((data[0] & 0x7F) == SEEKTABLE_PACKET_TYPE) {
      // Optional SEEKTABLE metadata block: build a seeker from it.
      flacOggSeeker = new FlacOggSeeker();
      flacOggSeeker.parseSeekTable(packet);
    } else if (isAudioPacket(data)) {
      // First audio packet: headers are done. Record where audio starts so seek
      // table offsets (relative to the first frame) can be made absolute.
      if (flacOggSeeker != null) {
        flacOggSeeker.setFirstFrameOffset(position);
        setupData.oggSeeker = flacOggSeeker;
      }
      return false; // No more headers to read.
    }
    return true;
  }

  /**
   * Decodes the block size (samples per frame) from a FLAC frame header, per the
   * 4-bit block-size code in byte 2. Restores {@code packet}'s position to 0 when it
   * has to read ahead (codes 6/7 store the size explicitly after the sample number).
   */
  private int getFlacFrameBlockSize(ParsableByteArray packet) {
    int blockSizeCode = (packet.data[2] & 0xFF) >> 4;
    switch (blockSizeCode) {
      case 1:
        return 192;
      case 2:
      case 3:
      case 4:
      case 5:
        // Codes 2-5 encode 576 * 2^(code-2).
        return 576 << (blockSizeCode - 2);
      case 6:
      case 7:
        // skip the sample number
        packet.skipBytes(FRAME_HEADER_SAMPLE_NUMBER_OFFSET);
        packet.readUtf8EncodedLong();
        // Code 6: 8-bit (size-1); code 7: 16-bit (size-1) follows the sample number.
        int value = blockSizeCode == 6 ? packet.readUnsignedByte() : packet.readUnsignedShort();
        packet.setPosition(0);
        return value + 1;
      case 8:
      case 9:
      case 10:
      case 11:
      case 12:
      case 13:
      case 14:
      case 15:
        // Codes 8-15 encode 256 * 2^(code-8).
        return 256 << (blockSizeCode - 8);
      default:
        // Code 0 is reserved; treat as invalid.
        return -1;
    }
  }

  /**
   * {@link OggSeeker}/{@link SeekMap} backed by the FLAC SEEKTABLE metadata block.
   * Non-static: {@link #getDurationUs()} reads the outer reader's stream metadata.
   */
  private class FlacOggSeeker implements OggSeeker, SeekMap {

    private static final int METADATA_LENGTH_OFFSET = 1;
    private static final int SEEK_POINT_SIZE = 18;

    // Parallel arrays: granule (sample number) and byte offset of each seek point.
    private long[] seekPointGranules;
    private long[] seekPointOffsets;
    // Absolute stream position of the first audio frame; seek point offsets are relative to it.
    private long firstFrameOffset;
    // Granule chosen by startSeek(), consumed by the next read(); -1 when no seek is pending.
    private long pendingSeekGranule;

    public FlacOggSeeker() {
      firstFrameOffset = -1;
      pendingSeekGranule = -1;
    }

    public void setFirstFrameOffset(long firstFrameOffset) {
      this.firstFrameOffset = firstFrameOffset;
    }

    /**
     * Parses a FLAC file seek table metadata structure and initializes internal fields.
     *
     * @param data A {@link ParsableByteArray} including whole seek table metadata block. Its
     *     position should be set to the beginning of the block.
     * @see <a href="https://xiph.org/flac/format.html#metadata_block_seektable">FLAC format
     *     METADATA_BLOCK_SEEKTABLE</a>
     */
    public void parseSeekTable(ParsableByteArray data) {
      data.skipBytes(METADATA_LENGTH_OFFSET);
      int length = data.readUnsignedInt24();
      int numberOfSeekPoints = length / SEEK_POINT_SIZE;
      seekPointGranules = new long[numberOfSeekPoints];
      seekPointOffsets = new long[numberOfSeekPoints];
      for (int i = 0; i < numberOfSeekPoints; i++) {
        seekPointGranules[i] = data.readLong();
        seekPointOffsets[i] = data.readLong();
        data.skipBytes(2); // Skip "Number of samples in the target frame."
      }
    }

    @Override
    public long read(ExtractorInput input) throws IOException, InterruptedException {
      if (pendingSeekGranule >= 0) {
        // NOTE(review): the negative return encodes the target granule for the caller
        // (-(granule + 2) per the OggSeeker read contract — confirm against StreamReader).
        long result = -(pendingSeekGranule + 2);
        pendingSeekGranule = -1;
        return result;
      }
      return -1;
    }

    @Override
    public void startSeek(long targetGranule) {
      // Pick the last seek point at or before the target granule.
      int index = Util.binarySearchFloor(seekPointGranules, targetGranule, true, true);
      pendingSeekGranule = seekPointGranules[index];
    }

    @Override
    public SeekMap createSeekMap() {
      return this;
    }

    @Override
    public boolean isSeekable() {
      return true;
    }

    @Override
    public SeekPoints getSeekPoints(long timeUs) {
      long granule = convertTimeToGranule(timeUs);
      int index = Util.binarySearchFloor(seekPointGranules, granule, true, true);
      long seekTimeUs = convertGranuleToTime(seekPointGranules[index]);
      long seekPosition = firstFrameOffset + seekPointOffsets[index];
      SeekPoint seekPoint = new SeekPoint(seekTimeUs, seekPosition);
      if (seekTimeUs >= timeUs || index == seekPointGranules.length - 1) {
        return new SeekPoints(seekPoint);
      } else {
        // Also return the next point so the caller can bracket the target time.
        long secondSeekTimeUs = convertGranuleToTime(seekPointGranules[index + 1]);
        long secondSeekPosition = firstFrameOffset + seekPointOffsets[index + 1];
        SeekPoint secondSeekPoint = new SeekPoint(secondSeekTimeUs, secondSeekPosition);
        return new SeekPoints(seekPoint, secondSeekPoint);
      }
    }

    @Override
    public long getDurationUs() {
      return streamMetadata.durationUs();
    }
  }
}
package link.ideas.easya.ui.friend_lesson_list;

import android.arch.lifecycle.LiveData;
import android.arch.lifecycle.Observer;
import android.arch.lifecycle.ViewModelProviders;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.ActivityCompat;
import android.support.v4.app.ActivityOptionsCompat;
import android.support.v4.util.Pair;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;

import com.bumptech.glide.Glide;
import com.google.firebase.database.DataSnapshot;

import java.util.ArrayList;
import java.util.List;

import link.ideas.easya.data.database.Lesson;
import link.ideas.easya.models.User;
import link.ideas.easya.ui.friend_couse_list.CourseListFriends;
import link.ideas.easya.ui.friend_lesson_detail.LessonDetailFriend;
import link.ideas.easya.R;
import link.ideas.easya.ui.BaseActivity;
import link.ideas.easya.utils.CircleTransform;
import link.ideas.easya.utils.Constants;
import link.ideas.easya.utils.Helper;
import link.ideas.easya.utils.InjectorUtils;

/**
 * Screen that lists a friend's lessons for a given course. Data is observed from a
 * Firebase-backed {@link LiveData} via {@link FriendLessonListViewModel}; tapping a
 * lesson opens {@link LessonDetailFriend} with a shared-element transition.
 */
public class LessonListFriends extends BaseActivity {

    // Fixed: was copy-pasted as CourseListFriends.class.getSimpleName().
    public static final String LOG_TAG = LessonListFriends.class.getSimpleName();

    TextView emptyView;
    RecyclerView mRecyclerView;
    LinearLayout progress;
    private LessonFriendsAdapter mLessonFriendsAdapter;
    String coursePushId;
    ArrayList<Lesson> friendsLesson;
    ArrayList<String> lessonPushIds;
    FriendLessonListViewModel viewModel;
    // True once the first snapshot has been rendered; gates the intro animation in onResume.
    boolean isLoaded;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        Intent intent = getIntent();
        coursePushId = intent.getStringExtra(Constants.PREF_COURSE_PUSH_ID);
        String friendName = intent.getStringExtra(Constants.PREF_FRIEND_ACCOUNT_NAME);

        setContentView(R.layout.activity_subject_list_friends);
        setDrawer(true);
        setUpAPIs();
        loadNavHeader(Helper.getFristName(friendName) + getResources().getString(R.string.friend_lesson));
        setUpNavigationView();
        initializeScreen();
    }

    /**
     * Binds views, wires the adapter and its click handler, creates the view model and
     * starts listening for data when the device is online.
     */
    private void initializeScreen() {
        friendsLesson = new ArrayList<Lesson>();
        lessonPushIds = new ArrayList<String>();

        progress = (LinearLayout) findViewById(R.id.lin_Progress);
        mRecyclerView = (RecyclerView) findViewById(R.id.recyclerview_lesson);
        mRecyclerView.setLayoutManager(new LinearLayoutManager(this));
        mRecyclerView.setHasFixedSize(true);
        emptyView = (TextView) findViewById(R.id.empty_tv);

        mLessonFriendsAdapter = new LessonFriendsAdapter(friendsLesson, lessonPushIds, this,
                new LessonFriendsAdapter.CourseAdapterFriendsOnClickHolder() {
                    @Override
                    public void onClick(String lessonPushId, Lesson lesson, LessonFriendsAdapter.CourseAdapterFriendsViewHolder vh) {
                        Intent intent = new Intent(LessonListFriends.this, LessonDetailFriend.class);
                        intent.putExtra(Constants.PREF_COURSE_PUSH_ID, coursePushId);
                        intent.putExtra(Constants.PREF_LESSON_PUSH_ID, lessonPushId);
                        intent.putExtra(Constants.PREF_LESSON_OBJECT, lesson);

                        ActivityOptionsCompat activityOptions =
                                ActivityOptionsCompat.makeSceneTransitionAnimation(LessonListFriends.this,
                                        new Pair<View, String>(vh.lessonImage, getString(R.string.shared_element)));
                        // Fixed: the activity was started twice — once with the transition
                        // bundle and once via a plain startActivity(intent).
                        ActivityCompat.startActivity(LessonListFriends.this, intent, activityOptions.toBundle());
                    }
                });
        mRecyclerView.setAdapter(mLessonFriendsAdapter);

        FriendLessonListFactory factory = InjectorUtils.provideFriendLessonListViewModelFactory(coursePushId);
        viewModel = ViewModelProviders.of(this, factory).get(FriendLessonListViewModel.class);

        if (isDeviceOnline()) {
            attachDatabaseReadListener();
        } else {
            deviceOffline();
        }
    }

    /**
     * Observes the lesson snapshot LiveData and repopulates the list on every emission.
     */
    private void attachDatabaseReadListener() {
        progress.setVisibility(View.VISIBLE);
        LiveData<DataSnapshot> liveData = viewModel.getDataSnapshotLiveData();
        liveData.observe(this, new Observer<DataSnapshot>() {
            @Override
            public void onChanged(@Nullable DataSnapshot dataSnapshot) {
                // Fixed: the snapshot delivers the full child set each time, so clear
                // before re-adding to avoid duplicated rows on subsequent emissions.
                friendsLesson.clear();
                lessonPushIds.clear();

                for (DataSnapshot childDataSnapshot : dataSnapshot.getChildren()) {
                    Lesson lesson = childDataSnapshot.getValue(Lesson.class);
                    friendsLesson.add(lesson);
                    lessonPushIds.add(childDataSnapshot.getKey());
                }
                mLessonFriendsAdapter.notifyDataSetChanged();

                if (friendsLesson.size() == 0) {
                    emptyView.setVisibility(View.VISIBLE);
                } else {
                    emptyView.setVisibility(View.GONE);
                }
                progress.setVisibility(View.GONE);
                startIntroAnimation();
                isLoaded = true;
            }
        });
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
    }

    /** Slides the list up while fading it in. */
    private void startIntroAnimation() {
        mRecyclerView.setTranslationY(
                getResources().getDimensionPixelSize(R.dimen.list_item_lesson));
        mRecyclerView.setAlpha(0f);
        mRecyclerView.animate()
                .translationY(0)
                .setDuration(500)
                .alpha(1f)
                .setInterpolator(new AccelerateDecelerateInterpolator())
                .start();
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Replay the intro animation when returning to an already-loaded list.
        if (isLoaded)
            startIntroAnimation();
    }

    /**
     * Adapter listing a user's friends (email, name, avatar).
     * Created by Eman on 4/23/2017.
     */
    public static class UserFriendsAdapter extends RecyclerView.Adapter<UserFriendsAdapter.UserFriendsAdapterViewHolder> {

        final private Context mContext;
        final private UserFriendsAdapterOnClickHolder mClickHolder;
        private List<User> userList;
        private List<String> email;
        // NOTE(review): retained for interface compatibility; not read anywhere in this class.
        private String userEmail;

        public class UserFriendsAdapterViewHolder extends RecyclerView.ViewHolder implements View.OnClickListener {
            public final TextView friendEmail;
            public final TextView friendName;
            public final ImageView friendImage;

            public UserFriendsAdapterViewHolder(View view) {
                super(view);
                friendEmail = (TextView) view.findViewById(R.id.tv_friend_email);
                friendName = (TextView) view.findViewById(R.id.tv_friend_name);
                friendImage = (ImageView) view.findViewById(R.id.iv_friend_image);
                view.setOnClickListener(this);
            }

            @Override
            public void onClick(View v) {
                int adapterPostion = getAdapterPosition();
                String name = userList.get(adapterPostion).getName();
                String friendEmail = email.get(adapterPostion);
                mClickHolder.onClick(friendEmail, name, this);
            }
        }

        public UserFriendsAdapter(List<User> userList, List<String> email, String userEmail,
                                  Context context, UserFriendsAdapterOnClickHolder dh) {
            mContext = context;
            mClickHolder = dh;
            this.userList = userList;
            this.email = email;
            this.userEmail = userEmail;
        }

        /** Callback invoked when a friend row is tapped. */
        public interface UserFriendsAdapterOnClickHolder {
            void onClick(String friendEmail, String friendName, UserFriendsAdapterViewHolder vh);
        }

        @Override
        public UserFriendsAdapterViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
            if (parent instanceof RecyclerView) {
                View view = LayoutInflater.from(parent.getContext())
                        .inflate(R.layout.list_item_friend, parent, false);
                view.setFocusable(false);
                return new UserFriendsAdapterViewHolder(view);
            } else {
                // Fixed typo in the message ("Not bind th RecyclerView").
                throw new RuntimeException("Not bound to RecyclerView");
            }
        }

        @Override
        public void onBindViewHolder(final UserFriendsAdapterViewHolder holder, int position) {
            final User user = userList.get(position);
            final String friendEmail = email.get(position);
            // Renamed local (was "email") to stop shadowing the field of the same name.
            String decodedEmail = Helper.decodeEmail(friendEmail);

            holder.friendEmail.setText(decodedEmail);
            holder.friendEmail.setContentDescription(mContext.getString(R.string.a11y_email_button, decodedEmail));

            String name = user.getName();
            holder.friendName.setText(name);
            holder.friendName.setContentDescription(mContext.getString(R.string.a11y_name_button, name));

            Glide.with(mContext).load(user.getPhotoUrl())
                    .transform(new CircleTransform(mContext))
                    .error(R.drawable.ic_account_circle_black_24dp)
                    .into(holder.friendImage);
        }

        @Override
        public int getItemCount() {
            return userList.size();
        }
    }
}
/* * Copyright (C) 2011 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.math; import static com.google.common.math.MathTesting.ALL_LONG_CANDIDATES; import static com.google.common.math.MathTesting.ALL_ROUNDING_MODES; import static com.google.common.math.MathTesting.ALL_SAFE_ROUNDING_MODES; import static com.google.common.math.MathTesting.EXPONENTS; import static com.google.common.math.MathTesting.NEGATIVE_INTEGER_CANDIDATES; import static com.google.common.math.MathTesting.NEGATIVE_LONG_CANDIDATES; import static com.google.common.math.MathTesting.NONZERO_LONG_CANDIDATES; import static com.google.common.math.MathTesting.POSITIVE_INTEGER_CANDIDATES; import static com.google.common.math.MathTesting.POSITIVE_LONG_CANDIDATES; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import static java.math.BigInteger.valueOf; import static java.math.RoundingMode.FLOOR; import static java.math.RoundingMode.UNNECESSARY; import com.google.common.annotations.GwtCompatible; import com.google.common.annotations.GwtIncompatible; import com.google.common.testing.NullPointerTester; import java.math.BigDecimal; import java.math.BigInteger; import java.math.RoundingMode; import java.util.EnumSet; import java.util.Random; import junit.framework.TestCase; /** * Tests for LongMath. 
* * @author Louis Wasserman */ @GwtCompatible(emulated = true) public class LongMathTest extends TestCase { @SuppressWarnings("ConstantOverflow") public void testMaxSignedPowerOfTwo() { assertTrue(LongMath.isPowerOfTwo(LongMath.MAX_SIGNED_POWER_OF_TWO)); assertFalse(LongMath.isPowerOfTwo(LongMath.MAX_SIGNED_POWER_OF_TWO * 2)); } public void testCeilingPowerOfTwo() { for (long x : POSITIVE_LONG_CANDIDATES) { BigInteger expectedResult = BigIntegerMath.ceilingPowerOfTwo(BigInteger.valueOf(x)); if (fitsInLong(expectedResult)) { assertEquals(expectedResult.longValue(), LongMath.ceilingPowerOfTwo(x)); } else { try { LongMath.ceilingPowerOfTwo(x); fail("Expected ArithmeticException"); } catch (ArithmeticException expected) { } } } } public void testFloorPowerOfTwo() { for (long x : POSITIVE_LONG_CANDIDATES) { BigInteger expectedResult = BigIntegerMath.floorPowerOfTwo(BigInteger.valueOf(x)); assertEquals(expectedResult.longValue(), LongMath.floorPowerOfTwo(x)); } } public void testCeilingPowerOfTwoNegative() { for (long x : NEGATIVE_LONG_CANDIDATES) { try { LongMath.ceilingPowerOfTwo(x); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { } } } public void testFloorPowerOfTwoNegative() { for (long x : NEGATIVE_LONG_CANDIDATES) { try { LongMath.floorPowerOfTwo(x); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { } } } public void testCeilingPowerOfTwoZero() { try { LongMath.ceilingPowerOfTwo(0L); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { } } public void testFloorPowerOfTwoZero() { try { LongMath.floorPowerOfTwo(0L); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { } } @GwtIncompatible // TODO public void testConstantMaxPowerOfSqrt2Unsigned() { assertEquals( /*expected=*/ BigIntegerMath.sqrt(BigInteger.ZERO.setBit(2 * Long.SIZE - 1), FLOOR) .longValue(), /*actual=*/ LongMath.MAX_POWER_OF_SQRT2_UNSIGNED); } 
@GwtIncompatible // BigIntegerMath // TODO(cpovirk): GWT-enable BigIntegerMath public void testMaxLog10ForLeadingZeros() { for (int i = 0; i < Long.SIZE; i++) { assertEquals( BigIntegerMath.log10(BigInteger.ONE.shiftLeft(Long.SIZE - i), FLOOR), LongMath.maxLog10ForLeadingZeros[i]); } } @GwtIncompatible // TODO public void testConstantsPowersOf10() { for (int i = 0; i < LongMath.powersOf10.length; i++) { assertEquals(LongMath.checkedPow(10, i), LongMath.powersOf10[i]); } try { LongMath.checkedPow(10, LongMath.powersOf10.length); fail("Expected ArithmeticException"); } catch (ArithmeticException expected) { } } @GwtIncompatible // TODO public void testConstantsHalfPowersOf10() { for (int i = 0; i < LongMath.halfPowersOf10.length; i++) { assertEquals( BigIntegerMath.sqrt(BigInteger.TEN.pow(2 * i + 1), FLOOR), BigInteger.valueOf(LongMath.halfPowersOf10[i])); } BigInteger nextBigger = BigIntegerMath.sqrt(BigInteger.TEN.pow(2 * LongMath.halfPowersOf10.length + 1), FLOOR); assertTrue(nextBigger.compareTo(BigInteger.valueOf(Long.MAX_VALUE)) > 0); } @GwtIncompatible // TODO public void testConstantsSqrtMaxLong() { assertEquals( /*expected=*/ LongMath.sqrt(Long.MAX_VALUE, FLOOR), /*actual=*/ LongMath.FLOOR_SQRT_MAX_LONG); } @GwtIncompatible // TODO public void testConstantsFactorials() { long expected = 1; for (int i = 0; i < LongMath.factorials.length; i++, expected *= i) { assertEquals(expected, LongMath.factorials[i]); } try { LongMath.checkedMultiply( LongMath.factorials[LongMath.factorials.length - 1], LongMath.factorials.length); fail("Expected ArithmeticException"); } catch (ArithmeticException expect) { } } @GwtIncompatible // TODO public void testConstantsBiggestBinomials() { for (int k = 0; k < LongMath.biggestBinomials.length; k++) { assertTrue(fitsInLong(BigIntegerMath.binomial(LongMath.biggestBinomials[k], k))); assertTrue( LongMath.biggestBinomials[k] == Integer.MAX_VALUE || !fitsInLong(BigIntegerMath.binomial(LongMath.biggestBinomials[k] + 1, k))); // In the 
first case, any long is valid; in the second, we want to test that the next-bigger // long overflows. } int k = LongMath.biggestBinomials.length; assertFalse(fitsInLong(BigIntegerMath.binomial(2 * k, k))); // 2 * k is the smallest value for which we don't replace k with (n-k). } @GwtIncompatible // TODO public void testConstantsBiggestSimpleBinomials() { for (int k = 0; k < LongMath.biggestSimpleBinomials.length; k++) { assertTrue(LongMath.biggestSimpleBinomials[k] <= LongMath.biggestBinomials[k]); long unused = simpleBinomial(LongMath.biggestSimpleBinomials[k], k); // mustn't throw if (LongMath.biggestSimpleBinomials[k] < Integer.MAX_VALUE) { // unless all n are fair game with this k try { simpleBinomial(LongMath.biggestSimpleBinomials[k] + 1, k); fail("Expected ArithmeticException"); } catch (ArithmeticException expected) { } } } try { int k = LongMath.biggestSimpleBinomials.length; simpleBinomial(2 * k, k); // 2 * k is the smallest value for which we don't replace k with (n-k). fail("Expected ArithmeticException"); } catch (ArithmeticException expected) { } } @AndroidIncompatible // slow public void testLessThanBranchFree() { for (long x : ALL_LONG_CANDIDATES) { for (long y : ALL_LONG_CANDIDATES) { BigInteger difference = BigInteger.valueOf(x).subtract(BigInteger.valueOf(y)); if (fitsInLong(difference)) { int expected = (x < y) ? 1 : 0; int actual = LongMath.lessThanBranchFree(x, y); assertEquals(expected, actual); } } } } // Throws an ArithmeticException if "the simple implementation" of binomial coefficients overflows @GwtIncompatible // TODO private long simpleBinomial(int n, int k) { long accum = 1; for (int i = 0; i < k; i++) { accum = LongMath.checkedMultiply(accum, n - i); accum /= i + 1; } return accum; } @GwtIncompatible // java.math.BigInteger public void testIsPowerOfTwo() { for (long x : ALL_LONG_CANDIDATES) { // Checks for a single bit set. 
BigInteger bigX = BigInteger.valueOf(x); boolean expected = (bigX.signum() > 0) && (bigX.bitCount() == 1); assertEquals(expected, LongMath.isPowerOfTwo(x)); } } public void testLog2ZeroAlwaysThrows() { for (RoundingMode mode : ALL_ROUNDING_MODES) { try { LongMath.log2(0L, mode); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { } } } public void testLog2NegativeAlwaysThrows() { for (long x : NEGATIVE_LONG_CANDIDATES) { for (RoundingMode mode : ALL_ROUNDING_MODES) { try { LongMath.log2(x, mode); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { } } } } /* Relies on the correctness of BigIntegerMath.log2 for all modes except UNNECESSARY. */ public void testLog2MatchesBigInteger() { for (long x : POSITIVE_LONG_CANDIDATES) { for (RoundingMode mode : ALL_SAFE_ROUNDING_MODES) { // The BigInteger implementation is tested separately, use it as the reference. assertEquals(BigIntegerMath.log2(valueOf(x), mode), LongMath.log2(x, mode)); } } } /* Relies on the correctness of isPowerOfTwo(long). */ public void testLog2Exact() { for (long x : POSITIVE_LONG_CANDIDATES) { // We only expect an exception if x was not a power of 2. 
boolean isPowerOf2 = LongMath.isPowerOfTwo(x); try { assertEquals(x, 1L << LongMath.log2(x, UNNECESSARY)); assertTrue(isPowerOf2); } catch (ArithmeticException e) { assertFalse(isPowerOf2); } } } @GwtIncompatible // TODO public void testLog10ZeroAlwaysThrows() { for (RoundingMode mode : ALL_ROUNDING_MODES) { try { LongMath.log10(0L, mode); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { } } } @GwtIncompatible // TODO public void testLog10NegativeAlwaysThrows() { for (long x : NEGATIVE_LONG_CANDIDATES) { for (RoundingMode mode : ALL_ROUNDING_MODES) { try { LongMath.log10(x, mode); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { } } } } // Relies on the correctness of BigIntegerMath.log10 for all modes except UNNECESSARY. @GwtIncompatible // TODO public void testLog10MatchesBigInteger() { for (long x : POSITIVE_LONG_CANDIDATES) { for (RoundingMode mode : ALL_SAFE_ROUNDING_MODES) { assertEquals(BigIntegerMath.log10(valueOf(x), mode), LongMath.log10(x, mode)); } } } // Relies on the correctness of log10(long, FLOOR) and of pow(long, int). 
@GwtIncompatible // TODO public void testLog10Exact() { for (long x : POSITIVE_LONG_CANDIDATES) { int floor = LongMath.log10(x, FLOOR); boolean expectedSuccess = LongMath.pow(10, floor) == x; try { assertEquals(floor, LongMath.log10(x, UNNECESSARY)); assertTrue(expectedSuccess); } catch (ArithmeticException e) { if (expectedSuccess) { failFormat("expected log10(%s, UNNECESSARY) = %s; got ArithmeticException", x, floor); } } } } @GwtIncompatible // TODO public void testLog10TrivialOnPowerOf10() { long x = 1000000000000L; for (RoundingMode mode : ALL_ROUNDING_MODES) { assertEquals(12, LongMath.log10(x, mode)); } } @GwtIncompatible // TODO public void testSqrtNegativeAlwaysThrows() { for (long x : NEGATIVE_LONG_CANDIDATES) { for (RoundingMode mode : ALL_ROUNDING_MODES) { try { LongMath.sqrt(x, mode); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { } } } } // Relies on the correctness of BigIntegerMath.sqrt for all modes except UNNECESSARY. @GwtIncompatible // TODO public void testSqrtMatchesBigInteger() { for (long x : POSITIVE_LONG_CANDIDATES) { for (RoundingMode mode : ALL_SAFE_ROUNDING_MODES) { // Promote the long value (rather than using longValue() on the expected value) to avoid // any risk of truncation which could lead to a false positive. assertEquals(BigIntegerMath.sqrt(valueOf(x), mode), valueOf(LongMath.sqrt(x, mode))); } } } /* Relies on the correctness of sqrt(long, FLOOR). */ @GwtIncompatible // TODO public void testSqrtExactMatchesFloorOrThrows() { for (long x : POSITIVE_LONG_CANDIDATES) { long sqrtFloor = LongMath.sqrt(x, FLOOR); // We only expect an exception if x was not a perfect square. 
boolean isPerfectSquare = (sqrtFloor * sqrtFloor == x); try { assertEquals(sqrtFloor, LongMath.sqrt(x, UNNECESSARY)); assertTrue(isPerfectSquare); } catch (ArithmeticException e) { assertFalse(isPerfectSquare); } } } @GwtIncompatible // TODO public void testPow() { for (long i : ALL_LONG_CANDIDATES) { for (int exp : EXPONENTS) { assertEquals(LongMath.pow(i, exp), valueOf(i).pow(exp).longValue()); } } } @GwtIncompatible // TODO @AndroidIncompatible // TODO(cpovirk): File BigDecimal.divide() rounding bug. public void testDivNonZero() { for (long p : NONZERO_LONG_CANDIDATES) { for (long q : NONZERO_LONG_CANDIDATES) { for (RoundingMode mode : ALL_SAFE_ROUNDING_MODES) { long expected = new BigDecimal(valueOf(p)).divide(new BigDecimal(valueOf(q)), 0, mode).longValue(); long actual = LongMath.divide(p, q, mode); if (expected != actual) { failFormat("expected divide(%s, %s, %s) = %s; got %s", p, q, mode, expected, actual); } } } } } @GwtIncompatible // TODO @AndroidIncompatible // Bug in older versions of Android we test against, since fixed. 
public void testDivNonZeroExact() { for (long p : NONZERO_LONG_CANDIDATES) { for (long q : NONZERO_LONG_CANDIDATES) { boolean expectedSuccess = (p % q) == 0L; try { assertEquals(p, LongMath.divide(p, q, UNNECESSARY) * q); assertTrue(expectedSuccess); } catch (ArithmeticException e) { if (expectedSuccess) { failFormat( "expected divide(%s, %s, UNNECESSARY) to succeed; got ArithmeticException", p, q); } } } } } @GwtIncompatible // TODO public void testZeroDivIsAlwaysZero() { for (long q : NONZERO_LONG_CANDIDATES) { for (RoundingMode mode : ALL_ROUNDING_MODES) { assertEquals(0L, LongMath.divide(0L, q, mode)); } } } @GwtIncompatible // TODO public void testDivByZeroAlwaysFails() { for (long p : ALL_LONG_CANDIDATES) { for (RoundingMode mode : ALL_ROUNDING_MODES) { try { LongMath.divide(p, 0L, mode); fail("Expected ArithmeticException"); } catch (ArithmeticException expected) { } } } } @GwtIncompatible // TODO public void testIntMod() { for (long x : ALL_LONG_CANDIDATES) { for (int m : POSITIVE_INTEGER_CANDIDATES) { assertEquals(valueOf(x).mod(valueOf(m)).intValue(), LongMath.mod(x, m)); } } } @GwtIncompatible // TODO public void testIntModNegativeModulusFails() { for (long x : ALL_LONG_CANDIDATES) { for (int m : NEGATIVE_INTEGER_CANDIDATES) { try { LongMath.mod(x, m); fail("Expected ArithmeticException"); } catch (ArithmeticException expected) { } } } } @GwtIncompatible // TODO public void testIntModZeroModulusFails() { for (long x : ALL_LONG_CANDIDATES) { try { LongMath.mod(x, 0); fail("Expected AE"); } catch (ArithmeticException expected) { } } } @AndroidIncompatible // slow @GwtIncompatible // TODO public void testMod() { for (long x : ALL_LONG_CANDIDATES) { for (long m : POSITIVE_LONG_CANDIDATES) { assertEquals(valueOf(x).mod(valueOf(m)).longValue(), LongMath.mod(x, m)); } } } @GwtIncompatible // TODO public void testModNegativeModulusFails() { for (long x : ALL_LONG_CANDIDATES) { for (long m : NEGATIVE_LONG_CANDIDATES) { try { LongMath.mod(x, m); fail("Expected 
ArithmeticException"); } catch (ArithmeticException expected) { } } } } public void testGCDExhaustive() { for (long a : POSITIVE_LONG_CANDIDATES) { for (long b : POSITIVE_LONG_CANDIDATES) { assertEquals(valueOf(a).gcd(valueOf(b)), valueOf(LongMath.gcd(a, b))); } } } @GwtIncompatible // TODO public void testGCDZero() { for (long a : POSITIVE_LONG_CANDIDATES) { assertEquals(a, LongMath.gcd(a, 0)); assertEquals(a, LongMath.gcd(0, a)); } assertEquals(0, LongMath.gcd(0, 0)); } @GwtIncompatible // TODO public void testGCDNegativePositiveThrows() { for (long a : NEGATIVE_LONG_CANDIDATES) { try { LongMath.gcd(a, 3); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { } try { LongMath.gcd(3, a); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { } } } @GwtIncompatible // TODO public void testGCDNegativeZeroThrows() { for (long a : NEGATIVE_LONG_CANDIDATES) { try { LongMath.gcd(a, 0); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { } try { LongMath.gcd(0, a); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { } } } @AndroidIncompatible // slow @GwtIncompatible // TODO public void testCheckedAdd() { for (long a : ALL_LONG_CANDIDATES) { for (long b : ALL_LONG_CANDIDATES) { BigInteger expectedResult = valueOf(a).add(valueOf(b)); boolean expectedSuccess = fitsInLong(expectedResult); try { assertEquals(a + b, LongMath.checkedAdd(a, b)); assertTrue(expectedSuccess); } catch (ArithmeticException e) { if (expectedSuccess) { failFormat( "expected checkedAdd(%s, %s) = %s; got ArithmeticException", a, b, expectedResult); } } } } } @GwtIncompatible // TODO @AndroidIncompatible // slow public void testCheckedSubtract() { for (long a : ALL_LONG_CANDIDATES) { for (long b : ALL_LONG_CANDIDATES) { BigInteger expectedResult = valueOf(a).subtract(valueOf(b)); boolean expectedSuccess = fitsInLong(expectedResult); try { assertEquals(a - b, 
LongMath.checkedSubtract(a, b)); assertTrue(expectedSuccess); } catch (ArithmeticException e) { if (expectedSuccess) { failFormat( "expected checkedSubtract(%s, %s) = %s; got ArithmeticException", a, b, expectedResult); } } } } } @AndroidIncompatible // slow public void testCheckedMultiply() { boolean isAndroid = TestPlatform.isAndroid(); for (long a : ALL_LONG_CANDIDATES) { for (long b : ALL_LONG_CANDIDATES) { if (isAndroid && a == -4294967296L && b == 2147483648L) { /* * Bug in older versions of Android we test against, since fixed: -9223372036854775808L / * -4294967296L = -9223372036854775808L! * * To be clear, this bug affects not the test's computation of the expected result but the * _actual prod code_. But it probably affects only unusual cases. */ continue; } BigInteger expectedResult = valueOf(a).multiply(valueOf(b)); boolean expectedSuccess = fitsInLong(expectedResult); try { assertEquals(a * b, LongMath.checkedMultiply(a, b)); assertTrue(expectedSuccess); } catch (ArithmeticException e) { if (expectedSuccess) { failFormat( "expected checkedMultiply(%s, %s) = %s; got ArithmeticException", a, b, expectedResult); } } } } } @GwtIncompatible // TODO public void testCheckedPow() { for (long b : ALL_LONG_CANDIDATES) { for (int exp : EXPONENTS) { BigInteger expectedResult = valueOf(b).pow(exp); boolean expectedSuccess = fitsInLong(expectedResult); try { assertEquals(expectedResult.longValue(), LongMath.checkedPow(b, exp)); assertTrue(expectedSuccess); } catch (ArithmeticException e) { if (expectedSuccess) { failFormat( "expected checkedPow(%s, %s) = %s; got ArithmeticException", b, exp, expectedResult); } } } } } @AndroidIncompatible // slow @GwtIncompatible // TODO public void testSaturatedAdd() { for (long a : ALL_LONG_CANDIDATES) { for (long b : ALL_LONG_CANDIDATES) { assertOperationEquals( a, b, "s+", saturatedCast(valueOf(a).add(valueOf(b))), LongMath.saturatedAdd(a, b)); } } } @AndroidIncompatible // slow @GwtIncompatible // TODO public void 
testSaturatedSubtract() { for (long a : ALL_LONG_CANDIDATES) { for (long b : ALL_LONG_CANDIDATES) { assertOperationEquals( a, b, "s-", saturatedCast(valueOf(a).subtract(valueOf(b))), LongMath.saturatedSubtract(a, b)); } } } @AndroidIncompatible // slow @GwtIncompatible // TODO public void testSaturatedMultiply() { for (long a : ALL_LONG_CANDIDATES) { for (long b : ALL_LONG_CANDIDATES) { assertOperationEquals( a, b, "s*", saturatedCast(valueOf(a).multiply(valueOf(b))), LongMath.saturatedMultiply(a, b)); } } } @GwtIncompatible // TODO public void testSaturatedPow() { for (long a : ALL_LONG_CANDIDATES) { for (int b : EXPONENTS) { assertOperationEquals( a, b, "s^", saturatedCast(valueOf(a).pow(b)), LongMath.saturatedPow(a, b)); } } } private void assertOperationEquals(long a, long b, String op, long expected, long actual) { if (expected != actual) { fail("Expected for " + a + " " + op + " " + b + " = " + expected + ", but got " + actual); } } // Depends on the correctness of BigIntegerMath.factorial. @GwtIncompatible // TODO public void testFactorial() { for (int n = 0; n <= 50; n++) { BigInteger expectedBig = BigIntegerMath.factorial(n); long expectedLong = fitsInLong(expectedBig) ? expectedBig.longValue() : Long.MAX_VALUE; assertEquals(expectedLong, LongMath.factorial(n)); } } @GwtIncompatible // TODO public void testFactorialNegative() { for (int n : NEGATIVE_INTEGER_CANDIDATES) { try { LongMath.factorial(n); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { } } } // Depends on the correctness of BigIntegerMath.binomial. public void testBinomial() { for (int n = 0; n <= 70; n++) { for (int k = 0; k <= n; k++) { BigInteger expectedBig = BigIntegerMath.binomial(n, k); long expectedLong = fitsInLong(expectedBig) ? 
expectedBig.longValue() : Long.MAX_VALUE; assertEquals(expectedLong, LongMath.binomial(n, k)); } } } @GwtIncompatible // Slow public void testBinomial_exhaustiveNotOverflowing() { // Tests all of the inputs to LongMath.binomial that won't cause it to overflow, that weren't // tested in the previous method, for k >= 3. for (int k = 3; k < LongMath.biggestBinomials.length; k++) { for (int n = 70; n <= LongMath.biggestBinomials[k]; n++) { assertEquals(BigIntegerMath.binomial(n, k).longValue(), LongMath.binomial(n, k)); } } } public void testBinomialOutside() { for (int n = 0; n <= 50; n++) { try { LongMath.binomial(n, -1); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { } try { LongMath.binomial(n, n + 1); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { } } } public void testBinomialNegative() { for (int n : NEGATIVE_INTEGER_CANDIDATES) { try { LongMath.binomial(n, 0); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException expected) { } } } @GwtIncompatible // far too slow public void testSqrtOfPerfectSquareAsDoubleIsPerfect() { // This takes just over a minute on my machine. 
for (long n = 0; n <= LongMath.FLOOR_SQRT_MAX_LONG; n++) { long actual = (long) Math.sqrt(n * n); assertTrue(actual == n); } } public void testSqrtOfLongIsAtMostFloorSqrtMaxLong() { long sqrtMaxLong = (long) Math.sqrt(Long.MAX_VALUE); assertTrue(sqrtMaxLong <= LongMath.FLOOR_SQRT_MAX_LONG); } @AndroidIncompatible // slow @GwtIncompatible // java.math.BigInteger public void testMean() { // Odd-sized ranges have an obvious mean assertMean(2, 1, 3); assertMean(-2, -3, -1); assertMean(0, -1, 1); assertMean(1, -1, 3); assertMean((1L << 62) - 1, -1, Long.MAX_VALUE); // Even-sized ranges should prefer the lower mean assertMean(2, 1, 4); assertMean(-3, -4, -1); assertMean(0, -1, 2); assertMean(0, Long.MIN_VALUE + 2, Long.MAX_VALUE); assertMean(0, 0, 1); assertMean(-1, -1, 0); assertMean(-1, Long.MIN_VALUE, Long.MAX_VALUE); // x == y == mean assertMean(1, 1, 1); assertMean(0, 0, 0); assertMean(-1, -1, -1); assertMean(Long.MIN_VALUE, Long.MIN_VALUE, Long.MIN_VALUE); assertMean(Long.MAX_VALUE, Long.MAX_VALUE, Long.MAX_VALUE); // Exhaustive checks for (long x : ALL_LONG_CANDIDATES) { for (long y : ALL_LONG_CANDIDATES) { assertMean(x, y); } } } /** Helper method that asserts the arithmetic mean of x and y is equal to the expectedMean. */ private static void assertMean(long expectedMean, long x, long y) { assertEquals( "The expectedMean should be the same as computeMeanSafely", expectedMean, computeMeanSafely(x, y)); assertMean(x, y); } /** * Helper method that asserts the arithmetic mean of x and y is equal to the result of * computeMeanSafely. */ private static void assertMean(long x, long y) { long expectedMean = computeMeanSafely(x, y); assertEquals(expectedMean, LongMath.mean(x, y)); assertEquals( "The mean of x and y should equal the mean of y and x", expectedMean, LongMath.mean(y, x)); } /** * Computes the mean in a way that is obvious and resilient to overflow by using BigInteger * arithmetic. 
*/
private static long computeMeanSafely(long x, long y) {
  BigInteger bigX = BigInteger.valueOf(x);
  BigInteger bigY = BigInteger.valueOf(y);
  // RoundingMode.FLOOR replaces the deprecated int constant BigDecimal.ROUND_FLOOR
  // (the divide(BigDecimal, int) overload is deprecated since Java 9).
  BigDecimal bigMean =
      new BigDecimal(bigX.add(bigY)).divide(BigDecimal.valueOf(2), RoundingMode.FLOOR);
  // parseLong blows up on overflow as opposed to longValue() which does not.
  return Long.parseLong(bigMean.toString());
}

/** Returns true iff {@code big} fits in a signed 64-bit long. */
private static boolean fitsInLong(BigInteger big) {
  return big.bitLength() <= 63;
}

private static final BigInteger MAX_LONG = BigInteger.valueOf(Long.MAX_VALUE);
private static final BigInteger MIN_LONG = BigInteger.valueOf(Long.MIN_VALUE);

/** Clamps {@code big} into the {@code [Long.MIN_VALUE, Long.MAX_VALUE]} range. */
private static long saturatedCast(BigInteger big) {
  if (big.compareTo(MAX_LONG) > 0) {
    return Long.MAX_VALUE;
  }
  if (big.compareTo(MIN_LONG) < 0) {
    return Long.MIN_VALUE;
  }
  return big.longValue();
}

@GwtIncompatible // NullPointerTester
public void testNullPointers() {
  NullPointerTester tester = new NullPointerTester();
  tester.setDefault(int.class, 1);
  tester.setDefault(long.class, 1L);
  tester.testAllPublicStaticMethods(LongMath.class);
}

@GwtIncompatible // isPrime is GWT-incompatible
public void testIsPrimeSmall() {
  // Check the first 1000 integers
  for (int i = 2; i < 1000; i++) {
    assertEquals(BigInteger.valueOf(i).isProbablePrime(100), LongMath.isPrime(i));
  }
}

@GwtIncompatible // isPrime is GWT-incompatible
public void testIsPrimeManyConstants() {
  // Test the thorough test inputs, which also includes special constants in the Miller-Rabin
  // tests.
  for (long l : POSITIVE_LONG_CANDIDATES) {
    assertEquals(BigInteger.valueOf(l).isProbablePrime(100), LongMath.isPrime(l));
  }
}

@GwtIncompatible // isPrime is GWT-incompatible
public void testIsPrimeOnUniformRandom() {
  Random rand = new Random(1);
  for (int bits = 10; bits < 63; bits++) {
    for (int i = 0; i < 2000; i++) {
      // A random non-negative long with at most `bits` bits. (The previous comment claimed the
      // full [0, Long.MAX_VALUE] range, but the mask limits the value to [0, 2^bits - 1].)
      long l = rand.nextLong() & ((1L << bits) - 1);
      assertEquals(BigInteger.valueOf(l).isProbablePrime(100), LongMath.isPrime(l));
    }
  }
}

@GwtIncompatible // isPrime is GWT-incompatible
public void testIsPrimeOnRandomPrimes() {
  Random rand = new Random(1);
  for (int bits = 10; bits < 63; bits++) {
    for (int i = 0; i < 100; i++) {
      long p = BigInteger.probablePrime(bits, rand).longValue();
      assertTrue(LongMath.isPrime(p));
    }
  }
}

@GwtIncompatible // isPrime is GWT-incompatible
public void testIsPrimeOnRandomComposites() {
  Random rand = new Random(1);
  // bits < 32 keeps p * q within a long without overflow.
  for (int bits = 5; bits < 32; bits++) {
    for (int i = 0; i < 100; i++) {
      long p = BigInteger.probablePrime(bits, rand).longValue();
      long q = BigInteger.probablePrime(bits, rand).longValue();
      assertFalse(LongMath.isPrime(p * q));
    }
  }
}

@GwtIncompatible // isPrime is GWT-incompatible
public void testIsPrimeThrowsOnNegative() {
  try {
    LongMath.isPrime(-1);
    fail("Expected IllegalArgumentException");
  } catch (IllegalArgumentException expected) {
  }
}

private static final long[] roundToDoubleTestCandidates = {
  0,
  16,
  1L << 53,
  (1L << 53) + 1,
  (1L << 53) + 2,
  (1L << 53) + 3,
  (1L << 53) + 4,
  1L << 54,
  (1L << 54) + 1,
  (1L << 54) + 2,
  (1L << 54) + 3,
  (1L << 54) + 4,
  0x7ffffffffffffe00L, // halfway between 2^63 and next-lower double
  0x7ffffffffffffe01L, // above + 1
  0x7ffffffffffffdffL, // above - 1
  Long.MAX_VALUE - (1L << 11) + 1,
  Long.MAX_VALUE - 2,
  Long.MAX_VALUE - 1,
  Long.MAX_VALUE,
  -16,
  -1L << 53,
  -(1L << 53) - 1,
  -(1L << 53) - 2,
  -(1L << 53) - 3,
  -(1L << 53) - 4,
  -1L << 54,
  -(1L << 54) - 1,
  -(1L << 54) - 2,
  -(1L << 54) - 3,
  -(1L << 54) - 4,
  Long.MIN_VALUE + 2,
  Long.MIN_VALUE + 1,
  Long.MIN_VALUE
};

@GwtIncompatible
public void testRoundToDoubleAgainstBigInteger() {
  for (RoundingMode roundingMode : EnumSet.complementOf(EnumSet.of(UNNECESSARY))) {
    for (long candidate : roundToDoubleTestCandidates) {
      assertThat(LongMath.roundToDouble(candidate, roundingMode))
          .isEqualTo(BigIntegerMath.roundToDouble(BigInteger.valueOf(candidate), roundingMode));
    }
  }
}

@GwtIncompatible
public void testRoundToDoubleAgainstBigIntegerUnnecessary() {
  for (long candidate : roundToDoubleTestCandidates) {
    Double expectedDouble = null;
    try {
      expectedDouble = BigIntegerMath.roundToDouble(BigInteger.valueOf(candidate), UNNECESSARY);
    } catch (ArithmeticException expected) {
      // do nothing
    }

    if (expectedDouble != null) {
      assertThat(LongMath.roundToDouble(candidate, UNNECESSARY)).isEqualTo(expectedDouble);
    } else {
      try {
        LongMath.roundToDouble(candidate, UNNECESSARY);
        fail("Expected ArithmeticException on roundToDouble(" + candidate + ", UNNECESSARY)");
      } catch (ArithmeticException expected) {
        // success
      }
    }
  }
}

/** Fails the test with a message formatted from the given template and args. */
private static void failFormat(String template, Object... args) {
  assertWithMessage(template, args).fail();
}
}
package org.apereo.cas.mgmt.services.web.factory; import com.google.common.base.Throwables; import org.apache.commons.lang3.StringUtils; import org.apereo.cas.mgmt.services.web.beans.RegisteredServiceEditBean; import org.apereo.cas.mgmt.services.web.beans.RegisteredServiceLogoutTypeEditBean; import org.apereo.cas.mgmt.services.web.beans.RegisteredServiceOAuthTypeEditBean; import org.apereo.cas.mgmt.services.web.beans.RegisteredServiceOidcTypeEditBean; import org.apereo.cas.mgmt.services.web.beans.RegisteredServicePublicKeyEditBean; import org.apereo.cas.mgmt.services.web.beans.RegisteredServiceSamlTypeEditBean; import org.apereo.cas.mgmt.services.web.beans.RegisteredServiceTypeEditBean; import org.apereo.cas.mgmt.services.web.beans.RegisteredServiceViewBean; import org.apereo.cas.services.AbstractRegisteredService; import org.apereo.cas.services.DefaultRegisteredServiceProperty; import org.apereo.cas.services.LogoutType; import org.apereo.cas.services.OidcRegisteredService; import org.apereo.cas.services.RegexRegisteredService; import org.apereo.cas.services.RegisteredService; import org.apereo.cas.services.RegisteredServiceProperty; import org.apereo.cas.services.RegisteredServicePublicKey; import org.apereo.cas.services.RegisteredServicePublicKeyImpl; import org.apereo.cas.support.oauth.services.OAuthCallbackAuthorizeService; import org.apereo.cas.support.oauth.services.OAuthRegisteredService; import org.apereo.cas.support.saml.services.SamlRegisteredService; import org.apereo.cas.util.RegexUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.net.URL; import java.util.Map; import java.util.Set; /** * Default mapper for converting {@link RegisteredService} to/from {@link RegisteredServiceEditBean.ServiceData}. 
* * @author Daniel Frett * @since 4.2 */ public class DefaultRegisteredServiceMapper implements RegisteredServiceMapper { private static final Logger LOGGER = LoggerFactory.getLogger(DefaultRegisteredServiceMapper.class); @Override public void mapRegisteredService(final RegisteredService svc, final RegisteredServiceEditBean.ServiceData bean) { bean.setAssignedId(Long.toString(svc.getId())); bean.setServiceId(svc.getServiceId()); bean.setName(svc.getName()); bean.setDescription(svc.getDescription()); if (svc.getLogo() != null) { bean.setLogoUrl(svc.getLogo().toExternalForm()); } bean.setRequiredHandlers(svc.getRequiredHandlers()); if (svc instanceof OAuthCallbackAuthorizeService) { bean.setType(RegisteredServiceTypeEditBean.OAUTH_CALLBACK_AUTHZ.toString()); } if (svc instanceof OAuthRegisteredService) { bean.setType(RegisteredServiceTypeEditBean.OAUTH.toString()); final OAuthRegisteredService oauth = (OAuthRegisteredService) svc; final RegisteredServiceOAuthTypeEditBean oauthBean = bean.getOauth(); oauthBean.setBypass(oauth.isBypassApprovalPrompt()); oauthBean.setClientId(oauth.getClientId()); oauthBean.setClientSecret(oauth.getClientSecret()); oauthBean.setRefreshToken(oauth.isGenerateRefreshToken()); oauthBean.setJsonFormat(oauth.isJsonFormat()); if (svc instanceof OidcRegisteredService) { bean.setType(RegisteredServiceTypeEditBean.OIDC.toString()); final OidcRegisteredService oidc = (OidcRegisteredService) svc; final RegisteredServiceOidcTypeEditBean oidcBean = bean.getOidc(); oidcBean.setJwks(oidc.getJwks()); oidcBean.setSignToken(oidc.isSignIdToken()); } } if (svc instanceof SamlRegisteredService) { bean.setType(RegisteredServiceTypeEditBean.SAML.toString()); final SamlRegisteredService saml = (SamlRegisteredService) svc; final RegisteredServiceSamlTypeEditBean samlbean = bean.getSaml(); samlbean.setMdLoc(saml.getMetadataLocation()); samlbean.setMdMaxVal(saml.getMetadataMaxValidity()); samlbean.setMdSigLoc(saml.getMetadataSignatureLocation()); 
samlbean.setAuthCtxCls(saml.getRequiredAuthenticationContextClass()); samlbean.setEncAssert(saml.isEncryptAssertions()); samlbean.setSignResp(saml.isSignResponses()); samlbean.setSignAssert(saml.isSignAssertions()); } bean.setTheme(svc.getTheme()); bean.setEvalOrder(svc.getEvaluationOrder()); final LogoutType logoutType = svc.getLogoutType(); switch (logoutType) { case BACK_CHANNEL: bean.setLogoutType(RegisteredServiceLogoutTypeEditBean.BACK.toString()); break; case FRONT_CHANNEL: bean.setLogoutType(RegisteredServiceLogoutTypeEditBean.FRONT.toString()); break; default: bean.setLogoutType(RegisteredServiceLogoutTypeEditBean.NONE.toString()); break; } final URL url = svc.getLogoutUrl(); if (url != null) { bean.setLogoutUrl(url.toExternalForm()); } final RegisteredServicePublicKey key = svc.getPublicKey(); final RegisteredServicePublicKeyEditBean pBean = bean.getPublicKey(); if (key != null) { pBean.setAlgorithm(key.getAlgorithm()); pBean.setLocation(key.getLocation()); } final Map<String, RegisteredServiceProperty> props = svc.getProperties(); final Map<String, Set<String>> beanProps = bean.getProps(); for (final Map.Entry<String, RegisteredServiceProperty> stringRegisteredServicePropertyEntry : props.entrySet()) { final Set<String> set = stringRegisteredServicePropertyEntry.getValue().getValues(); beanProps.put(stringRegisteredServicePropertyEntry.getKey(), set); } } @Override public void mapRegisteredService(final RegisteredService svc, final RegisteredServiceViewBean bean) { bean.setAssignedId(Long.toString(svc.getId())); bean.setServiceId(svc.getServiceId()); bean.setName(svc.getName()); bean.setDescription(svc.getDescription()); bean.setEvalOrder(svc.getEvaluationOrder()); if (svc.getLogo() != null) { bean.setLogoUrl(svc.getLogo().toExternalForm()); } } @Override public RegisteredService toRegisteredService(final RegisteredServiceEditBean.ServiceData data) { try { final AbstractRegisteredService regSvc; // create base RegisteredService object final String type = 
data.getType(); if (StringUtils.equalsIgnoreCase(type, RegisteredServiceTypeEditBean.OAUTH_CALLBACK_AUTHZ.toString())) { regSvc = new OAuthCallbackAuthorizeService(); } else if (StringUtils.equalsIgnoreCase(type, RegisteredServiceTypeEditBean.OAUTH.toString()) || StringUtils.equalsIgnoreCase(type, RegisteredServiceTypeEditBean.OIDC.toString())) { if (StringUtils.equalsIgnoreCase(type, RegisteredServiceTypeEditBean.OAUTH.toString())) { regSvc = new OAuthRegisteredService(); } else { regSvc = new OidcRegisteredService(); } final RegisteredServiceOAuthTypeEditBean oauthBean = data.getOauth(); ((OAuthRegisteredService) regSvc).setClientId(oauthBean.getClientId()); ((OAuthRegisteredService) regSvc).setClientSecret(oauthBean.getClientSecret()); ((OAuthRegisteredService) regSvc).setBypassApprovalPrompt(oauthBean.isBypass()); ((OAuthRegisteredService) regSvc).setGenerateRefreshToken(oauthBean.isRefreshToken()); ((OAuthRegisteredService) regSvc).setJsonFormat(oauthBean.isJsonFormat()); if (StringUtils.equalsIgnoreCase(type, RegisteredServiceTypeEditBean.OIDC.toString())) { ((OidcRegisteredService) regSvc).setJwks(data.getOidc().getJwks()); ((OidcRegisteredService) regSvc).setSignIdToken(data.getOidc().isSignToken()); } } else if (StringUtils.equalsIgnoreCase(type, RegisteredServiceTypeEditBean.SAML.toString())) { regSvc = new SamlRegisteredService(); final RegisteredServiceSamlTypeEditBean samlBean = data.getSaml(); ((SamlRegisteredService) regSvc).setEncryptAssertions(samlBean.isEncAssert()); ((SamlRegisteredService) regSvc).setSignAssertions(samlBean.isSignAssert()); ((SamlRegisteredService) regSvc).setSignResponses(samlBean.isSignResp()); ((SamlRegisteredService) regSvc).setMetadataLocation(samlBean.getMdLoc()); ((SamlRegisteredService) regSvc).setMetadataSignatureLocation(samlBean.getMdSigLoc()); ((SamlRegisteredService) regSvc).setMetadataMaxValidity(samlBean.getMdMaxVal()); ((SamlRegisteredService) 
regSvc).setRequiredAuthenticationContextClass(samlBean.getAuthCtxCls()); } else { if (RegexUtils.isValidRegex(data.getServiceId())) { regSvc = new RegexRegisteredService(); } else { throw new RuntimeException("Invalid service type."); } } // set the assigned Id final long assignedId = Long.parseLong(data.getAssignedId()); if (assignedId <= 0) { regSvc.setId(RegisteredService.INITIAL_IDENTIFIER_VALUE); } else { regSvc.setId(assignedId); } // set simple RegisteredService properties regSvc.setServiceId(data.getServiceId()); regSvc.setName(data.getName()); regSvc.setDescription(data.getDescription()); if (StringUtils.isNotBlank(data.getLogoUrl())) { regSvc.setLogo(new URL(data.getLogoUrl())); } regSvc.setTheme(data.getTheme()); regSvc.setEvaluationOrder(data.getEvalOrder()); regSvc.setRequiredHandlers(data.getRequiredHandlers()); // process logout settings regSvc.setLogoutType(parseLogoutType(data.getLogoutType())); if (StringUtils.isNotBlank(data.getLogoutUrl())) { regSvc.setLogoutUrl(new URL(data.getLogoutUrl())); } // process the Public Key final RegisteredServicePublicKeyEditBean publicKey = data.getPublicKey(); if (publicKey != null && publicKey.isValid()) { regSvc.setPublicKey(new RegisteredServicePublicKeyImpl(publicKey.getLocation(), publicKey .getAlgorithm())); } final Map<String, Set<String>> props = data.getProps(); for (final Map.Entry<String, Set<String>> stringSetEntry : props.entrySet()) { final DefaultRegisteredServiceProperty value = new DefaultRegisteredServiceProperty(); value.setValues(stringSetEntry.getValue()); regSvc.getProperties().put(stringSetEntry.getKey(), value); } return regSvc; } catch (final Exception e) { throw Throwables.propagate(e); } } /** * Parse raw logout type string to {@link LogoutType}. 
* * @param logoutType the reg svc */ private static LogoutType parseLogoutType(final String logoutType) { if (StringUtils.equalsIgnoreCase(logoutType, RegisteredServiceLogoutTypeEditBean.BACK.toString())) { return LogoutType.BACK_CHANNEL; } else if (StringUtils.equalsIgnoreCase(logoutType, RegisteredServiceLogoutTypeEditBean.FRONT.toString())) { return LogoutType.FRONT_CHANNEL; } else { return LogoutType.NONE; } } }
package org.sbolstandard.core2;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.StringReader;
import java.io.Writer;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Set;

import javax.xml.namespace.QName;

/**
 * Methods to convert GFF3 to/from SBOL ComponentDefinitions and Sequences.
 *
 * @author Chris Myers
 * @author Ernst Oberortner
 * @version 2.1
 */
class GFF3 {

    public static final String GFF3NAMESPACE = "http://sbolstandard.org/gff3#";
    public static final String GFF3PREFIX = "gff3";
    public static final String SOURCE = "source";
    public static final String SCORE = "score";
    public static final String PHASE = "phase";

    // "look-ahead" line shared between readGFF3Line() calls; reset at the start of read().
    private static String nextLine = null;

    /**
     * Serializes the root ComponentDefinitions of an SBOLDocument to the given
     * output stream in GFF3 format (directives, one feature line per
     * SequenceAnnotation, then the DNA sequences in FASTA form).
     *
     * @param document a given SBOLDocument
     * @param out the output stream to serialize into
     * @throws IOException input/output operation failed
     * @throws SBOLConversionException violates conversion limitations
     */
    static void write(SBOLDocument document, OutputStream out) throws IOException, SBOLConversionException {
        Writer w = new OutputStreamWriter(out, StandardCharsets.UTF_8);
        w.write("##gff-version 3\n");
        // One ##sequence-region directive per root, spanning the min start / max end
        // over all Range locations of its SequenceAnnotations.
        for (ComponentDefinition componentDefinition : document.getRootComponentDefinitions()) {
            int start = -1;
            int end = -1;
            for (SequenceAnnotation sa : componentDefinition.getSequenceAnnotations()) {
                for (Location loc : sa.getLocations()) {
                    if (loc instanceof Range) {
                        Range range = (Range) loc;
                        if (start == -1 || range.getStart() < start) {
                            start = range.getStart();
                        }
                        if (end == -1 || range.getEnd() > end) {
                            end = range.getEnd();
                        }
                    }
                }
            }
            w.write("##sequence-region " + componentDefinition.getDisplayId() + " " + start + " " + end + "\n");
        }
        for (ComponentDefinition componentDefinition : document.getRootComponentDefinitions()) {
            write(w, componentDefinition, componentDefinition.getDisplayId(), null, 0, true, 0);
        }
        // FASTA section with the actual DNA sequences.
        for (ComponentDefinition componentDefinition : document.getRootComponentDefinitions()) {
            Sequence seq = componentDefinition.getSequenceByEncoding(Sequence.IUPAC_DNA);
            // Guard: a root without an IUPAC-DNA sequence previously caused an NPE here;
            // such roots are now skipped in the FASTA section.
            if (seq != null) {
                w.write(">" + componentDefinition.getDisplayId() + "\n");
                FASTA.writeFASTALine(w, seq.getElements());
            }
        }
        w.close();
    }

    /**
     * Writes one GFF3 feature line per SequenceAnnotation of the given
     * ComponentDefinition, recursing into sub-components.
     *
     * @param w writer to emit feature lines to
     * @param componentDefinition the definition whose annotations are written
     * @param id value of the first (seqid) column
     * @param parentId displayId used for the Parent= attribute, or null for roots
     * @param offset coordinate offset of this definition within the top-level sequence
     * @param inline whether the cumulative orientation down to here is inline
     * @param featureEnd end coordinate of the enclosing feature (used to flip
     *        coordinates for reverse-complement features)
     * @throws IOException input/output operation failed
     * @throws SBOLConversionException if the definition is not a DNA region
     */
    private static void write(Writer w, ComponentDefinition componentDefinition, String id, String parentId,
            int offset, boolean inline, int featureEnd) throws IOException, SBOLConversionException {
        if (!componentDefinition.getTypes().contains(ComponentDefinition.DNA_REGION)) {
            throw new SBOLConversionException("GFF 3 is only supported for DNA components.");
        }
        SequenceOntology so = new SequenceOntology();
        for (SequenceAnnotation sa : componentDefinition.getSequenceAnnotations()) {
            // Column 1: seqid
            w.write(id + "\t");
            // Column 2: source (stored as a gff3:source annotation, "." if absent)
            Annotation annotation = sa.getAnnotation(new QName(GFF3NAMESPACE, SOURCE, GFF3PREFIX));
            String source = ".";
            if (annotation != null) {
                source = annotation.getStringValue();
            }
            w.write(source + "\t");
            // Column 3: type — first SO role name found on the sub-component (or on the
            // annotation itself when there is no component).
            // NOTE(review): if the last role has no SO name, type ends up null and the
            // literal text "null" is written — preserved as-is.
            String type = "sequence_feature";
            if (sa.isSetComponent()) {
                ComponentDefinition comp = sa.getComponentDefinition();
                if (comp != null) {
                    for (URI role : comp.getRoles()) {
                        type = so.getName(role);
                        if (type != null) {
                            break;
                        }
                    }
                }
            } else {
                for (URI role : sa.getRoles()) {
                    type = so.getName(role);
                    if (type != null) {
                        break;
                    }
                }
            }
            w.write(type + "\t");
            // Columns 4-7: start, end, score, strand — taken from the FIRST Range only.
            for (Location location : sa.getLocations()) {
                if (location instanceof Range) {
                    Range range = (Range) location;
                    int start = offset + range.getStart();
                    int end = offset + range.getEnd();
                    if (!inline) {
                        // Reverse-complement context: flip coordinates against the enclosing feature.
                        int tmpOffset = (featureEnd - (GenBank.getFeatureEnd(sa) + GenBank.getFeatureStart(sa) - 1) - offset);
                        start = tmpOffset + range.getStart();
                        end = tmpOffset + range.getEnd();
                    }
                    w.write(start + "\t" + end + "\t");
                    annotation = sa.getAnnotation(new QName(GFF3NAMESPACE, SCORE, GFF3PREFIX));
                    String score = ".";
                    if (annotation != null) {
                        score = annotation.getStringValue();
                    }
                    w.write(score + "\t");
                    if (!range.isSetOrientation()) {
                        w.write(".\t");
                    } else if (range.getOrientation().equals(OrientationType.INLINE)) {
                        w.write("+\t");
                    } else if (range.getOrientation().equals(OrientationType.REVERSECOMPLEMENT)) {
                        w.write("-\t");
                    } else {
                        w.write(".\t");
                    }
                    break;
                }
            }
            // Column 8: phase (stored as a gff3:phase annotation, "0" if absent)
            annotation = sa.getAnnotation(new QName(GFF3NAMESPACE, PHASE, GFF3PREFIX));
            String phase = "0";
            if (annotation != null) {
                phase = annotation.getStringValue();
            }
            w.write(phase + "\t");
            // Column 9: attributes (ID, optional Name and Parent)
            String featureId = sa.getDisplayId();
            if (sa.isSetComponent() && sa.getComponentDefinition() != null) {
                featureId = sa.getComponentDefinition().getDisplayId();
            }
            w.write("ID=" + featureId);
            String label = null;
            if (sa.isSetName()) {
                label = sa.getName();
            } else if (sa.isSetComponent() && sa.getComponent() != null && sa.getComponent().isSetName()) {
                label = sa.getComponent().getName();
            } else if (sa.isSetComponent() && sa.getComponent().getDefinition() != null
                    && sa.getComponent().getDefinition().isSetName()) {
                label = sa.getComponent().getDefinition().getName();
            }
            if (label != null) {
                w.write(";Name=" + label);
            }
            if (parentId != null) {
                w.write(";Parent=" + parentId);
            }
            w.write("\n");
            // Recurse into the sub-component, accumulating offset and orientation.
            if (sa.isSetComponent()) {
                ComponentDefinition comp = sa.getComponentDefinition();
                if (comp != null) {
                    int newFeatureEnd = featureEnd;
                    if (!GenBank.isInlineFeature(sa)) {
                        newFeatureEnd = GenBank.getFeatureEnd(sa);
                    }
                    write(w, comp, id, featureId, offset + GenBank.getFeatureStart(sa) - 1,
                            !(inline ^ GenBank.isInlineFeature(sa)), newFeatureEnd);
                }
            }
        }
    }

    /**
     * Reads the next trimmed line, maintaining the one-line look-ahead in
     * {@code nextLine}. Returns null at end of input.
     */
    private static String readGFF3Line(BufferedReader br) throws IOException {
        String newLine;
        if (nextLine == null) {
            newLine = br.readLine();
            if (newLine == null) {
                return null;
            }
            newLine = newLine.trim();
        } else {
            newLine = nextLine;
        }
        // Refill the look-ahead for the next call.
        nextLine = br.readLine();
        if (nextLine != null) {
            nextLine = nextLine.trim();
        }
        return newLine;
    }

    /**
     * Finds the ComponentDefinition that carries a SequenceAnnotation with the
     * given displayId, or null if none does.
     */
    private static ComponentDefinition findParent(SBOLDocument doc, String id) {
        for (ComponentDefinition cd : doc.getComponentDefinitions()) {
            if (cd.getSequenceAnnotation(id) != null) {
                return cd;
            }
        }
        return null;
    }

    /**
     * Computes the cumulative start offset of the named feature within the
     * top-level sequence by walking up the parent chain.
     */
    private static int findOffset(SBOLDocument doc, String parent) {
        int offset = 0;
        ComponentDefinition cd = findParent(doc, parent);
        if (cd != null) {
            SequenceAnnotation sa = cd.getSequenceAnnotation(parent);
            for (Location location : sa.getLocations()) {
                if (location instanceof Range) {
                    Range range = (Range) location;
                    offset = range.getStart() - 1;
                }
            }
            offset += findOffset(doc, cd.getDisplayId());
        }
        return offset;
    }

    /**
     * Creates (or extends with another Range) a SequenceAnnotation on {@code cd}
     * from one parsed GFF3 feature line.
     *
     * @param cd the definition to annotate
     * @param id annotation displayId (derived from name or start when absent)
     * @param name optional Name attribute
     * @param type SO term name for the feature type
     * @param start 1-based start column (absolute; {@code offset} is subtracted)
     * @param end 1-based end column (absolute; {@code offset} is subtracted)
     * @param strand "+", "-", or "." from the strand column
     * @param offset offset of {@code cd} within the top-level sequence
     * @param source source column, stored as a gff3:source annotation
     * @param score score column, stored as a gff3:score annotation
     * @param phase phase column, stored as a gff3:phase annotation
     * @throws SBOLConversionException if {@code type} is not a valid SO term
     * @throws SBOLValidationException if creating SBOL objects fails validation
     */
    private static void addSequenceAnnotation(ComponentDefinition cd, String id, String name, String type,
            String start, String end, String strand, int offset, String source, String score, String phase)
            throws SBOLConversionException, SBOLValidationException {
        if (id == null) {
            if (name != null) {
                id = URIcompliance.fixDisplayId(name);
            } else {
                // NOTE(review): "Sequeance" typo preserved — changing it would alter
                // the displayIds generated for existing documents.
                id = "SequeanceAnnotation" + start;
            }
        }
        SequenceOntology so = new SequenceOntology();
        URI typeURI = so.getURIbyName(type);
        if (typeURI == null) {
            // NOTE(review): debug output retained; consider replacing with a logger.
            System.out.println("id = " + id + " name = " + name + " type = " + type
                    + " start = " + start + " end = " + end);
            throw new SBOLConversionException("Type " + type + " is not a valid Sequence Ontology (SO) term");
        }
        int startInt = Integer.parseInt(start) - offset;
        int endInt = Integer.parseInt(end) - offset;
        SequenceAnnotation sa = cd.getSequenceAnnotation(id);
        if (sa == null) {
            if (strand.equals("+")) {
                sa = cd.createSequenceAnnotation(id, "Range", startInt, endInt, OrientationType.INLINE);
            } else if (strand.equals("-")) {
                sa = cd.createSequenceAnnotation(id, "Range", startInt, endInt, OrientationType.REVERSECOMPLEMENT);
            } else {
                sa = cd.createSequenceAnnotation(id, "Range", startInt, endInt);
            }
            sa.setName(name);
            sa.addRole(typeURI);
            sa.createAnnotation(new QName(GFF3NAMESPACE, SOURCE, GFF3PREFIX), source);
            sa.createAnnotation(new QName(GFF3NAMESPACE, SCORE, GFF3PREFIX), score);
            sa.createAnnotation(new QName(GFF3NAMESPACE, PHASE, GFF3PREFIX), phase);
        } else {
            // Same feature id seen again: add a further Range with a fresh location id.
            int i = 1;
            while (sa.getLocation("Range" + i) != null) {
                i++;
            }
            if (strand.equals("+")) {
                sa.addRange("Range" + i, startInt, endInt, OrientationType.INLINE);
            } else if (strand.equals("-")) {
                sa.addRange("Range" + i, startInt, endInt, OrientationType.REVERSECOMPLEMENT);
            } else {
                sa.addRange("Range" + i, startInt, endInt);
            }
        }
    }

    /**
     * Parses GFF3 text into ComponentDefinitions, SequenceAnnotations and
     * Sequences in the given SBOLDocument.
     *
     * @param doc document to populate
     * @param stringBuffer the complete GFF3 text
     * @param URIprefix URI prefix for created objects (currently unused here;
     *        assumed to be set on the document by the caller — TODO confirm)
     * @param version version for created objects
     * @param encoding sequence encoding (currently unused; IUPAC DNA is assumed)
     * @throws SBOLValidationException if an SBOL validation rule was violated
     * @throws IOException input/output operation failed
     * @throws SBOLConversionException on malformed GFF3 input
     */
    static void read(SBOLDocument doc, String stringBuffer, String URIprefix, String version, URI encoding)
            throws SBOLValidationException, IOException, SBOLConversionException {
        Set<ComponentDefinition> parentCDs = new HashSet<>();

        // Reset the global static look-ahead needed for parsing.
        nextLine = null;
        boolean sequenceMode = false;
        String contigId = null;
        StringBuilder sbSequence = new StringBuilder();
        String strLine;
        BufferedReader br = new BufferedReader(new StringReader(stringBuffer));
        while ((strLine = readGFF3Line(br)) != null) {
            strLine = strLine.trim();
            if (strLine.startsWith(">")) {
                // Start of a FASTA record: flush the previous record's sequence.
                if (sequenceMode) {
                    Sequence sequence = doc.createSequence(contigId + "_seq", version,
                            sbSequence.toString(), Sequence.IUPAC_DNA);
                    sbSequence = new StringBuilder();
                    ComponentDefinition cd = doc.getComponentDefinition(contigId, version);
                    if (cd != null) {
                        cd.addSequence(sequence);
                    }
                }
                sequenceMode = true;
                contigId = URIcompliance.fixDisplayId(strLine.replaceFirst(">", "").trim());
            } else if (sequenceMode) {
                // Inside a FASTA record: concatenate sequence characters, dropping spaces.
                String[] strSplit = strLine.split(" ");
                for (int i = 0; i < strSplit.length; i++) {
                    sbSequence.append(strSplit[i]);
                }
            } else if (strLine.startsWith("##gff-version 3")) {
                // skip the version directive
            } else if (strLine.startsWith("##sequence-region")) {
                String[] splits = strLine.split("\\s+");
                if (splits.length < 4) {
                    throw new SBOLConversionException("Misformatted sequence region, expected 4 columns:\n" + strLine);
                }
                String id = URIcompliance.fixDisplayId(splits[1]);
                doc.createComponentDefinition(id, version, ComponentDefinition.DNA_REGION);
            } else if (!strLine.startsWith("##")) {
                // A feature line: 9 tab-separated columns.
                String[] splits = strLine.split("\\t");
                if (splits.length < 9) {
                    throw new SBOLConversionException("Misformatted annotation, expected 9 columns:\n" + strLine);
                }
                String seqId = URIcompliance.fixDisplayId(splits[0]);
                String source = splits[1];
                String type = splits[2];
                String start = splits[3];
                String end = splits[4];
                String score = splits[5];
                String strand = splits[6];
                String phase = splits[7];
                String attributesCol = splits[8];
                String id = null;
                String name = null;
                int offset = 0;
                ComponentDefinition cd = doc.getComponentDefinition(seqId, version);
                ComponentDefinition parentCD = null;
                if (cd == null) {
                    throw new SBOLConversionException("Sequence region missing for sequence " + seqId);
                }
                String[] attributes = attributesCol.split(";");
                parentCDs.clear();
                for (String attribute : attributes) {
                    if (attribute.startsWith("ID=")) {
                        id = URIcompliance.fixDisplayId(attribute.replace("ID=", ""));
                    } else if (attribute.startsWith("Name=")) {
                        name = attribute.replace("Name=", "");
                    } else if (attribute.startsWith("Parent=")) {
                        String[] parents = attribute.replace("Parent=", "").split(",");
                        for (String parent : parents) {
                            parentCD = doc.getComponentDefinition(parent, version);
                            if (parentCD == null) {
                                // The parent exists only as a feature so far: promote it to a
                                // ComponentDefinition and link it via a Component.
                                cd = findParent(doc, parent);
                                if (cd == null) {
                                    // Previously an NPE; fail with a clear conversion error instead.
                                    throw new SBOLConversionException("Cannot find parent feature " + parent);
                                }
                                SequenceAnnotation sa = cd.getSequenceAnnotation(parent);
                                parentCD = doc.createComponentDefinition(parent, version, ComponentDefinition.DNA_REGION);
                                parentCD.setRoles(sa.getRoles());
                                sa.clearRoles();
                                cd.createComponent(parent + "_comp", AccessType.PUBLIC, parentCD.getDisplayId());
                                sa.setComponent(parent + "_comp");
                            }
                            parentCDs.add(parentCD);
                        }
                    }
                }
                if (parentCDs.size() > 0) {
                    // Attach the annotation to every parent, with coordinates made relative.
                    for (ComponentDefinition pCD : parentCDs) {
                        offset = findOffset(doc, pCD.getDisplayId());
                        addSequenceAnnotation(pCD, id, name, type, start, end, strand, offset, source, score, phase);
                    }
                } else {
                    addSequenceAnnotation(cd, id, name, type, start, end, strand, 0, source, score, phase);
                }
            }
        }
        // Flush the final FASTA record, if any.
        if (sequenceMode) {
            Sequence sequence = doc.createSequence(contigId + "_seq", version,
                    sbSequence.toString(), Sequence.IUPAC_DNA);
            sbSequence = new StringBuilder();
            ComponentDefinition cd = doc.getComponentDefinition(contigId, version);
            if (cd != null) {
                cd.addSequence(sequence);
            }
        }
        br.close();
    }
}
package org.cornutum.examples;

import org.junit.Test;

import org.hamcrest.Matcher;
import static io.restassured.RestAssured.*;
import static org.hamcrest.Matchers.*;

/**
 * Request-validation tests for the /post resource (HEAD, PATCH, PUT), presumably
 * generated by Tcases for OpenAPI (see the {@code tcasesApiServer} system property).
 * Valid-input cases expect a 2xx status; each invalid-input case expects 4xx and
 * carries a comment naming the input condition under test. The base URI is taken
 * from the {@code tcasesApiServer} system property.
 */
public class AllPaths_PostTest {

    // ---- HEAD /post: valid inputs ----

    @Test
    public void headPost_UserAttributesDefined_Is_Yes() {
        given()
            .baseUri( forTestServer())
            .queryParam( "post?[post-references]", "0,1")
            .queryParam( "user attributes[user-type]", "Typical User")
        .when()
            .request( "HEAD", "/post")
        .then()
            .statusCode( isSuccess()) ;
    }

    @Test
    public void headPost_UserAttributesValuePropertiesUserTypeValue_Is_VIP() {
        given()
            .baseUri( forTestServer())
            .queryParam( "post?[post-references]", "1,2")
            .queryParam( "user attributes[user-type]", "VIP!")
        .when()
            .request( "HEAD", "/post")
        .then()
            .statusCode( isSuccess()) ;
    }

    @Test
    public void headPost_PostValuePropertiesPostReferencesItemsContainsValue_Is_2() {
        given()
            .baseUri( forTestServer())
            .queryParam( "post?[post-references]", "2,1")
            .queryParam( "user attributes[user-type]", "Typical User")
        .when()
            .request( "HEAD", "/post")
        .then()
            .statusCode( isSuccess()) ;
    }

    // ---- HEAD /post: invalid "user attributes" parameter ----

    @Test
    public void headPost_UserAttributesDefined_Is_No() {
        given()
            .baseUri( forTestServer())
            .queryParam( "post?[post-references]", "0,2")
        .when()
            .request( "HEAD", "/post")
        .then()
            // user-attributes.Defined=No
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void headPost_UserAttributesType_Is_Null() {
        given()
            .baseUri( forTestServer())
            .queryParam( "post?[post-references]", "0,2")
            .queryParam( "user attributes", (String) null)
        .when()
            .request( "HEAD", "/post")
        .then()
            // user-attributes.Type=null
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void headPost_UserAttributesType_Is_NotObject() {
        given()
            .baseUri( forTestServer())
            .queryParam( "post?[post-references]", "0,2")
            .queryParam( "user attributes", "-163.3")
        .when()
            .request( "HEAD", "/post")
        .then()
            // user-attributes.Type=Not object
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void headPost_UserAttributesValuePropertiesUserTypeDefined_Is_No() {
        given()
            .baseUri( forTestServer())
            .queryParam( "post?[post-references]", "0,2")
        .when()
            .request( "HEAD", "/post")
        .then()
            // user-attributes.Value.Properties.user-type.Defined=No
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void headPost_UserAttributesValuePropertiesUserTypeType_Is_Null() {
        given()
            .baseUri( forTestServer())
            .queryParam( "post?[post-references]", "0,2")
            .queryParam( "user attributes[user-type]", (String) null)
        .when()
            .request( "HEAD", "/post")
        .then()
            // user-attributes.Value.Properties.user-type.Type=null
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void headPost_UserAttributesValuePropertiesUserTypeValue_Is_Other() {
        given()
            .baseUri( forTestServer())
            .queryParam( "post?[post-references]", "0,1")
            .queryParam( "user attributes[user-type]", "*76s/1*r")
        .when()
            .request( "HEAD", "/post")
        .then()
            // user-attributes.Value.Properties.user-type.Value=Other
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void headPost_UserAttributesValuePropertiesAdditional_Is_Yes() {
        given()
            .baseUri( forTestServer())
            .queryParam( "post?[post-references]", "0,2")
            .queryParam( "user attributes[user-type]", "VIP!")
            .queryParam( "user attributes[wfgawhzebqxw]", "-993.3")
            .queryParam( "user attributes[bt]", "-646")
            .queryParam( "user attributes[d]", "429")
        .when()
            .request( "HEAD", "/post")
        .then()
            // user-attributes.Value.Properties.Additional=Yes
            .statusCode( isBadRequest()) ;
    }

    // ---- HEAD /post: invalid "post?" parameter ----

    @Test
    public void headPost_PostDefined_Is_No() {
        given()
            .baseUri( forTestServer())
            .queryParam( "user attributes[user-type]", "VIP!")
        .when()
            .request( "HEAD", "/post")
        .then()
            // post.Defined=No
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void headPost_PostType_Is_Null() {
        given()
            .baseUri( forTestServer())
            .queryParam( "post?", (String) null)
            .queryParam( "user attributes[user-type]", "VIP!")
        .when()
            .request( "HEAD", "/post")
        .then()
            // post.Type=null
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void headPost_PostType_Is_NotObject() {
        given()
            .baseUri( forTestServer())
            .queryParam( "post?", "-689.9")
            .queryParam( "user attributes[user-type]", "VIP!")
        .when()
            .request( "HEAD", "/post")
        .then()
            // post.Type=Not object
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void headPost_PostValuePropertiesPostReferencesDefined_Is_No() {
        given()
            .baseUri( forTestServer())
            .queryParam( "user attributes[user-type]", "VIP!")
        .when()
            .request( "HEAD", "/post")
        .then()
            // post.Value.Properties.post-references.Defined=No
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void headPost_PostValuePropertiesPostReferencesType_Is_Null() {
        given()
            .baseUri( forTestServer())
            .queryParam( "post?[post-references]", (String) null)
            .queryParam( "user attributes[user-type]", "VIP!")
        .when()
            .request( "HEAD", "/post")
        .then()
            // post.Value.Properties.post-references.Type=null
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void headPost_PostValuePropertiesPostReferencesType_Is_NotArray() {
        given()
            .baseUri( forTestServer())
            .queryParam( "post?[post-references]", "true")
            .queryParam( "user attributes[user-type]", "VIP!")
        .when()
            .request( "HEAD", "/post")
        .then()
            // post.Value.Properties.post-references.Type=Not array
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void headPost_PostValuePropertiesPostReferencesItemsSize_Is_1() {
        given()
            .baseUri( forTestServer())
            .queryParam( "post?[post-references]", "0")
            .queryParam( "user attributes[user-type]", "VIP!")
        .when()
            .request( "HEAD", "/post")
        .then()
            // post.Value.Properties.post-references.Items.Size=1
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void headPost_PostValuePropertiesPostReferencesItemsSize_Is_3() {
        given()
            .baseUri( forTestServer())
            .queryParam( "post?[post-references]", "0,1,2")
            .queryParam( "user attributes[user-type]", "VIP!")
        .when()
            .request( "HEAD", "/post")
        .then()
            // post.Value.Properties.post-references.Items.Size=3
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void headPost_PostValuePropertiesPostReferencesItemsContainsType_Is_Null() {
        given()
            .baseUri( forTestServer())
            .queryParam( "post?[post-references]", ",1")
            .queryParam( "user attributes[user-type]", "VIP!")
        .when()
            .request( "HEAD", "/post")
        .then()
            // post.Value.Properties.post-references.Items.Contains.Type=null
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void headPost_PostValuePropertiesPostReferencesItemsContainsType_Is_NotInteger() {
        given()
            .baseUri( forTestServer())
            .queryParam( "post?[post-references]", ",,NU,2")
            .queryParam( "user attributes[user-type]", "VIP!")
        .when()
            .request( "HEAD", "/post")
        .then()
            // post.Value.Properties.post-references.Items.Contains.Type=Not integer
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void headPost_PostValuePropertiesPostReferencesItemsContainsValue_Is_Other() {
        given()
            .baseUri( forTestServer())
            .queryParam( "post?[post-references]", "518413185,0")
            .queryParam( "user attributes[user-type]", "VIP!")
        .when()
            .request( "HEAD", "/post")
        .then()
            // post.Value.Properties.post-references.Items.Contains.Value.Is=Other
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void headPost_PostValuePropertiesPostReferencesItemsUnique_Is_No() {
        given()
            .baseUri( forTestServer())
            .queryParam( "post?[post-references]", "0,0")
            .queryParam( "user attributes[user-type]", "VIP!")
        .when()
            .request( "HEAD", "/post")
        .then()
            // post.Value.Properties.post-references.Items.Unique=No
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void headPost_PostValuePropertiesAdditional_Is_Yes() {
        given()
            .baseUri( forTestServer())
            .queryParam( "post?[post-references]", "0,1")
            .queryParam( "post?[pfuxkykifiozux]", "g\"YtKR,&")
            .queryParam( "user attributes[user-type]", "VIP!")
        .when()
            .request( "HEAD", "/post")
        .then()
            // post.Value.Properties.Additional=Yes
            .statusCode( isBadRequest()) ;
    }

    // ---- PATCH /post: valid "Post Marks" parameter ----

    @Test
    public void patchPost_PostMarksDefined_Is_Yes() {
        given()
            .baseUri( forTestServer())
            .queryParam( "Post Marks", "{X}")
        .when()
            .request( "PATCH", "/post")
        .then()
            .statusCode( isSuccess()) ;
    }

    @Test
    public void patchPost_PostMarksItemsSize_Is_3() {
        given()
            .baseUri( forTestServer())
            .queryParam( "Post Marks", "<Y> {X} #Z")
        .when()
            .request( "PATCH", "/post")
        .then()
            .statusCode( isSuccess()) ;
    }

    @Test
    public void patchPost_PostMarksItemsContainsValue_Is_Z() {
        given()
            .baseUri( forTestServer())
            .queryParam( "Post Marks", "#Z")
        .when()
            .request( "PATCH", "/post")
        .then()
            .statusCode( isSuccess()) ;
    }

    @Test
    public void patchPost_PostMarksItemsUnique_Is_No() {
        given()
            .baseUri( forTestServer())
            .queryParam( "Post Marks", "{X} {X} {X}")
        .when()
            .request( "PATCH", "/post")
        .then()
            .statusCode( isSuccess()) ;
    }

    // ---- PATCH /post: invalid "Post Marks" parameter ----

    @Test
    public void patchPost_PostMarksDefined_Is_No() {
        given()
            .baseUri( forTestServer())
        .when()
            .request( "PATCH", "/post")
        .then()
            // Post-Marks.Defined=No
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void patchPost_PostMarksType_Is_Null() {
        given()
            .baseUri( forTestServer())
            .queryParam( "Post Marks", (String) null)
        .when()
            .request( "PATCH", "/post")
        .then()
            // Post-Marks.Type=null
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void patchPost_PostMarksType_Is_NotArray() {
        given()
            .baseUri( forTestServer())
            .queryParam( "Post Marks", "+")
        .when()
            .request( "PATCH", "/post")
        .then()
            // Post-Marks.Type=Not array
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void patchPost_PostMarksItemsSize_Is_4() {
        given()
            .baseUri( forTestServer())
            .queryParam( "Post Marks", "<Y> <Y> <Y> {X}")
        .when()
            .request( "PATCH", "/post")
        .then()
            // Post-Marks.Items.Size=4
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void patchPost_PostMarksItemsContainsType_Is_Null() {
        given()
            .baseUri( forTestServer())
            .queryParam( "Post Marks", "")
        .when()
            .request( "PATCH", "/post")
        .then()
            // Post-Marks.Items.Contains.Type=null
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void patchPost_PostMarksItemsContainsValue_Is_Other() {
        given()
            .baseUri( forTestServer())
            .queryParam( "Post Marks", "#/lL\"%`\\M,7/RPK2]eZ/b\"LtW=MB6,(")
        .when()
            .request( "PATCH", "/post")
        .then()
            // Post-Marks.Items.Contains.Value=Other
            .statusCode( isBadRequest()) ;
    }

    // ---- PUT /post: valid inputs ----

    @Test
    public void putPost_PostIdDefined_Is_Yes() {
        given()
            .baseUri( forTestServer())
            .queryParam( "postId", "0")
            .contentType( "application/x-www-form-urlencoded")
            .formParam( "approved", "true")
            .formParam( "reviewer", "Larry Moe")
        .when()
            .request( "PUT", "/post")
        .then()
            .statusCode( isSuccess()) ;
    }

    @Test
    public void putPost_PostIdValue_Is_Gt_0() {
        given()
            .baseUri( forTestServer())
            .queryParam( "postId", "884128300094585099.3")
            .contentType( "application/x-www-form-urlencoded")
            .formParam( "approved", "false")
            .formParam( "reviewer", "(?)")
        .when()
            .request( "PUT", "/post")
        .then()
            .statusCode( isSuccess()) ;
    }

    @Test
    public void putPost_BodyApplicationXWwwFormUrlencodedValuePropertiesReviewerValue_Is_MeYou() {
        given()
            .baseUri( forTestServer())
            .queryParam( "postId", "0")
            .contentType( "application/x-www-form-urlencoded")
            .formParam( "approved", "true")
            .formParam( "reviewer", "Me+You")
        .when()
            .request( "PUT", "/post")
        .then()
            .statusCode( isSuccess()) ;
    }

    // ---- PUT /post: invalid "postId" parameter ----

    @Test
    public void putPost_PostIdDefined_Is_No() {
        given()
            .baseUri( forTestServer())
            .contentType( "application/x-www-form-urlencoded")
            .formParam( "approved", "false")
            .formParam( "reviewer", "Larry Moe")
        .when()
            .request( "PUT", "/post")
        .then()
            // postId.Defined=No
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void putPost_PostIdType_Is_Null() {
        given()
            .baseUri( forTestServer())
            .queryParam( "postId", (String) null)
            .contentType( "application/x-www-form-urlencoded")
            .formParam( "approved", "false")
            .formParam( "reviewer", "Larry Moe")
        .when()
            .request( "PUT", "/post")
        .then()
            // postId.Type=null
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void putPost_PostIdType_Is_NotNumber() {
        given()
            .baseUri( forTestServer())
            .queryParam( "postId", "%")
            .contentType( "application/x-www-form-urlencoded")
            .formParam( "approved", "false")
            .formParam( "reviewer", "Larry Moe")
        .when()
            .request( "PUT", "/post")
        .then()
            // postId.Type=Not number
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void putPost_PostIdValue_Is_M1() {
        given()
            .baseUri( forTestServer())
            .queryParam( "postId", "-1")
            .contentType( "application/x-www-form-urlencoded")
            .formParam( "approved", "false")
            .formParam( "reviewer", "Larry Moe")
        .when()
            .request( "PUT", "/post")
        .then()
            // postId.Value.Is=-1
            .statusCode( isBadRequest()) ;
    }

    // ---- PUT /post: invalid request body ----

    @Test
    public void putPost_BodyDefined_Is_No() {
        given()
            .baseUri( forTestServer())
            .queryParam( "postId", "579110988210992054.5")
        .when()
            .request( "PUT", "/post")
        .then()
            // Body.Defined=No
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void putPost_BodyMediaType_Is_Other() {
        given()
            .baseUri( forTestServer())
            .queryParam( "postId", "100055597218570470.8")
            .contentType( "application/xml")
            .request().body( ">jg-FQI")
        .when()
            .request( "PUT", "/post")
        .then()
            // Body.Media-Type=Other
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void putPost_BodyApplicationXWwwFormUrlencodedType_Is_Null() {
        given()
            .baseUri( forTestServer())
            .queryParam( "postId", "545768800747318227.7")
            .contentType( "application/x-www-form-urlencoded")
        .when()
            .request( "PUT", "/post")
        .then()
            // Body.application-x-www-form-urlencoded.Type=null
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void putPost_BodyApplicationXWwwFormUrlencodedType_Is_NotObject() {
        given()
            .baseUri( forTestServer())
            .queryParam( "postId", "787781271506673512.7")
            .contentType( "application/x-www-form-urlencoded")
            .formParam( "string", "")
        .when()
            .request( "PUT", "/post")
        .then()
            // Body.application-x-www-form-urlencoded.Type=Not object
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void putPost_BodyApplicationXWwwFormUrlencodedValuePropertiesApprovedDefined_Is_No() {
        given()
            .baseUri( forTestServer())
            .queryParam( "postId", "887390451195556957.7")
            .contentType( "application/x-www-form-urlencoded")
            .formParam( "reviewer", "Larry Moe")
        .when()
            .request( "PUT", "/post")
        .then()
            // Body.application-x-www-form-urlencoded.Value.Properties.approved.Defined=No
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void putPost_BodyApplicationXWwwFormUrlencodedValuePropertiesApprovedType_Is_Null() {
        given()
            .baseUri( forTestServer())
            .queryParam( "postId", "218911377319422868.8")
            .contentType( "application/x-www-form-urlencoded")
            .formParam( "approved", (String) null)
            .formParam( "reviewer", "Larry Moe")
        .when()
            .request( "PUT", "/post")
        .then()
            // Body.application-x-www-form-urlencoded.Value.Properties.approved.Type=null
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void putPost_BodyApplicationXWwwFormUrlencodedValuePropertiesApprovedType_Is_NotBoolean() {
        given()
            .baseUri( forTestServer())
            .queryParam( "postId", "847512139010470218.2")
            .contentType( "application/x-www-form-urlencoded")
            .formParam( "approved", "eytx,7ea{")
            .formParam( "reviewer", "Larry Moe")
        .when()
            .request( "PUT", "/post")
        .then()
            // Body.application-x-www-form-urlencoded.Value.Properties.approved.Type=Not boolean
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void putPost_BodyApplicationXWwwFormUrlencodedValuePropertiesReviewerDefined_Is_No() {
        given()
            .baseUri( forTestServer())
            .queryParam( "postId", "711003745143914146.9")
            .contentType( "application/x-www-form-urlencoded")
            .formParam( "approved", "false")
        .when()
            .request( "PUT", "/post")
        .then()
            // Body.application-x-www-form-urlencoded.Value.Properties.reviewer.Defined=No
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void putPost_BodyApplicationXWwwFormUrlencodedValuePropertiesReviewerType_Is_Null() {
        given()
            .baseUri( forTestServer())
            .queryParam( "postId", "167771822150204639.4")
            .contentType( "application/x-www-form-urlencoded")
            .formParam( "approved", "false")
            .formParam( "reviewer", (String) null)
        .when()
            .request( "PUT", "/post")
        .then()
            // Body.application-x-www-form-urlencoded.Value.Properties.reviewer.Type=null
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void putPost_BodyApplicationXWwwFormUrlencodedValuePropertiesReviewerValue_Is_Other() {
        given()
            .baseUri( forTestServer())
            .queryParam( "postId", "886308504886987482.6")
            .contentType( "application/x-www-form-urlencoded")
            .formParam( "approved", "false")
            .formParam( "reviewer", "i&")
        .when()
            .request( "PUT", "/post")
        .then()
            // Body.application-x-www-form-urlencoded.Value.Properties.reviewer.Value=Other
            .statusCode( isBadRequest()) ;
    }

    @Test
    public void putPost_BodyApplicationXWwwFormUrlencodedValuePropertiesAdditional_Is_Yes() {
        given()
            .baseUri( forTestServer())
            .queryParam( "postId", "920816794015899048.8")
            .contentType( "application/x-www-form-urlencoded")
            .formParam( "approved", "false")
            .formParam( "reviewer", "Larry Moe")
            .formParam( "dxobuuyffc", "true")
            .formParam( "wssfr", "true")
        .when()
            .request( "PUT", "/post")
        .then()
            // Body.application-x-www-form-urlencoded.Value.Properties.Additional=Yes
            .statusCode( isBadRequest()) ;
    }

    // Matches any 2xx status code.
    private static Matcher<Integer> isSuccess() {
        return allOf( greaterThanOrEqualTo(200), lessThan(300));
    }

    // Matches any 4xx status code.
    private static Matcher<Integer> isBadRequest() {
        return allOf( greaterThanOrEqualTo(400), lessThan(500));
    }

    // Base URI of the API under test, from the tcasesApiServer system property.
    private static String forTestServer() {
        return forTestServer( null);
    }

    // Returns the configured test server, falling back to defaultUri only when the
    // property is unset/blank and a non-null default was given.
    private static String forTestServer( String defaultUri) {
        String testServer = tcasesApiServer();
        return defaultUri == null || !testServer.isEmpty() ? testServer : defaultUri;
    }

    // Reads and trims the tcasesApiServer system property; "" when absent.
    private static String tcasesApiServer() {
        String uri = System.getProperty( "tcasesApiServer");
        return uri == null? "" : uri.trim();
    }
}
/* * Copyright 2009-2020 Aarhus University * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package dk.brics.tajs.analysis.nativeobjects; import dk.brics.tajs.analysis.Conversion; import dk.brics.tajs.analysis.Exceptions; import dk.brics.tajs.analysis.InitialStateBuilder; import dk.brics.tajs.analysis.PropVarOperations; import dk.brics.tajs.analysis.Solver; import dk.brics.tajs.lattice.Bool; import dk.brics.tajs.lattice.Context; import dk.brics.tajs.lattice.ObjectLabel; import dk.brics.tajs.lattice.UnknownValueResolver; import dk.brics.tajs.lattice.Value; import dk.brics.tajs.util.Collectors; import java.util.Optional; import java.util.Set; import static dk.brics.tajs.util.Collections.newSet; /** * Property descriptor. * (ES5 8.10) * Used by Object.defineProperty and Object.prototype.__defineGetter/Setter__. 
 */
public class PropertyDescriptor {

    // Attribute values; Value.makeAbsent() encodes "attribute not specified",
    // Value.makeNone() encodes bottom (no flow).
    private final Value enumerable;

    private final Value configurable;

    private final Value writable;

    private final Value value;

    // Getter/setter functions as abstract object labels; an empty set means "no accessor".
    private final Set<ObjectLabel> get;

    private final Set<ObjectLabel> set;

    private PropertyDescriptor(Value enumerable, Value configurable, Value writable, Value value, Set<ObjectLabel> get, Set<ObjectLabel> set) {
        this.enumerable = enumerable;
        this.configurable = configurable;
        this.writable = writable;
        this.value = value;
        this.get = get;
        this.set = set;
    }

    /**
     * ES5 8.10.5
     * Converts the abstract value given to Object.defineProperty into a descriptor,
     * reading the "enumerable"/"configurable"/"writable"/"value"/"get"/"set"
     * properties from the argument object(s).
     */
    public static PropertyDescriptor toDefinePropertyPropertyDescriptor(Value obj, Solver.SolverInterface c) {
        // FIXME: check for "TypeError: Invalid property descriptor. Cannot both specify accessors and a value or writable attribute" (GitHub #354)
        obj = UnknownValueResolver.getRealValue(obj, c.getState());
        if (obj.isNone()) {
            // no flow: propagate bottom
            return makeBottomPropertyDescriptor();
        }
        if (!obj.isMaybeObject()) {
            Exceptions.throwTypeError(c); // FIXME: should also throw type error if *maybe* non-object? (but in that case only weakly) (GitHub #354)
            c.getState().setToBottom();
        }
        Set<ObjectLabel> objects = obj.getObjectLabels();
        // the three boolean-valued attributes are ToBoolean-coerced, the rest are read as-is
        Value enumerable = readProperty(objects, "enumerable", true, c);
        Value configurable = readProperty(objects, "configurable", true, c);
        Value writable = readProperty(objects, "writable", true, c);
        Value value = readProperty(objects, "value", false, c);
        Value get = readProperty(objects, "get", false, c);
        Value set = readProperty(objects, "set", false, c);
        return constructAndCheck(enumerable, configurable, writable, value, get, set, c);
    }

    /**
     * ES5 8.10.5
     * Builds the descriptor used by __defineGetter__/__defineSetter__: always
     * enumerable, configurable and non-writable, with the given function as
     * getter (when {@code getter} is true) or setter (otherwise).
     */
    public static PropertyDescriptor toDefineGetterSetterPropertyDescriptor(Value fun, boolean getter, Solver.SolverInterface c) {
        fun = UnknownValueResolver.getRealValue(fun, c.getState());
        Value enumerable = Value.makeBool(true);
        Value configurable = Value.makeBool(true);
        Value writable = Value.makeBool(false);
        Value value = Value.makeAbsent();
        Value get = getter ? fun : Value.makeAbsent();
        Value set = getter ? Value.makeAbsent() : fun;
        return constructAndCheck(enumerable, configurable, writable, value, get, set, c);
    }

    /**
     * Constructs a descriptor from the attribute information of an existing property value.
     * Each attribute becomes the join of the booleans its "maybe" flags allow;
     * attributes with no information are represented as absent.
     */
    public static PropertyDescriptor fromProperty(Value property) {
        Value configurable = Value.makeNone();
        if (property.isMaybeDontDelete()) {
            configurable = configurable.joinBool(false);
        }
        if (property.isMaybeNotDontDelete()) {
            configurable = configurable.joinBool(true);
        }
        Value writable = Value.makeNone();
        if (property.isMaybeReadOnly()) {
            writable = writable.joinBool(false);
        }
        if (property.isMaybeNotReadOnly()) {
            writable = writable.joinBool(true);
        }
        Value enumerable = Value.makeNone();
        if (property.isMaybeDontEnum()) {
            enumerable = enumerable.joinBool(false);
        }
        if (property.isMaybeNotDontEnum()) {
            enumerable = enumerable.joinBool(true);
        }
        Value value = Value.makeNone();
        if (property.isMaybePresentData()) {
            value = property.restrictToNonAttributes().restrictToNotGetterSetter();
        }
        // drop the marker for "accessor may be absent" before exposing the label sets
        Set<ObjectLabel> get = newSet(property.getGetters());
        get.remove(ObjectLabel.absent_accessor_function);
        Set<ObjectLabel> set = newSet(property.getSetters());
        set.remove(ObjectLabel.absent_accessor_function);
        return new PropertyDescriptor(
                enumerable.isNone() ? Value.makeAbsent() : enumerable,
                configurable.isNone() ? Value.makeAbsent() : configurable,
                writable.isNone() ? Value.makeAbsent() : writable,
                value.isNone() ? Value.makeAbsent() : value,
                get,
                set);
    }

    /**
     * Builds the descriptor and performs the validity checks of ES5 8.10.5;
     * returns the bottom descriptor when the input is definitely invalid
     * (i.e. a TypeError is definitely thrown).
     */
    private static PropertyDescriptor constructAndCheck(Value enumerable, Value configurable, Value writable, Value value, Value get, Value set, Solver.SolverInterface c) {
        boolean definitelyInvalid = false;
        if (!get.isMaybeAbsent()) {
            definitelyInvalid |= checkCallableGetterSetter(get, c); // 8.10.5#7.b
        }
        if (!set.isMaybeAbsent()) {
            definitelyInvalid |= checkCallableGetterSetter(set, c); // 8.10.5#8.b
        }
        // only function objects can act as accessors
        Set<ObjectLabel> getLabels = get.getObjectLabels().stream().filter(l -> l.getKind() == ObjectLabel.Kind.FUNCTION).collect(Collectors.toSet());
        Set<ObjectLabel> setLabels = set.getObjectLabels().stream().filter(l -> l.getKind() == ObjectLabel.Kind.FUNCTION).collect(Collectors.toSet());
        PropertyDescriptor descriptor = new PropertyDescriptor(enumerable, configurable, writable, value, getLabels, setLabels);
        definitelyInvalid |= checkUnambiguous(descriptor, c); // 8.10.5#9
        if (definitelyInvalid) {
            return makeBottomPropertyDescriptor();
        }
        return descriptor;
    }

    /**
     * The bottom descriptor: all components are bottom/empty, and the two
     * result-producing operations are overridden to yield no result.
     */
    private static PropertyDescriptor makeBottomPropertyDescriptor() {
        return new PropertyDescriptor(Value.makeNone(), Value.makeNone(), Value.makeNone(), Value.makeNone(), newSet(), newSet()) {

            @Override
            public Value makePropertyWithAttributes() {
                return Value.makeNone();
            }

            @Override
            public Optional<ObjectLabel> newPropertyDescriptorObject(Solver.SolverInterface c, Context heapContext) {
                return Optional.empty();
            }
        };
    }

    /**
     * 8.10.5#9: a descriptor must not specify both accessor and data attributes.
     * Throws a (weak) TypeError when the mix is possible; returns true when the
     * descriptor has accessors together with a writable/value attribute, which
     * is treated as a definite exception.
     */
    private static boolean checkUnambiguous(PropertyDescriptor descriptor, Solver.SolverInterface c) {
        if (descriptor.isMaybeAccessorDescriptor()) {
            if (descriptor.isMaybeDataDescriptor()) {
                Exceptions.throwTypeError(c);
            }
        }
        if (!descriptor.get.isEmpty() || !descriptor.set.isEmpty()) {
            if (descriptor.writable.isMaybeTrue() || descriptor.value.isNotAbsent()) {
                // definite exception
                return true;
            }
        }
        return false;
    }

    /**
     * Checks that a specified getter/setter is callable (8.10.5#7.b/#8.b).
     * An explicitly `undefined` accessor is ignored. Throws a TypeError when the
     * value may be non-callable; returns true when it is definitely non-callable
     * (definite exception).
     */
    private static boolean checkCallableGetterSetter(Value f, Solver.SolverInterface c) {
        f = UnknownValueResolver.getRealValue(f, c.getState());
        if (f.restrictToNotUndef().isNone()) {
            return false; // an explicitly `undefined` getter/setter is ignored
        }
        Set<ObjectLabel> labels = f.getObjectLabels();
        boolean maybeNonCallable = f.isMaybePrimitiveOrSymbol() || labels.stream().anyMatch(l -> l.getKind() != ObjectLabel.Kind.FUNCTION);
        boolean onlyNonCallable = !f.isMaybeObject() || labels.stream().allMatch(l -> l.getKind() != ObjectLabel.Kind.FUNCTION);
        if (maybeNonCallable) {
            Exceptions.throwTypeError(c);
        }
        if (onlyNonCallable) {
            // definite exception
            return true;
        }
        return false;
    }

    /**
     * Reads the named property from the given descriptor object(s), optionally
     * coercing the result with ToBoolean; the result is joined with absent when
     * the property may be missing.
     */
    private static Value readProperty(Set<ObjectLabel> objects, String propertyName, boolean coerceToBoolean, Solver.SolverInterface c) {
        PropVarOperations pv = c.getAnalysis().getPropVarOperations();
        Value result = Value.makeNone();
        Bool hasPropertyName = pv.hasProperty(objects, Value.makeTemporaryStr(propertyName));
        if (hasPropertyName.isMaybeTrue()) {
            // opportunity for small precision gain: the property can be assumed not to be absent
            Value propertyValue = UnknownValueResolver.getRealValue(pv.readPropertyValue(objects, propertyName), c.getState());
            c.getMonitoring().visitPropertyRead(c.getNode(), objects, Value.makeTemporaryStr(propertyName), c.getState(), false);
            if (coerceToBoolean) {
                propertyValue = Conversion.toBoolean(UnknownValueResolver.getRealValue(propertyValue, c.getState()));
            }
            result = result.join(propertyValue);
        }
        if (hasPropertyName.isMaybeFalse()) {
            result = result.joinAbsent();
        }
        return result;
    }

    /**
     * 8.10.1
     */
    private boolean isMaybeDataDescriptor() {
        return value.isMaybePresent(); // we don't track absence/presence of [[Writable]], but usually [[Writable]] is present if [[Value]] is present, so we just omit "|| writable.isMaybeTrue()"
    }

    /**
     * 8.10.2
     */
    private boolean isMaybeAccessorDescriptor() {
        return !get.isEmpty() || !set.isEmpty();
    }

    /**
     * 8.10.3
     */
    private boolean isMaybeGenericDescriptor() {
        return !isMaybeDataDescriptor() && !isMaybeAccessorDescriptor();
    }

    private Value getValueOrDefault() {
        return getOrDefault(value, Value.makeUndef());
    }

    private Value getEnumerableOrDefault() {
        return getOrDefault(enumerable, Value.makeBool(false));
    }

    private Value getWritableOrDefault() {
        return getOrDefault(writable, Value.makeBool(false));
    }

    private Value getConfigurableOrDefault() {
        return getOrDefault(configurable, Value.makeBool(false));
    }

    /**
     * Helper function for 8.6.1 Table 7.
     * Replaces the "maybe absent" part of the value by the given default.
     */
    private Value getOrDefault(Value result, Value defaultValue) {
        if (result.isMaybeAbsent()) {
            result = result.restrictToNotAbsent().join(defaultValue);
        }
        return result;
    }

    /**
     * Creates value to be used in {@link PropVarOperations#writePropertyWithAttributes(ObjectLabel, String, Value)}.
     */
    public Value makePropertyWithAttributes() {
        Value value = getValueOrDefault();
        Value enumerable = getEnumerableOrDefault();
        Value writable = getWritableOrDefault();
        Value configurable = getConfigurableOrDefault();
        Value resultValue = Value.makeNone();
        if (isMaybeDataDescriptor() || isMaybeGenericDescriptor()) {
            resultValue = resultValue.join(value.restrictToNotAbsent());
        }
        if (isMaybeAccessorDescriptor()) {
            resultValue = resultValue.join(Value.makeObject(get).makeGetter());
            resultValue = resultValue.join(Value.makeObject(set).makeSetter());
        }
        // TODO: clean up ES3 vs. ES5 terminology
        if (enumerable.isMaybeTrue())
            resultValue = resultValue.setNotDontEnum();
        if (enumerable.isMaybeFalse())
            resultValue = resultValue.setDontEnum();
        if (writable.isMaybeTrue())
            resultValue = resultValue.setNotReadOnly();
        if (writable.isMaybeFalse())
            resultValue = resultValue.setReadOnly();
        if (configurable.isMaybeTrue())
            resultValue = resultValue.setNotDontDelete();
        if (configurable.isMaybeFalse())
            resultValue = resultValue.setDontDelete();
        return resultValue;
    }

    /**
     * Instantiates a valid property-descriptor object. Invalid descriptor objects are not constructed.
     */
    public Optional<ObjectLabel> newPropertyDescriptorObject(Solver.SolverInterface c, Context heapContext) {
        PropVarOperations pv = c.getAnalysis().getPropVarOperations();
        ObjectLabel desc = ObjectLabel.make(c.getNode(), ObjectLabel.Kind.OBJECT, heapContext);
        c.getState().newObject(desc);
        c.getState().writeInternalPrototype(desc, Value.makeObject(InitialStateBuilder.OBJECT_PROTOTYPE));
        if (isMaybeDataDescriptor()) {
            if (value.isMaybePresent()) {
                pv.writeProperty(desc, "value", value);
            }
            pv.writeProperty(desc, "writable", writable);
        }
        if (isMaybeAccessorDescriptor()) {
            if (!get.isEmpty()) {
                pv.writeProperty(desc, "get", Value.makeObject(get));
            }
            if (!set.isEmpty()) {
                pv.writeProperty(desc, "set", Value.makeObject(set));
            }
        }
        pv.writeProperty(desc, "enumerable", enumerable);
        pv.writeProperty(desc, "configurable", configurable);
        return Optional.of(desc);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.query.scan; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.io.CharSource; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.concurrent.Execs; import org.apache.druid.java.util.common.granularity.Granularities; import org.apache.druid.java.util.common.guava.MergeSequence; import org.apache.druid.java.util.common.guava.Sequence; import org.apache.druid.java.util.common.guava.Sequences; import org.apache.druid.query.DefaultGenericQueryMetricsFactory; import org.apache.druid.query.Druids; import org.apache.druid.query.QueryPlus; import org.apache.druid.query.QueryRunner; import org.apache.druid.query.QueryRunnerFactory; import org.apache.druid.query.QueryRunnerTestHelper; import org.apache.druid.query.TableDataSource; import org.apache.druid.query.select.SelectQueryRunnerTest; import org.apache.druid.segment.IncrementalIndexSegment; import org.apache.druid.segment.Segment; import org.apache.druid.segment.TestIndex; import org.apache.druid.segment.incremental.IncrementalIndex; import 
org.apache.druid.segment.incremental.IncrementalIndexSchema; import org.apache.druid.timeline.SegmentId; import org.apache.druid.timeline.partition.NoneShardSpec; import org.joda.time.Interval; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; /** * */ @RunWith(Parameterized.class) public class MultiSegmentScanQueryTest { private static final ScanQueryQueryToolChest toolChest = new ScanQueryQueryToolChest( new ScanQueryConfig(), DefaultGenericQueryMetricsFactory.instance() ); private static final QueryRunnerFactory<ScanResultValue, ScanQuery> factory = new ScanQueryRunnerFactory( toolChest, new ScanQueryEngine(), new ScanQueryConfig() ); // time modified version of druid.sample.numeric.tsv public static final String[] V_0112 = { "2011-01-12T00:00:00.000Z\tspot\tautomotive\t1000\t10000.0\t10000.0\t100000\tpreferred\tapreferred\t100.000000", "2011-01-12T01:00:00.000Z\tspot\tbusiness\t1100\t11000.0\t11000.0\t110000\tpreferred\tbpreferred\t100.000000", "2011-01-12T02:00:00.000Z\tspot\tentertainment\t1200\t12000.0\t12000.0\t120000\tpreferred\tepreferred\t100.000000", "2011-01-12T03:00:00.000Z\tspot\thealth\t1300\t13000.0\t13000.0\t130000\tpreferred\thpreferred\t100.000000", "2011-01-12T04:00:00.000Z\tspot\tmezzanine\t1400\t14000.0\t14000.0\t140000\tpreferred\tmpreferred\t100.000000", "2011-01-12T05:00:00.000Z\tspot\tnews\t1500\t15000.0\t15000.0\t150000\tpreferred\tnpreferred\t100.000000", "2011-01-12T06:00:00.000Z\tspot\tpremium\t1600\t16000.0\t16000.0\t160000\tpreferred\tppreferred\t100.000000", "2011-01-12T07:00:00.000Z\tspot\ttechnology\t1700\t17000.0\t17000.0\t170000\tpreferred\ttpreferred\t100.000000", 
"2011-01-12T08:00:00.000Z\tspot\ttravel\t1800\t18000.0\t18000.0\t180000\tpreferred\ttpreferred\t100.000000", "2011-01-12T09:00:00.000Z\ttotal_market\tmezzanine\t1400\t14000.0\t14000.0\t140000\tpreferred\tmpreferred\t1000.000000", "2011-01-12T10:00:00.000Z\ttotal_market\tpremium\t1600\t16000.0\t16000.0\t160000\tpreferred\tppreferred\t1000.000000", "2011-01-12T11:00:00.000Z\tupfront\tmezzanine\t1400\t14000.0\t14000.0\t140000\tpreferred\tmpreferred\t800.000000\tvalue", "2011-01-12T12:00:00.000Z\tupfront\tpremium\t1600\t16000.0\t16000.0\t160000\tpreferred\tppreferred\t800.000000\tvalue" }; public static final String[] V_0113 = { "2011-01-13T00:00:00.000Z\tspot\tautomotive\t1000\t10000.0\t10000.0\t100000\tpreferred\tapreferred\t94.874713", "2011-01-13T01:00:00.000Z\tspot\tbusiness\t1100\t11000.0\t11000.0\t110000\tpreferred\tbpreferred\t103.629399", "2011-01-13T02:00:00.000Z\tspot\tentertainment\t1200\t12000.0\t12000.0\t120000\tpreferred\tepreferred\t110.087299", "2011-01-13T03:00:00.000Z\tspot\thealth\t1300\t13000.0\t13000.0\t130000\tpreferred\thpreferred\t114.947403", "2011-01-13T04:00:00.000Z\tspot\tmezzanine\t1400\t14000.0\t14000.0\t140000\tpreferred\tmpreferred\t104.465767", "2011-01-13T05:00:00.000Z\tspot\tnews\t1500\t15000.0\t15000.0\t150000\tpreferred\tnpreferred\t102.851683", "2011-01-13T06:00:00.000Z\tspot\tpremium\t1600\t16000.0\t16000.0\t160000\tpreferred\tppreferred\t108.863011", "2011-01-13T07:00:00.000Z\tspot\ttechnology\t1700\t17000.0\t17000.0\t170000\tpreferred\ttpreferred\t111.356672", "2011-01-13T08:00:00.000Z\tspot\ttravel\t1800\t18000.0\t18000.0\t180000\tpreferred\ttpreferred\t106.236928", "2011-01-13T09:00:00.000Z\ttotal_market\tmezzanine\t1400\t14000.0\t14000.0\t140000\tpreferred\tmpreferred\t1040.945505", "2011-01-13T10:00:00.000Z\ttotal_market\tpremium\t1600\t16000.0\t16000.0\t160000\tpreferred\tppreferred\t1689.012875", "2011-01-13T11:00:00.000Z\tupfront\tmezzanine\t1400\t14000.0\t14000.0\t140000\tpreferred\tmpreferred\t826.060182\tvalue", 
"2011-01-13T12:00:00.000Z\tupfront\tpremium\t1600\t16000.0\t16000.0\t160000\tpreferred\tppreferred\t1564.617729\tvalue" }; private static Segment segment0; private static Segment segment1; @BeforeClass public static void setup() throws IOException { CharSource v_0112 = CharSource.wrap(StringUtils.join(V_0112, "\n")); CharSource v_0113 = CharSource.wrap(StringUtils.join(V_0113, "\n")); IncrementalIndex index0 = TestIndex.loadIncrementalIndex(newIndex("2011-01-12T00:00:00.000Z"), v_0112); IncrementalIndex index1 = TestIndex.loadIncrementalIndex(newIndex("2011-01-13T00:00:00.000Z"), v_0113); segment0 = new IncrementalIndexSegment(index0, makeIdentifier(index0, "v1")); segment1 = new IncrementalIndexSegment(index1, makeIdentifier(index1, "v1")); } private static SegmentId makeIdentifier(IncrementalIndex index, String version) { return makeIdentifier(index.getInterval(), version); } private static SegmentId makeIdentifier(Interval interval, String version) { return SegmentId.of(QueryRunnerTestHelper.dataSource, interval, version, NoneShardSpec.instance()); } private static IncrementalIndex newIndex(String minTimeStamp) { return newIndex(minTimeStamp, 10000); } private static IncrementalIndex newIndex(String minTimeStamp, int maxRowCount) { final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder() .withMinTimestamp(DateTimes.of(minTimeStamp).getMillis()) .withQueryGranularity(Granularities.HOUR) .withMetrics(TestIndex.METRIC_AGGS) .build(); return new IncrementalIndex.Builder() .setIndexSchema(schema) .setMaxRowCount(maxRowCount) .buildOnheap(); } @AfterClass public static void clear() { IOUtils.closeQuietly(segment0); IOUtils.closeQuietly(segment1); } @Parameterized.Parameters(name = "limit={0},batchSize={1}") public static Iterable<Object[]> constructorFeeder() { return QueryRunnerTestHelper.cartesian( Arrays.asList(0, 1, 3, 7, 10, 20, 1000), Arrays.asList(0, 1, 3, 6, 7, 10, 123, 2000) ); } private final int limit; private final int batchSize; public 
MultiSegmentScanQueryTest(int limit, int batchSize) { this.limit = limit; this.batchSize = batchSize; } private Druids.ScanQueryBuilder newBuilder() { return Druids.newScanQueryBuilder() .dataSource(new TableDataSource(QueryRunnerTestHelper.dataSource)) .intervals(SelectQueryRunnerTest.I_0112_0114_SPEC) .batchSize(batchSize) .columns(Collections.emptyList()) .legacy(false) .limit(limit); } @Test public void testMergeRunnersWithLimit() { ScanQuery query = newBuilder().build(); List<ScanResultValue> results = factory .mergeRunners( Execs.directExecutor(), ImmutableList.of(factory.createRunner(segment0), factory.createRunner(segment1)) ) .run(QueryPlus.wrap(query), new HashMap<>()) .toList(); int totalCount = 0; for (ScanResultValue result : results) { System.out.println(((List) result.getEvents()).size()); totalCount += ((List) result.getEvents()).size(); } Assert.assertEquals( totalCount, limit != 0 ? Math.min(limit, V_0112.length + V_0113.length) : V_0112.length + V_0113.length ); } @Test public void testMergeResultsWithLimit() { QueryRunner<ScanResultValue> runner = toolChest.mergeResults( new QueryRunner<ScanResultValue>() { @Override public Sequence<ScanResultValue> run( QueryPlus<ScanResultValue> queryPlus, Map<String, Object> responseContext ) { // simulate results back from 2 historicals List<Sequence<ScanResultValue>> sequences = Lists.newArrayListWithExpectedSize(2); sequences.add(factory.createRunner(segment0).run(queryPlus, new HashMap<>())); sequences.add(factory.createRunner(segment1).run(queryPlus, new HashMap<>())); return new MergeSequence<>( queryPlus.getQuery().getResultOrdering(), Sequences.simple(sequences) ); } } ); ScanQuery query = newBuilder().build(); List<ScanResultValue> results = runner.run(QueryPlus.wrap(query), new HashMap<>()).toList(); int totalCount = 0; for (ScanResultValue result : results) { totalCount += ((List) result.getEvents()).size(); } Assert.assertEquals( totalCount, limit != 0 ? 
Math.min(limit, V_0112.length + V_0113.length) : V_0112.length + V_0113.length ); } }
/** * */ package mvm.rya.indexing; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.codec.binary.StringUtils; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.DateTimeFormatter; import org.joda.time.format.ISODateTimeFormat; /** * Immutable date and time instance returning a human readable key. * Preserves the Time zone, but not stored in the key. * Converts fields (hours, etc) correctly for tz=Zulu when stored, * so the original timezone is not preserved when retrieved. * * Uses rfc 3339, which looks like: YYYY-MM-DDThh:mm:ssZ a subset * of ISO-8601 : https://www.ietf.org/rfc/rfc3339.txt * * Limits: All dates and times are assumed to be in the "current era", no BC, * somewhere between 0000AD and 9999AD. * * Resolution: to the second, or millisecond if the optional fraction is used. * * This is really a wrapper for Joda DateTime. if you need functionality from * that wonderful class, simply use t.getAsDateTime(). 
 *
 */
public class TemporalInstantRfc3339 implements TemporalInstant {
    private static final long serialVersionUID = -7790000399142290309L;
    // The wrapped instant; original time zone is kept here but not in the key.
    private final DateTime dateTime;

    /**
     * Format key like this: YYYY-MM-DDThh:mm:ssZ
     */
    public final static DateTimeFormatter FORMATTER = ISODateTimeFormat.dateTimeNoMillis();

    // Matches "[dateTime1,dateTime2]" as used by parseInterval().
    public static final Pattern PATTERN = Pattern.compile("\\[(.*)\\,(.*)\\].*");

    /**
     * New date assumed UTC time zone.
     *
     * @param year
     * @param month
     * @param day
     * @param hour
     * @param minute
     * @param second
     */
    public TemporalInstantRfc3339(final int year, final int month, final int day, final int hour, final int minute, final int second) {
        dateTime = new DateTime(year, month, day, hour, minute, second, DateTimeZone.UTC);
    }

    /**
     * Construct with a Joda/java v8 DateTime;
     * TZ is preserved, but not in the key.
     *
     * @param dateTime
     *            initialize with this date time. Converted to zulu time zone for key generation.
     */
    public TemporalInstantRfc3339(final DateTime datetime) {
        dateTime = datetime;
    }

    /**
     * Get an interval setting beginning and end with this implementation of {@link TemporalInstant}.
     * beginning must be less than end.
     *
     * @param dateTimeInterval String in the form [dateTime1,dateTime2]
     */
    public static TemporalInterval parseInterval(final String dateTimeInterval) {

        final Matcher matcher = PATTERN.matcher(dateTimeInterval);
        if (matcher.find()) {
            // Got a date time pair, parse into an interval.
            return new TemporalInterval(
                    new TemporalInstantRfc3339(new DateTime(matcher.group(1))),
                    new TemporalInstantRfc3339(new DateTime(matcher.group(2))));
        }
        throw new IllegalArgumentException("Can't parse interval, expecting '[ISO8601dateTime1,ISO8601dateTime2]', actual: "+dateTimeInterval);
    }

    /**
     * if this is older returns -1, equal 0, else 1
     * (delegates to the lexicographic order of the UTC key strings)
     */
    @Override
    public int compareTo(final TemporalInstant that) {
        return getAsKeyString().compareTo(that.getAsKeyString());
    }

    @Override
    public byte[] getAsKeyBytes() {
        return StringUtils.getBytesUtf8(getAsKeyString());
    }

    @Override
    public String getAsKeyString() {
        // always normalized to UTC so keys sort chronologically
        return dateTime.withZone(DateTimeZone.UTC).toString(FORMATTER);
    }

    /**
     * Readable string, formatted local time at {@link DateTimeZone}.
     * If the timezone is UTC (Z), it was probably a key from the database.
     * If the server and client are in different Time zone, should probably use the client timezone.
     *
     * Time at specified time zone:
     *      instant.getAsReadable(DateTimeZone.forID("-05:00")));
     *      instant.getAsReadable(DateTimeZone.getDefault()));
     *
     * Use original time zone set in the constructor:
     *      instant.getAsDateTime().toString(TemporalInstantRfc3339.FORMATTER));
     *
     */
    @Override
    public String getAsReadable(final DateTimeZone dateTimeZone) {
        return dateTime.withZone(dateTimeZone).toString(FORMATTER);
    }

    /**
     * Use original time zone set in the constructor, or UTC if from parsing the key.
     */
    @Override
    public String getAsReadable() {
        return dateTime.toString(FORMATTER);
    }

    /**
     * default toString, same as getAsReadable().
     */
    @Override
    public String toString() {
        return getAsReadable();
    }

    /**
     * Returns the wrapped Joda DateTime (original time zone preserved).
     */
    @Override
    public DateTime getAsDateTime() {
        return dateTime;
    }

    /**
     * Minimum Date, used for infinitely past.
     * NOTE(review): DateTime at Long.MIN_VALUE is outside the documented 0000-9999AD
     * range, so its formatted key may not sort as expected — verify before relying on it.
     */
    private static final TemporalInstant MINIMUM = new TemporalInstantRfc3339(new DateTime(Long.MIN_VALUE));

    /**
     * maximum date/time is used for infinitely in the future.
     */
    private static final TemporalInstant MAXIMUM = new TemporalInstantRfc3339(new DateTime(Long.MAX_VALUE));

    /**
     * infinite past date.
     * @return an instant that will compare as OLDER than anything but itself.
     */
    public static TemporalInstant getMinimumInstance() {
        return MINIMUM;
    }

    /**
     * infinite future date.
     * @return an instant that will compare as NEWER than anything but itself
     */
    public static TemporalInstant getMaximumInstance() {
        return MAXIMUM;
    }

    /* (non-Javadoc)
     * @see java.lang.Object#hashCode()
     */
    @Override
    public int hashCode() {
        // consistent with equals(): both are based on the UTC key string
        return getAsKeyString().hashCode();
    }

    /* (non-Javadoc)
     * @see java.lang.Object#equals(java.lang.Object)
     */
    @Override
    public boolean equals(final Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        final TemporalInstantRfc3339 other = (TemporalInstantRfc3339) obj;
        // instants are equal when their UTC key strings are equal (time zone ignored)
        return (getAsKeyString().equals(other.getAsKeyString()));
    }
}
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.stratos.kubernetes.client; import io.fabric8.kubernetes.api.KubernetesClient; import io.fabric8.kubernetes.api.model.*; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.stratos.kubernetes.client.exceptions.KubernetesClientException; import org.apache.stratos.kubernetes.client.interfaces.KubernetesAPIClientInterface; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; public class KubernetesApiClient implements KubernetesAPIClientInterface { private static final Log log = LogFactory.getLog(KubernetesApiClient.class); private KubernetesClient kubernetesClient; public KubernetesApiClient(String endpointUrl) { kubernetesClient = new KubernetesClient(endpointUrl); } /** * Create new pod * * @param podId Identifier of the pod * @param podName Pod name to be used by the pod label * @param podLabels Map of labels to be applied to the pod * @param annotations Map of annotations to be applied to the pod * @param dockerImage Docker image to be used by the pod * @param cpu Number of cpu cores * @param memory Memory allocation in megabytes * @param ports Ports exposed by the pod * 
@param environmentVariables Environment variables to be passed to the pod * @throws KubernetesClientException */ @Override public void createPod(String podId, String podName, Map<String, String> podLabels, Map<String, String> annotations, String dockerImage, String cpu, String memory, List<ContainerPort> ports, List<EnvVar> environmentVariables) throws KubernetesClientException { try { if (log.isDebugEnabled()) { log.debug(String.format("Creating kubernetes pod: [pod-id] %s [pod-name] %s [docker-image] %s " + "[cpu] %s [memory] %s [ports] %s", podId, podLabels, dockerImage, cpu, memory, ports)); } // Create pod definition Pod pod = new Pod(); pod.setApiVersion(Pod.ApiVersion.V_1); pod.setKind(KubernetesConstants.KIND_POD); pod.setSpec(new PodSpec()); pod.setMetadata(new ObjectMeta()); pod.getMetadata().setName(podId); pod.getMetadata().setLabels(podLabels); pod.getMetadata().setAnnotations(annotations); // Set container template Container containerTemplate = new Container(); containerTemplate.setName(podName); containerTemplate.setImage(dockerImage); containerTemplate.setEnv(environmentVariables); List<Container> containerTemplates = new ArrayList<Container>(); containerTemplates.add(containerTemplate); pod.getSpec().setContainers(containerTemplates); // Set resource limits ResourceRequirements resources = new ResourceRequirements(); Map<String, Quantity> limits = new HashMap<String, Quantity>(); limits.put(KubernetesConstants.RESOURCE_CPU, new Quantity(cpu)); limits.put(KubernetesConstants.RESOURCE_MEMORY, new Quantity(memory)); resources.setLimits(limits); containerTemplate.setResources(resources); containerTemplate.setPorts(ports); containerTemplate.setImagePullPolicy(KubernetesConstants.POLICY_PULL_IF_NOT_PRESENT); if (environmentVariables != null) { containerTemplate.setEnv(environmentVariables); } // Invoke the api to create the pod kubernetesClient.createPod(pod); if (log.isDebugEnabled()) { log.debug(String.format("Kubernetes pod created successfully: 
[pod-id] %s", podId)); } } catch (Exception e) { String msg = String.format("Could not create kubernetes pod: [pod-id] %s", podId); log.error(msg, e); throw new KubernetesClientException(msg, e); } } @Override public Pod getPod(String podId) throws KubernetesClientException { try { return kubernetesClient.getPod(podId); } catch (Exception e) { String msg = String.format("Could not retrieve kubernetes pod: [pod-id] %s", podId); log.error(msg, e); throw new KubernetesClientException(msg, e); } } @Override public List<Pod> getPods() throws KubernetesClientException { try { return kubernetesClient.getPods().getItems(); } catch (Exception e) { String msg = "Error while retrieving kubernetes pods."; log.error(msg, e); throw new KubernetesClientException(msg, e); } } @Override public void deletePod(String podId) throws KubernetesClientException { try { kubernetesClient.deletePod(podId); } catch (Exception e) { String message = String.format("Could not delete kubernetes pod: [pod-id] %s", podId); log.error(message, e); throw new KubernetesClientException(message, e); } } /** * Create kubernetes service * * @param serviceId Service id * @param serviceName Service name to be used by the label name * @param serviceLabels Service labels map * @param annotations Map of annotations to be applied to the service * @param servicePort Port to be exposed by the kubernetes node * @param containerPortName Container port name defined in the port label * @param containerPort Container port * @param sessionAffinity Session affinity configuration * @param serviceType Service type * @throws KubernetesClientException */ @Override public void createService(String serviceId, String serviceName, Map<String, String> serviceLabels, Map<String, String> annotations, int servicePort, String serviceType, String containerPortName, int containerPort, String sessionAffinity) throws KubernetesClientException { try { if (log.isDebugEnabled()) { log.debug( String.format("Creating kubernetes service: 
[service-id] %s [service-name] %s [service-port] " + "%d [container-port-name] %s [service-type] %s", serviceId, serviceName, servicePort, containerPortName, serviceType)); } // Create service definition Service service = new Service(); service.setSpec(new ServiceSpec()); service.setMetadata(new ObjectMeta()); service.setApiVersion(Service.ApiVersion.V_1); service.setKind(KubernetesConstants.KIND_SERVICE); service.getMetadata().setName(serviceId); service.getSpec().setSessionAffinity(sessionAffinity); service.getMetadata().setAnnotations(annotations); if (serviceType.equals(KubernetesConstants.NODE_PORT)) { service.getSpec().setType(KubernetesConstants.NODE_PORT); } else { service.getSpec().setType(KubernetesConstants.CLUSTER_IP); } // Set port List<ServicePort> ports = new ArrayList<ServicePort>(); ServicePort port = new ServicePort(); port.setName(containerPortName); port.setPort(containerPort); port.setTargetPort(new IntOrString(containerPort)); if (serviceType.equals(KubernetesConstants.NODE_PORT)) { port.setNodePort(servicePort); } ports.add(port); service.getSpec().setPorts(ports); // Set labels service.getMetadata().setLabels(serviceLabels); // Set service selector Map<String, String> selector = new HashMap<String, String>(); selector.put(KubernetesConstants.SERVICE_SELECTOR_LABEL, serviceName); service.getSpec().setSelector(selector); // Invoke the api to create the service kubernetesClient.createService(service); if (log.isDebugEnabled()) { log.debug(String.format("Kubernetes service created successfully: [service-id] %s [service-name] %s " + "[node-port] %d [container-port-name] %s [container-port] %d", serviceId, serviceName, servicePort, containerPortName, containerPort)); } } catch (Exception e) { String message = String.format("Could not create kubernetes service: [service-id] %s [service-name] %s " + "[node-port] %d [container-port-name] %s [container-port] %d", serviceId, serviceName, servicePort, containerPortName, containerPort); 
log.error(message, e); throw new KubernetesClientException(message, e); } } @Override public Service getService(String serviceId) throws KubernetesClientException { try { return kubernetesClient.getService(serviceId); } catch (Exception e) { String msg = String.format("Could not retrieve kubernetes service: [service-id] %s", serviceId); log.error(msg, e); throw new KubernetesClientException(msg, e); } } @Override public List<Service> getServices() throws KubernetesClientException { try { return kubernetesClient.getServices().getItems(); } catch (Exception e) { String msg = "Could not retrieve kubernetes services"; log.error(msg, e); throw new KubernetesClientException(msg, e); } } @Override public void deleteService(String serviceId) throws KubernetesClientException { try { if (log.isDebugEnabled()) { log.debug(String.format("Deleting kubernetes service: [service-id] %s", serviceId)); } kubernetesClient.deleteService(serviceId); if (log.isDebugEnabled()) { log.debug(String.format("Kubernetes service deleted successfully: [service-id] %s", serviceId)); } } catch (Exception e) { String msg = String.format("Could not delete kubernetes service: [service-id] %s", serviceId); log.error(msg, e); throw new KubernetesClientException(msg, e); } } }
package io.sunrisedata.wikipedia;

/**
 * Created by evamonsen on 9/10/15.
 */
/*
 * Cloud9: A MapReduce Library for Hadoop
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you
 * may not use this file except in compliance with the License. You may
 * obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.log4j.Logger;
import org.xml.sax.SAXException;

import javax.xml.parsers.ParserConfigurationException;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.Arrays;

/**
 * Hadoop {@code InputFormat} for processing Wikipedia page REVISIONS from the XML dumps.
 *
 * A bit of explanation. Hadoop provides randomly sized chunks of files which may not
 * entirely contain wikipedia &lt;page&gt; elements.
 *
 * We are allowed to read from the start of the split, and may read beyond the end of
 * the split.
 *
 * This class will read all of the pages that *start* within the split, even if they
 * end outside of the split.
 *
 * The revision key is the file path + start position of the revision, which should
 * uniquely identify the revision within a set of multiple input files.
 *
 * The value is the raw xml of the revision, including the &lt;revision&gt; start and end tags.
 *
 * @author Eva Monsen
 * @author Jimmy Lin
 * @author Peter Exner
 */
public class WikipediaPageRevisionInputFormat extends FileInputFormat<Text, WikipediaPageRevision> {

    /**
     * Creates a fresh (uninitialized) reader; the framework calls
     * {@code initialize(...)} on it before use.
     */
    @Override
    public RecordReader<Text, WikipediaPageRevision> createRecordReader(
            InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
        return new WikipediaPageRevisionRecordReader();
    }

    /**
     * Streaming reader that scans raw XML bytes for {@code <page>} / {@code <revision>}
     * tags and emits one record per revision. State is carried across calls to
     * {@code nextKeyValue()} via {@code inPage}, {@code page}, and the shared buffer,
     * so the order of operations in this class is significant.
     */
    public static class WikipediaPageRevisionRecordReader extends RecordReader<Text, WikipediaPageRevision> {
        private static final Logger LOG = Logger.getLogger(WikipediaPageRevisionRecordReader.class);

        // NOTE(review): 'revision' and 'language' are never read or written in this
        // class; they appear to be leftovers.
        private WikipediaPageRevision revision;
        // Page currently being read; parsed lazily when the first <revision> of a
        // page is found, reset to null at </page>.
        private WikipediaPage page;
        private String language;
        private byte[] revisionStartTag;
        private byte[] revisionEndTag;
        private byte[] pageStartTagBytes;
        private byte[] pageEndTag;
        // Search pair used inside a page: index 0 = <revision>, index 1 = </page>.
        // The case labels in nextKeyValue() depend on this ordering.
        byte[][] revisionSearchTags;
        // Split boundaries and the reader's current byte offset within the file.
        private long start;
        private long end;
        private long pos;
        private DataInputStream fsin = null;
        // Accumulates raw bytes between tag matches (page header, then revision xml).
        private DataOutputBuffer buffer = new DataOutputBuffer();
        private long recordStartPos;
        // keep track of state
        private boolean inPage = false;
        private final Text key = new Text();
        private WikipediaPageRevision value;
        private long pageStartPos;
        private Path file;
        private long revisionStartPos;

        private static final String PAGE_START_TAG = "<page>";
        private static final String REVISION_START_TAG = "<revision>";

        /**
         * Called once at initialization.
         *
         * Opens the split's file (decompressing if a codec matches), seeks to the
         * split start for uncompressed files, and precomputes the UTF-8 tag byte
         * patterns used by the scanner.
         *
         * @param input the split that defines the range of records to readFromXml
         * @param context the information about the task
         * @throws IOException
         * @throws InterruptedException
         */
        @Override
        public void initialize(InputSplit input, TaskAttemptContext context)
                throws IOException, InterruptedException {
            Configuration conf = context.getConfiguration();

            this.pageStartTagBytes = PAGE_START_TAG.getBytes("utf-8");
            this.pageEndTag = "</page>".getBytes("utf-8");

            this.revisionStartTag = REVISION_START_TAG.getBytes("utf-8");
            this.revisionEndTag = "</revision>".getBytes("utf-8");

            byte[][] b = {revisionStartTag, pageEndTag};
            revisionSearchTags = b;

            FileSplit split = (FileSplit) input;
            start = split.getStart();
            file = split.getPath();

            CompressionCodecFactory compressionCodecs = new CompressionCodecFactory(conf);
            CompressionCodec codec = compressionCodecs.getCodec(file);

            FileSystem fs = file.getFileSystem(conf);

            if (codec != null) {
                LOG.info("Reading compressed file " + file + "...");
                fsin = new DataInputStream(codec.createInputStream(fs.open(file)));
                // Compressed streams are not splittable: read the whole file.
                end = Long.MAX_VALUE;
            } else {
                LOG.info("Reading uncompressed file " + file + "...");
                FSDataInputStream fileIn = fs.open(file);
                fileIn.seek(start);
                fsin = fileIn;
                end = start + split.getLength();
            }

            recordStartPos = start;

            // Because input streams of gzipped files are not seekable, we need to keep
            // track of bytes consumed ourselves.
            pos = start;
        }

        /**
         * Read the next key, value pair, provided the page
         *
         * Scans for the next &lt;revision&gt; element: first finds a &lt;page&gt; start if not
         * already inside one, parses the page header lazily on the page's first
         * revision, then captures the revision xml into {@code value} and sets the
         * key to "pageId_revisionId".
         *
         * @return {@code true} if a key/value pair was readFromXml
         * @throws IOException
         * @throws InterruptedException
         */
        @Override
        public boolean nextKeyValue() throws IOException, InterruptedException {
            while(true) {
                // find page start if we're not in one
                if (!inPage) {
                    LOG.debug("Searchign for start page");
                    buffer.reset();
                    if (readUntilMatch(pageStartTagBytes, false)) {
                        pageStartPos = pos - pageStartTagBytes.length;
                        inPage = true;
                        LOG.debug("Page start is at "+pageStartPos);
                    } else {
                        // No page begins inside this split; we're done.
                        LOG.debug("No page start tag found within block");
                        return false;
                    }
                }
                // look for either a revision start tag, or a page end tag
                switch (readUntilMatch(revisionSearchTags, true)) {
                    case 0: // revision
                        if(page == null) {
                            // First revision of this page: everything buffered so far
                            // (minus the matched <revision> tag) is the page header.
                            LOG.debug("reading page");
                            page = new WikipediaPage();
                            try {
                                String xml = PAGE_START_TAG+new String(buffer.getData(), 0, buffer.getLength()-revisionStartTag.length, "utf-8")+"</page>";
                                LOG.debug("Page Xml = "+xml);
                                page.readFromXml(xml);
                            } catch (ParserConfigurationException e) {
                                LOG.error("Error reading WikipediaPage", e);
                            } catch (SAXException e) {
                                LOG.error("Error reading WikipediaPage", e);
                            }
                        }

                        // find the whole xml of the revision
                        revisionStartPos = pos - revisionStartTag.length;
                        buffer.reset();
                        // Re-seed the buffer with the start tag so the captured xml is
                        // a complete <revision>...</revision> element.
                        buffer.write(revisionStartTag);
                        if (readUntilMatch(revisionEndTag, true)) {
                            //key.set(file + Long.toString(recordStartPos));
                            value = new WikipediaPageRevision(page);
                            try {
                                String xml = new String(buffer.getData(), 0, buffer.getLength(), "utf-8");
                                LOG.debug("revision xml = " + xml);
                                value.readFromXml(xml);
                                key.set(value.getContainingPage().getPageId() + "_" + value.getRevisionId());
                            } catch (ParserConfigurationException e) {
                                LOG.error("Error reading WikipediaPageRevision in page " + value.getContainingPage().getPageId(), e);
                            } catch (SAXException e) {
                                LOG.error("Error reading WikipediaPageRevision in page " + value.getContainingPage().getPageId(), e);
                            }
                            return true;
                        }
                        // didn't find revision end. that's weird. log it and run away
                        LOG.error("no end tag for revision starting at position " + revisionStartPos
                                + " in file " + file);
                        return false;
                    case 1: // end page
                        // no more revisions for this page so go find start of next page.
                        // note: this is the only case where we go through the while loop again
                        inPage = false;
                        page = null;
                        break;
                    default:
                        // didn't find revision start OR page end. that's weird. log it and run away
                        LOG.error("no end tag for page starting at position " + pageStartPos
                                + " in file " + file + "");
                        return false;
                }
            }
        }

        /**
         * Returns the current key.
         *
         * @return the current key or {@code null} if there is no current key
         * @throws IOException
         * @throws InterruptedException
         */
        @Override
        public Text getCurrentKey() throws IOException, InterruptedException {
            return key;
        }

        /**
         * Returns the current value.
         *
         * @return current value
         * @throws IOException
         * @throws InterruptedException
         */
        @Override
        public WikipediaPageRevision getCurrentValue() throws IOException, InterruptedException {
            return value;
        }

        /**
         * Closes the record reader.
         */
        @Override
        public void close() throws IOException {
            fsin.close();
        }

        /**
         * The current progress of the record reader through its data.
         *
         * NOTE(review): for compressed files {@code end} is Long.MAX_VALUE, so
         * progress will be effectively 0 — confirm this is acceptable upstream.
         *
         * @return a number between 0.0 and 1.0 that is the fraction of the data readFromXml
         * @throws IOException
         */
        @Override
        public float getProgress() throws IOException {
            return ((float) (pos - start)) / ((float) (end - start));
        }

        /**
         * Convenience wrapper: scan for a single tag.
         *
         * @param match the byte pattern to scan for
         * @param saveToBuffer whether to copy consumed bytes into {@code buffer}
         * @return true if the pattern was found before EOF / the split boundary
         * @throws IOException
         */
        private boolean readUntilMatch(byte[] match, boolean saveToBuffer) throws IOException {
            byte[][] matches = {match};
            return (readUntilMatch(matches, saveToBuffer) == 0);
        }

        /**
         * Reads bytes one at a time until any of the given patterns matches.
         *
         * Each pattern keeps its own partial-match counter in {@code i}. When not
         * buffering, the scan also stops once the split end has been passed with no
         * partial match in flight (sum of counters == 0).
         *
         * NOTE(review): on a mismatch the counter is reset to 0 without re-testing
         * the current byte against the pattern's first byte; tags starting with a
         * repeated byte could be missed — inherited from the original Cloud9 code.
         *
         * @param matches patterns to scan for
         * @param saveToBuffer whether to save bytes readFromXml to a buffer for later use.
         * @return index of the match, otherwise -1
         * @throws IOException
         */
        private int readUntilMatch(byte[][] matches, boolean saveToBuffer) throws IOException {
            int[] i = new int[matches.length]; // should be initialized to all zeroes
            while (true) {
                int b = fsin.read();
                // increment position (bytes consumed)
                pos++;
                // end of file:
                if (b == -1)
                    return -1;
                // save to buffer:
                if (saveToBuffer)
                    buffer.write(b);
                // check if we're matching:
                for(int m = 0; m < matches.length; m++) {
                    byte[] match = matches[m];
                    if (b == match[i[m]]) {
                        i[m]++;
                        if (i[m] >= match.length) {
                            LOG.debug("Match "+m+" found at pos " + pos);
                            return m;
                        }
                    } else {
                        i[m] = 0;
                    }
                    // see if we've passed the stop point:
                    if (!saveToBuffer && Arrays.stream(i).sum() == 0 && pos >= end) {
                        LOG.debug("readUntilMatch returning -1");
                        return -1;
                    }
                }
            }
        }
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.cluster.routing.allocation.decider; import java.util.Set; import com.carrotsearch.hppc.cursors.ObjectCursor; import org.elasticsearch.cluster.ClusterInfo; import org.elasticsearch.cluster.DiskUsage; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import static org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING; import static 
org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING; /** * The {@link DiskThresholdDecider} checks that the node a shard is potentially * being allocated to has enough disk space. * * It has three configurable settings, all of which can be changed dynamically: * * <code>cluster.routing.allocation.disk.watermark.low</code> is the low disk * watermark. New shards will not allocated to a node with usage higher than this, * although this watermark may be passed by allocating a shard. It defaults to * 0.85 (85.0%). * * <code>cluster.routing.allocation.disk.watermark.high</code> is the high disk * watermark. If a node has usage higher than this, shards are not allowed to * remain on the node. In addition, if allocating a shard to a node causes the * node to pass this watermark, it will not be allowed. It defaults to * 0.90 (90.0%). * * Both watermark settings are expressed in terms of used disk percentage, or * exact byte values for free space (like "500mb") * * <code>cluster.routing.allocation.disk.threshold_enabled</code> is used to * enable or disable this decider. It defaults to false (disabled). */ public class DiskThresholdDecider extends AllocationDecider { public static final String NAME = "disk_threshold"; private final DiskThresholdSettings diskThresholdSettings; public DiskThresholdDecider(Settings settings, ClusterSettings clusterSettings) { super(settings); this.diskThresholdSettings = new DiskThresholdSettings(settings, clusterSettings); } /** * Returns the size of all shards that are currently being relocated to * the node, but may not be finished transferring yet. 
* * If subtractShardsMovingAway is true then the size of shards moving away is subtracted from the total size of all shards */ static long sizeOfRelocatingShards(RoutingNode node, RoutingAllocation allocation, boolean subtractShardsMovingAway, String dataPath) { ClusterInfo clusterInfo = allocation.clusterInfo(); long totalSize = 0; for (ShardRouting routing : node.shardsWithState(ShardRoutingState.RELOCATING, ShardRoutingState.INITIALIZING)) { String actualPath = clusterInfo.getDataPath(routing); if (dataPath.equals(actualPath)) { if (routing.initializing() && routing.relocatingNodeId() != null) { totalSize += getExpectedShardSize(routing, allocation, 0); } else if (subtractShardsMovingAway && routing.relocating()) { totalSize -= getExpectedShardSize(routing, allocation, 0); } } } return totalSize; } @Override public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { ClusterInfo clusterInfo = allocation.clusterInfo(); ImmutableOpenMap<String, DiskUsage> usages = clusterInfo.getNodeMostAvailableDiskUsages(); final Decision decision = earlyTerminate(allocation, usages); if (decision != null) { return decision; } final double usedDiskThresholdLow = 100.0 - diskThresholdSettings.getFreeDiskThresholdLow(); final double usedDiskThresholdHigh = 100.0 - diskThresholdSettings.getFreeDiskThresholdHigh(); // subtractLeavingShards is passed as false here, because they still use disk space, and therefore should we should be extra careful // and take the size into account DiskUsage usage = getDiskUsage(node, allocation, usages, false); // First, check that the node currently over the low watermark double freeDiskPercentage = usage.getFreeDiskAsPercentage(); // Cache the used disk percentage for displaying disk percentages consistent with documentation double usedDiskPercentage = usage.getUsedDiskAsPercentage(); long freeBytes = usage.getFreeBytes(); if (logger.isTraceEnabled()) { logger.trace("node [{}] has {}% used disk", 
node.nodeId(), usedDiskPercentage); } // flag that determines whether the low threshold checks below can be skipped. We use this for a primary shard that is freshly // allocated and empty. boolean skipLowTresholdChecks = shardRouting.primary() && shardRouting.active() == false && shardRouting.recoverySource().getType() == RecoverySource.Type.EMPTY_STORE; // checks for exact byte comparisons if (freeBytes < diskThresholdSettings.getFreeBytesThresholdLow().getBytes()) { if (skipLowTresholdChecks == false) { if (logger.isDebugEnabled()) { logger.debug("less than the required {} free bytes threshold ({} bytes free) on node {}, preventing allocation", diskThresholdSettings.getFreeBytesThresholdLow(), freeBytes, node.nodeId()); } return allocation.decision(Decision.NO, NAME, "the node is above the low watermark [%s=%s], having less than the minimum required [%s] free space, actual free: [%s]", CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), diskThresholdSettings.getLowWatermarkRaw(), diskThresholdSettings.getFreeBytesThresholdLow(), new ByteSizeValue(freeBytes)); } else if (freeBytes > diskThresholdSettings.getFreeBytesThresholdHigh().getBytes()) { // Allow the shard to be allocated because it is primary that // has never been allocated if it's under the high watermark if (logger.isDebugEnabled()) { logger.debug("less than the required {} free bytes threshold ({} bytes free) on node {}, " + "but allowing allocation because primary has never been allocated", diskThresholdSettings.getFreeBytesThresholdLow(), freeBytes, node.nodeId()); } return allocation.decision(Decision.YES, NAME, "the node is above the low watermark, but less than the high watermark, and this primary shard has " + "never been allocated before"); } else { // Even though the primary has never been allocated, the node is // above the high watermark, so don't allow allocating the shard if (logger.isDebugEnabled()) { logger.debug("less than the required {} free bytes threshold ({} bytes free) 
on node {}, " + "preventing allocation even though primary has never been allocated", diskThresholdSettings.getFreeBytesThresholdHigh(), freeBytes, node.nodeId()); } return allocation.decision(Decision.NO, NAME, "the node is above the high watermark [%s=%s], having less than the minimum required [%s] free space, " + "actual free: [%s]", CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), diskThresholdSettings.getHighWatermarkRaw(), diskThresholdSettings.getFreeBytesThresholdHigh(), new ByteSizeValue(freeBytes)); } } // checks for percentage comparisons if (freeDiskPercentage < diskThresholdSettings.getFreeDiskThresholdLow()) { // If the shard is a replica or is a non-empty primary, check the low threshold if (skipLowTresholdChecks == false) { if (logger.isDebugEnabled()) { logger.debug("more than the allowed {} used disk threshold ({} used) on node [{}], preventing allocation", Strings.format1Decimals(usedDiskThresholdLow, "%"), Strings.format1Decimals(usedDiskPercentage, "%"), node.nodeId()); } return allocation.decision(Decision.NO, NAME, "the node is above the low watermark [%s=%s], using more disk space than the maximum allowed [%s%%], " + "actual free: [%s%%]", CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), diskThresholdSettings.getLowWatermarkRaw(), usedDiskThresholdLow, freeDiskPercentage); } else if (freeDiskPercentage > diskThresholdSettings.getFreeDiskThresholdHigh()) { // Allow the shard to be allocated because it is primary that // has never been allocated if it's under the high watermark if (logger.isDebugEnabled()) { logger.debug("more than the allowed {} used disk threshold ({} used) on node [{}], " + "but allowing allocation because primary has never been allocated", Strings.format1Decimals(usedDiskThresholdLow, "%"), Strings.format1Decimals(usedDiskPercentage, "%"), node.nodeId()); } return allocation.decision(Decision.YES, NAME, "the node is above the low watermark, but less than the high watermark, and this primary 
shard has " + "never been allocated before"); } else { // Even though the primary has never been allocated, the node is // above the high watermark, so don't allow allocating the shard if (logger.isDebugEnabled()) { logger.debug("less than the required {} free bytes threshold ({} bytes free) on node {}, " + "preventing allocation even though primary has never been allocated", Strings.format1Decimals(diskThresholdSettings.getFreeDiskThresholdHigh(), "%"), Strings.format1Decimals(freeDiskPercentage, "%"), node.nodeId()); } return allocation.decision(Decision.NO, NAME, "the node is above the high watermark [%s=%s], using more disk space than the maximum allowed [%s%%], " + "actual free: [%s%%]", CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), diskThresholdSettings.getHighWatermarkRaw(), usedDiskThresholdHigh, freeDiskPercentage); } } // Secondly, check that allocating the shard to this node doesn't put it above the high watermark final long shardSize = getExpectedShardSize(shardRouting, allocation, 0); double freeSpaceAfterShard = freeDiskPercentageAfterShardAssigned(usage, shardSize); long freeBytesAfterShard = freeBytes - shardSize; if (freeBytesAfterShard < diskThresholdSettings.getFreeBytesThresholdHigh().getBytes()) { logger.warn("after allocating, node [{}] would have less than the required " + "{} free bytes threshold ({} bytes free), preventing allocation", node.nodeId(), diskThresholdSettings.getFreeBytesThresholdHigh(), freeBytesAfterShard); return allocation.decision(Decision.NO, NAME, "allocating the shard to this node will bring the node above the high watermark [%s=%s] " + "and cause it to have less than the minimum required [%s] of free space (free bytes after shard added: [%s])", CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), diskThresholdSettings.getHighWatermarkRaw(), diskThresholdSettings.getFreeBytesThresholdHigh(), new ByteSizeValue(freeBytesAfterShard)); } if (freeSpaceAfterShard < 
diskThresholdSettings.getFreeDiskThresholdHigh()) {
    // NOTE(review): this chunk begins mid-method — the tail of canAllocate(); its signature and the
    // computation of freeSpaceAfterShard / usedDiskThresholdHigh / freeBytesAfterShard are outside this view.
    logger.warn("after allocating, node [{}] would have more than the allowed " +
                "{} free disk threshold ({} free), preventing allocation",
        node.nodeId(), Strings.format1Decimals(diskThresholdSettings.getFreeDiskThresholdHigh(), "%"),
        Strings.format1Decimals(freeSpaceAfterShard, "%"));
    return allocation.decision(Decision.NO, NAME,
        "allocating the shard to this node will bring the node above the high watermark [%s=%s] " +
            "and cause it to use more disk space than the maximum allowed [%s%%] (free space after shard added: [%s%%])",
        CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(),
        diskThresholdSettings.getHighWatermarkRaw(), usedDiskThresholdHigh, freeSpaceAfterShard);
}
// Neither byte- nor percentage-based threshold would be exceeded after adding the shard.
return allocation.decision(Decision.YES, NAME,
    "enough disk for shard on node, free: [%s], shard size: [%s], free after allocating shard: [%s]",
    new ByteSizeValue(freeBytes), new ByteSizeValue(shardSize), new ByteSizeValue(freeBytesAfterShard));
}

/**
 * Decides whether an already-allocated shard may remain on its node. Returns NO once the node is
 * above the high watermark, measured either as free bytes or as free-disk percentage; YES otherwise.
 * Only the most-utilized data path is considered (least-available disk usages).
 */
@Override
public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
    if (shardRouting.currentNodeId().equals(node.nodeId()) == false) {
        throw new IllegalArgumentException("Shard [" + shardRouting + "] is not allocated on node: [" + node.nodeId() + "]");
    }
    final ClusterInfo clusterInfo = allocation.clusterInfo();
    final ImmutableOpenMap<String, DiskUsage> usages = clusterInfo.getNodeLeastAvailableDiskUsages();
    // Fast-path YES decisions (decider disabled, single node, no cluster info / usages).
    final Decision decision = earlyTerminate(allocation, usages);
    if (decision != null) {
        return decision;
    }
    // subtractLeavingShards is passed as true here, since this is only for shards remaining, we will *eventually* have enough disk
    // since shards are moving away. No new shards will be incoming since in canAllocate we pass false for this check.
    final DiskUsage usage = getDiskUsage(node, allocation, usages, true);
    final String dataPath = clusterInfo.getDataPath(shardRouting);
    // If this node is already above the high threshold, the shard cannot remain (get it off!)
    final double freeDiskPercentage = usage.getFreeDiskAsPercentage();
    final long freeBytes = usage.getFreeBytes();
    if (logger.isTraceEnabled()) {
        logger.trace("node [{}] has {}% free disk ({} bytes)", node.nodeId(), freeDiskPercentage, freeBytes);
    }
    // The shard lives on a different data path than the node's most-utilized one; no need to move it.
    if (dataPath == null || usage.getPath().equals(dataPath) == false) {
        return allocation.decision(Decision.YES, NAME,
            "this shard is not allocated on the most utilized disk and can remain");
    }
    // Absolute free-bytes form of the high watermark.
    if (freeBytes < diskThresholdSettings.getFreeBytesThresholdHigh().getBytes()) {
        if (logger.isDebugEnabled()) {
            logger.debug("less than the required {} free bytes threshold ({} bytes free) on node {}, shard cannot remain",
                diskThresholdSettings.getFreeBytesThresholdHigh(), freeBytes, node.nodeId());
        }
        return allocation.decision(Decision.NO, NAME,
            "the shard cannot remain on this node because it is above the high watermark [%s=%s] " +
                "and there is less than the required [%s] free space on node, actual free: [%s]",
            CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(),
            diskThresholdSettings.getHighWatermarkRaw(), diskThresholdSettings.getFreeBytesThresholdHigh(),
            new ByteSizeValue(freeBytes));
    }
    // Percentage form of the high watermark.
    if (freeDiskPercentage < diskThresholdSettings.getFreeDiskThresholdHigh()) {
        if (logger.isDebugEnabled()) {
            logger.debug("less than the required {}% free disk threshold ({}% free) on node {}, shard cannot remain",
                diskThresholdSettings.getFreeDiskThresholdHigh(), freeDiskPercentage, node.nodeId());
        }
        return allocation.decision(Decision.NO, NAME,
            "the shard cannot remain on this node because it is above the high watermark [%s=%s] " +
                "and there is less than the required [%s%%] free disk on node, actual free: [%s%%]",
            CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(),
            diskThresholdSettings.getHighWatermarkRaw(), diskThresholdSettings.getFreeDiskThresholdHigh(),
            freeDiskPercentage);
    }
    return allocation.decision(Decision.YES, NAME,
        "there is enough disk on this node for the shard to remain, free: [%s]", new ByteSizeValue(freeBytes));
}

/**
 * Resolves the {@link DiskUsage} to use for {@code node}: the reported usage when available,
 * otherwise the cluster-wide average (see {@link #averageUsage}). When relocations are included
 * in the settings, the size of relocating shards is subtracted from the free bytes.
 *
 * @param subtractLeavingShards whether shards leaving the node reduce the counted relocation size
 */
private DiskUsage getDiskUsage(RoutingNode node, RoutingAllocation allocation,
                               ImmutableOpenMap<String, DiskUsage> usages, boolean subtractLeavingShards) {
    DiskUsage usage = usages.get(node.nodeId());
    if (usage == null) {
        // If there is no usage, and we have other nodes in the cluster,
        // use the average usage for all nodes as the usage for this node
        usage = averageUsage(node, usages);
        if (logger.isDebugEnabled()) {
            logger.debug("unable to determine disk usage for {}, defaulting to average across nodes [{} total] [{} free] [{}% free]",
                node.nodeId(), usage.getTotalBytes(), usage.getFreeBytes(), usage.getFreeDiskAsPercentage());
        }
    }
    if (diskThresholdSettings.includeRelocations()) {
        // Pretend the in-flight relocations have already consumed their space.
        long relocatingShardsSize = sizeOfRelocatingShards(node, allocation, subtractLeavingShards, usage.getPath());
        DiskUsage usageIncludingRelocations = new DiskUsage(node.nodeId(), node.node().getName(),
            usage.getPath(), usage.getTotalBytes(), usage.getFreeBytes() - relocatingShardsSize);
        if (logger.isTraceEnabled()) {
            logger.trace("usage without relocations: {}", usage);
            logger.trace("usage with relocations: [{} bytes] {}", relocatingShardsSize, usageIncludingRelocations);
        }
        usage = usageIncludingRelocations;
    }
    return usage;
}

/**
 * Returns a {@link DiskUsage} for the {@link RoutingNode} using the
 * average usage of other nodes in the disk usage map.
 *
 * @param node Node to return an averaged DiskUsage object for
 * @param usages Map of nodeId to DiskUsage for all known nodes
 * @return DiskUsage representing given node using the average disk usage
 */
DiskUsage averageUsage(RoutingNode node, ImmutableOpenMap<String, DiskUsage> usages) {
    if (usages.size() == 0) {
        // No data at all: report a zero-sized "_na_" path (0/0 total/free).
        return new DiskUsage(node.nodeId(), node.node().getName(), "_na_", 0, 0);
    }
    long totalBytes = 0;
    long freeBytes = 0;
    for (ObjectCursor<DiskUsage> du : usages.values()) {
        totalBytes += du.value.getTotalBytes();
        freeBytes += du.value.getFreeBytes();
    }
    return new DiskUsage(node.nodeId(), node.node().getName(), "_na_",
        totalBytes / usages.size(), freeBytes / usages.size());
}

/**
 * Given the DiskUsage for a node and the size of the shard, return the
 * percentage of free disk if the shard were to be allocated to the node.
 *
 * @param usage A DiskUsage for the node to have space computed for
 * @param shardSize Size in bytes of the shard (null is treated as 0)
 * @return Percentage of free space after the shard is assigned to the node
 */
double freeDiskPercentageAfterShardAssigned(DiskUsage usage, Long shardSize) {
    shardSize = (shardSize == null) ? 0 : shardSize;
    DiskUsage newUsage = new DiskUsage(usage.getNodeId(), usage.getNodeName(), usage.getPath(),
        usage.getTotalBytes(), usage.getFreeBytes() - shardSize);
    return newUsage.getFreeDiskAsPercentage();
}

/**
 * Shared fast-path for canAllocate/canRemain: returns a YES decision when the decider is disabled,
 * only one data node exists, or there is no cluster/disk-usage information ("fail open").
 * Returns {@code null} when the normal threshold checks should proceed.
 */
private Decision earlyTerminate(RoutingAllocation allocation, ImmutableOpenMap<String, DiskUsage> usages) {
    // Always allow allocation if the decider is disabled
    if (diskThresholdSettings.isEnabled() == false) {
        return allocation.decision(Decision.YES, NAME, "the disk threshold decider is disabled");
    }

    // Allow allocation regardless if only a single data node is available
    if (allocation.nodes().getDataNodes().size() <= 1) {
        if (logger.isTraceEnabled()) {
            logger.trace("only a single data node is present, allowing allocation");
        }
        return allocation.decision(Decision.YES, NAME, "there is only a single data node present");
    }

    // Fail open there is no info available
    final ClusterInfo clusterInfo = allocation.clusterInfo();
    if (clusterInfo == null) {
        if (logger.isTraceEnabled()) {
            logger.trace("cluster info unavailable for disk threshold decider, allowing allocation.");
        }
        return allocation.decision(Decision.YES, NAME, "the cluster info is unavailable");
    }

    // Fail open if there are no disk usages available
    if (usages.isEmpty()) {
        if (logger.isTraceEnabled()) {
            logger.trace("unable to determine disk usages for disk-aware allocation, allowing allocation");
        }
        return allocation.decision(Decision.YES, NAME, "disk usages are unavailable");
    }
    return null;
}

/**
 * Returns the expected shard size for the given shard or the default value provided if not enough information are available
 * to estimate the shards size.
 */
public static long getExpectedShardSize(ShardRouting shard, RoutingAllocation allocation, long defaultValue) {
    final IndexMetaData metaData = allocation.metaData().getIndexSafe(shard.index());
    final ClusterInfo info = allocation.clusterInfo();
    if (metaData.getMergeSourceIndex() != null && shard.active() == false &&
        shard.recoverySource().getType() == RecoverySource.Type.LOCAL_SHARDS) {
        // in the shrink index case we sum up the source index shards since we basically make a copy of the shard in
        // the worst case
        long targetShardSize = 0;
        final Index mergeSourceIndex = metaData.getMergeSourceIndex();
        final IndexMetaData sourceIndexMeta = allocation.metaData().getIndexSafe(mergeSourceIndex);
        final Set<ShardId> shardIds = IndexMetaData.selectShrinkShards(shard.id(), sourceIndexMeta, metaData.getNumberOfShards());
        for (IndexShardRoutingTable shardRoutingTable : allocation.routingTable().index(mergeSourceIndex.getName())) {
            if (shardIds.contains(shardRoutingTable.shardId())) {
                // Only primaries contribute; missing sizes count as 0.
                targetShardSize += info.getShardSize(shardRoutingTable.primaryShard(), 0);
            }
        }
        return targetShardSize == 0 ? defaultValue : targetShardSize;
    } else {
        return info.getShardSize(shard, defaultValue);
    }
}
}
package romelo333.rflux.blocks;

import elucent.albedo.lighting.ILightProvider;
import elucent.albedo.lighting.Light;
import mcjty.lib.bindings.DefaultValue;
import mcjty.lib.bindings.IValue;
import mcjty.lib.blocks.BaseBlock;
import mcjty.lib.blocks.GenericBlock;
import mcjty.lib.tileentity.GenericEnergyReceiverTileEntity;
import mcjty.lib.typed.Key;
import mcjty.lib.typed.Type;
import net.minecraft.block.state.IBlockState;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.ITickable;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
import net.minecraftforge.fml.common.Optional;
import romelo333.rflux.ConfigSetup;
import romelo333.rflux.ModBlocks;

/**
 * Tile entity for an RF-powered light block. Each tick (server side) it drains RF according to the
 * current {@link LightMode}; while powered ("lit") it swaps its own block for the lit variant and
 * scatters invisible light blocks around itself to illuminate the area. Optionally integrates with
 * the Albedo colored-lighting mod via {@link ILightProvider}.
 */
@Optional.InterfaceList({
        @Optional.Interface(iface = "elucent.albedo.lighting.ILightProvider", modid = "albedo")
})
public class LightTE extends GenericEnergyReceiverTileEntity implements ITickable, ILightProvider {

    // Current light color (persisted in the "restorable" NBT).
    private BlockColor color = BlockColor.WHITE;
    // Cached Albedo Light instance; declared Object so the class loads without Albedo present.
    private Object light = null;
    // Whether the light is currently on (persisted in the regular NBT).
    private boolean lit = false;
    // Operating mode; determines RF usage and the range of the invisible light blocks.
    private LightMode mode = LightMode.MODE_NORMAL;
    // Countdown used to periodically re-verify that the invisible light blocks still exist.
    private int checkLitCounter = 10;

    // Typed key used by the mcjty-lib GUI binding layer for the mode value.
    public static final Key<Integer> VALUE_MODE = new Key<>("mode", Type.INTEGER);

    @Override
    public IValue<?>[] getValues() {
        return new IValue[] {
                new DefaultValue<>(VALUE_RSMODE, this::getRSModeInt, this::setRSModeInt),
                new DefaultValue<>(VALUE_MODE, () -> this.getMode().ordinal(), (v) -> this.setMode(LightMode.values()[v])),
        };
    }

    @Override
    protected boolean needsRedstoneMode() {
        return true;
    }

    public LightTE() {
        super(ConfigSetup.LIGHTBLOCK_MAXRF, ConfigSetup.LIGHTBLOCK_RECEIVEPERTICK);
    }

    public boolean isLit() {
        return lit;
    }

    /**
     * Albedo integration: lazily builds (and caches) the colored Light for this block.
     * Brightness radius scales with the mode (16 / 20 / 24 blocks). Returns null while unlit.
     */
    @Optional.Method(modid = "albedo")
    @Override
    public Light provideLight() {
        if (light == null) {
            if (lit) {
                light = new Light(pos.getX(), pos.getY(), pos.getZ(), color.getR(), color.getG(), color.getB(), 1.0f,
                        mode == LightMode.MODE_NORMAL ? 16.0f : mode == LightMode.MODE_EXTENDED ? 20.0f : 24.0f);
            }
        }
        return (Light) light;
    }

    @Override
    public void update() {
        if (!getWorld().isRemote) {
            boolean newlit = isMachineEnabled();
            if (newlit) {
                // We are still potentially lit so do this: drain this mode's RF, or go dark if we can't afford it.
                int rf = mode.getRfUsage();
                if (storage.getEnergyStored() >= rf) {
                    storage.extractEnergy(rf, false);
                } else {
                    newlit = false;
                }
            }
            if (newlit != lit) {
                // State has changed so we must update: swap to the lit/unlit block variant,
                // preserving FACING when the block is rotatable.
                lit = newlit;
                light = null;
                IBlockState oldState = getWorld().getBlockState(pos);
                GenericLightBlock block = (GenericLightBlock) oldState.getBlock();
                if (block.getRotationType() == BaseBlock.RotationType.NONE) {
                    if (lit) {
                        getWorld().setBlockState(pos, block.getLitBlock().getDefaultState(), 3);
                    } else {
                        getWorld().setBlockState(pos, block.getUnlitBlock().getDefaultState(), 3);
                    }
                } else {
                    if (lit) {
                        getWorld().setBlockState(pos, block.getLitBlock().getDefaultState().withProperty(GenericBlock.FACING, oldState.getValue(GenericBlock.FACING)), 3);
                    } else {
                        getWorld().setBlockState(pos, block.getUnlitBlock().getDefaultState().withProperty(GenericBlock.FACING, oldState.getValue(GenericBlock.FACING)), 3);
                    }
                }
                // Restore the TE, needed since our block has changed
                this.validate();
                getWorld().setTileEntity(pos, this);
                markDirtyClient();
                updateLightBlocks(lit);
            } else if (lit) {
                // We are lit, check that our blocks are still there (every 10 ticks).
                checkLitCounter--;
                if (checkLitCounter <= 0) {
                    checkLitCounter = 10;
                    updateLightBlocks(lit);
                }
            }
        }
    }

    /**
     * Places (lit == true) or removes (lit == false) invisible light blocks at the 26 positions
     * offset by -range/0/+range on each axis around this block (the loop steps by 'range', so only
     * the corners/edges/faces of the cube at distance 'range' are visited, never intermediate cells).
     * When a target spot is occupied, the six adjacent air positions are tried instead.
     */
    private void updateLightBlocks(boolean lit) {
        BlockPos.MutableBlockPos lpos = new BlockPos.MutableBlockPos();
        int range = mode.getRange();
        if (range == 0) {
            // Mode produces no area light; nothing to place or clean up.
            return;
        } else {
            for (int x = -range; x <= range; x += range) {
                for (int y = -range; y <= range; y += range) {
                    for (int z = -range; z <= range; z += range) {
                        if (x != 0 || y != 0 || z != 0) {
                            if (lit) {
                                lpos.setPos(pos.getX() + x, pos.getY() + y, pos.getZ() + z);
                                if (!isInvisibleLight(lpos)) {
                                    if (getWorld().isAirBlock(lpos)) {
                                        // This is not a light block but it is air. We can place a block
                                        setInvisibleBlock(lpos);
                                    } else {
                                        // Not a light block and not air. Check adjacent locations
                                        for (EnumFacing facing : EnumFacing.VALUES) {
                                            BlockPos npos = lpos.offset(facing);
                                            if (!isInvisibleLight(npos) && getWorld().isAirBlock(npos)) {
                                                setInvisibleBlock(npos);
                                            }
                                        }
                                    }
                                }
                            } else {
                                // Unlit: clear this position and any adjacent invisible light blocks.
                                lpos.setPos(pos.getX() + x, pos.getY() + y, pos.getZ() + z);
                                if (isInvisibleLight(lpos)) {
                                    getWorld().setBlockToAir(lpos);
                                }
                                for (EnumFacing facing : EnumFacing.VALUES) {
                                    BlockPos npos = lpos.offset(facing);
                                    if (isInvisibleLight(npos)) {
                                        getWorld().setBlockToAir(npos);
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    // Places an invisible light block at the given position (flag 3 = notify + client update).
    private boolean setInvisibleBlock(BlockPos npos) {
        return getWorld().setBlockState(npos, ModBlocks.invisibleLightBlock.getDefaultState(), 3);
    }

    // True when the block at the given position is our invisible light block.
    private boolean isInvisibleLight(BlockPos lpos) {
        return getWorld().getBlockState(lpos).getBlock() == ModBlocks.invisibleLightBlock;
    }

    @Override
    public void onBlockBreak(World world, BlockPos pos, IBlockState state) {
        // Clean up the scattered invisible light blocks when the host block is broken.
        updateLightBlocks(false);
    }

    /**
     * Switches the operating mode. Temporarily forces the unlit state so that the invisible light
     * blocks of the OLD mode's range are removed before the mode changes; the periodic check in
     * update() re-places blocks for the new range.
     */
    public void setMode(LightMode mode) {
        if (mode == this.mode) {
            return;
        }
        light = null;
        boolean oldlit = lit;
        this.lit = false;
        // Force a relight
        updateLightBlocks(lit);
        this.mode = mode;
        this.lit = oldlit;
        markDirty();
    }

    public LightMode getMode() {
        return mode;
    }

    public BlockColor getColor() {
        return color;
    }

    public void setColor(BlockColor color) {
        this.color = color;
        // Invalidate the cached Albedo light so it is rebuilt with the new color.
        this.light = null;
        markDirtyClient();
    }

    @Override
    public void readFromNBT(NBTTagCompound tagCompound) {
        super.readFromNBT(tagCompound);
        lit = tagCompound.getBoolean("lit");
    }

    @Override
    public NBTTagCompound writeToNBT(NBTTagCompound tagCompound) {
        super.writeToNBT(tagCompound);
        tagCompound.setBoolean("lit", lit);
        return tagCompound;
    }

    @Override
    public void readRestorableFromNBT(NBTTagCompound tagCompound) {
        super.readRestorableFromNBT(tagCompound);
        // Mode is stored as a byte, color as an int (matches writeRestorableToNBT below).
        mode = LightMode.values()[tagCompound.getByte("mode")];
        color = BlockColor.values()[tagCompound.getInteger("color")];
    }

    @Override
    public void writeRestorableToNBT(NBTTagCompound tagCompound) {
        super.writeRestorableToNBT(tagCompound);
        tagCompound.setInteger("color", color.ordinal());
        tagCompound.setByte("mode", (byte) mode.ordinal());
    }
}
/*
 * Copyright 2015 JBoss, by Red Hat, Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.uberfire.ext.editor.commons.client.history;

import java.util.List;
import javax.enterprise.event.Event;
import javax.enterprise.event.Observes;
import javax.inject.Inject;

import org.jboss.errai.common.client.api.Caller;
import org.jboss.errai.common.client.api.RemoteCallback;
import org.uberfire.backend.vfs.ObservablePath;
import org.uberfire.backend.vfs.Path;
import org.uberfire.client.callbacks.Callback;
import org.uberfire.commons.validation.PortablePreconditions;
import org.uberfire.ext.editor.commons.client.file.RestoreUtil;
import org.uberfire.ext.editor.commons.client.file.popups.RestorePopUpPresenter;
import org.uberfire.ext.editor.commons.client.history.event.VersionSelectedEvent;
import org.uberfire.ext.editor.commons.version.VersionService;
import org.uberfire.ext.editor.commons.version.events.RestoreEvent;
import org.uberfire.java.nio.base.version.VersionRecord;
import org.uberfire.mvp.Command;
import org.uberfire.workbench.model.menu.MenuItem;

/**
 * Coordinates the file-version UI for an editor: keeps the known version history, the currently
 * selected version, and the path to the latest revision; drives the version drop-down menu and the
 * Save/Restore button; and mediates version selection and restore via CDI events.
 */
public class VersionRecordManager {

    private VersionMenuDropDownButton versionMenuDropDownButton;
    private Event<VersionSelectedEvent> versionSelectedEvent;
    private RestorePopUpPresenter restorePopUpPresenter;
    private RestoreUtil restoreUtil;
    private Caller<VersionService> versionService;

    // Client callback invoked when a version is selected for the path this manager owns.
    private Callback<VersionRecord> selectionCallback;
    // Known history, oldest first — the LAST element is the latest version (see getLatestVersionRecord).
    private List<VersionRecord> versions;
    // Path to the latest revision of the file (version-aware ObservablePath).
    private ObservablePath pathToLatest;
    // Id of the currently selected version; null until resolved.
    private String version;
    private SaveButton saveButton;

    @Inject
    public VersionRecordManager( final VersionMenuDropDownButton versionMenuDropDownButton,
                                 final SaveButton saveButton,
                                 final RestorePopUpPresenter restorePopUpPresenter,
                                 final RestoreUtil restoreUtil,
                                 final Event<VersionSelectedEvent> versionSelectedEvent,
                                 final Caller<VersionService> versionService ) {
        this.restorePopUpPresenter = restorePopUpPresenter;
        this.versionMenuDropDownButton = versionMenuDropDownButton;
        this.saveButton = saveButton;
        this.versionSelectedEvent = versionSelectedEvent;
        // Menu selections are turned into VersionSelectedEvents; the matching observer below
        // (onVersionSelectedEvent) forwards them to the client's selectionCallback.
        versionMenuDropDownButton.addSelectionCallback( new Callback<VersionRecord>() {
            @Override
            public void callback( VersionRecord versionRecord ) {
                fireVersionSelected( versionRecord );
            }
        } );
        this.restoreUtil = restoreUtil;
        this.versionService = versionService;
    }

    private void fireVersionSelected( final VersionRecord versionRecord ) {
        versionSelectedEvent.fire( new VersionSelectedEvent( getPathToLatest(),
                                                             versionRecord ) );
    }

    /**
     * (Re)initializes the manager for a file.
     *
     * @param version           version id to select, or null to select the latest
     * @param path              path to the file (used as path-to-latest when version is null)
     * @param selectionCallback invoked whenever a version of this file is selected
     */
    public void init( final String version,
                      final ObservablePath path,
                      final Callback<VersionRecord> selectionCallback ) {
        clear();
        PortablePreconditions.checkNotNull( "path",
                                            path );
        this.selectionCallback = PortablePreconditions.checkNotNull( "selectionCallback",
                                                                     selectionCallback );
        this.version = version;
        if ( version == null ) {
            setPathToLatest( path );
        }
        loadVersions( path );
    }

    public MenuItem buildMenu() {
        return new VersionMenuItem( versionMenuDropDownButton );
    }

    public void setVersions( final List<VersionRecord> versions ) {
        // No explicit selection yet: default to the newest (last) record.
        if ( version == null ) {
            version = versions.get( versions.size() - 1 ).id();
        }
        setVersions( versions,
                     version );
    }

    private void setVersions( final List<VersionRecord> versions,
                              final String version ) {
        PortablePreconditions.checkNotNull( "versions",
                                            versions );
        resolveVersions( versions );
        updateSaveButtonText();
        versionMenuDropDownButton.setItems( versions );
        versionMenuDropDownButton.setVersion( version );
    }

    public void setShowMoreCommand( final Command showMore ) {
        versionMenuDropDownButton.setShowMoreCommand( showMore );
    }

    private void resolveVersions( final List<VersionRecord> versions ) {
        // Only adopt the new list when it is the first one or contains MORE history than what we
        // already hold (incremental "show more" loads must not shrink the known history).
        if ( this.versions == null || versions.size() > this.versions.size() ) {
            this.versions = versions;
        }
    }

    public MenuItem newSaveMenuItem( final Command command ) {
        saveButton.setCommand( command );
        return saveButton;
    }

    public boolean isLatest( final VersionRecord versionRecord ) {
        return versions.get( versions.size() - 1 ).id().equals( versionRecord.id() );
    }

    private void setPathToLatest( final ObservablePath pathToLatest ) {
        this.pathToLatest = PortablePreconditions.checkNotNull( "pathToLatest",
                                                                pathToLatest );
    }

    public ObservablePath getPathToLatest() {
        return pathToLatest;
    }

    public void onVersionSelectedEvent( final @Observes VersionSelectedEvent event ) {
        // Only react to selections for the file this manager instance owns.
        if ( event.getPathToFile().equals( getPathToLatest() )
                && selectionCallback != null ) {
            selectionCallback.callback( event.getVersionRecord() );
        }
    }

    public void setVersion( final String version ) {
        this.version = PortablePreconditions.checkNotNull( "version",
                                                           version );
        versionMenuDropDownButton.setVersion( version );
        updateSaveButtonText();
    }

    // Save button reads "Save" on the latest version and "Restore" on a historical one.
    void updateSaveButtonText() {
        if ( saveButton != null ) {
            if ( isCurrentLatest() ) {
                saveButton.setTextToSave();
            } else if ( versions != null ) {
                saveButton.setTextToRestore();
            }
        }
    }

    public String getVersion() {
        return version;
    }

    /**
     * Path for the currently selected version: the path-to-latest when the latest is selected,
     * otherwise a path derived from the selected version record's URI.
     */
    public ObservablePath getCurrentPath() {
        if ( isCurrentLatest() ) {
            return getPathToLatest();
        } else {
            return restoreUtil.createObservablePath( getPathToLatest(),
                                                     getCurrentVersionRecordUri() );
        }
    }

    public boolean isCurrentLatest() {
        // Before the history loads (versions == null) we assume the latest is shown.
        return versions == null || getLatestVersionRecord().id().equals( version );
    }

    private VersionRecord getLatestVersionRecord() {
        return versions.get( versions.size() - 1 );
    }

    private String getCurrentVersionRecordUri() {
        VersionRecord record = getCurrentVersionRecord();
        if ( record == null ) {
            // Selected id not in the loaded history: fall back to the latest path's URI.
            return getPathToLatest().toURI();
        } else {
            return record.uri();
        }
    }

    private VersionRecord getCurrentVersionRecord() {
        for ( VersionRecord versionRecord : versions ) {
            if ( versionRecord.id().equals( version ) ) {
                return versionRecord;
            }
        }
        return null;
    }

    public void restoreToCurrentVersion() {
        restorePopUpPresenter.show( getCurrentPath(),
                                    getCurrentVersionRecordUri() );
    }

    private void loadVersions( final ObservablePath path ) {
        loadVersions( path,
                      new Callback<List<VersionRecord>>() {
                          @Override
                          public void callback( final List<VersionRecord> records ) {
                              doesTheVersionExist( records );
                          }
                      } );
    }

    // Sanity check after the initial load: the requested version id must be in the history.
    private void doesTheVersionExist( final List<VersionRecord> records ) {
        boolean found = false;
        for ( VersionRecord versionRecord : records ) {
            if ( versionRecord.id().equals( version ) ) {
                found = true;
                break;
            }
        }
        if ( !found ) {
            throw new IllegalArgumentException( "Unknown version" );
        }
    }

    public void reloadVersions( final Path path ) {
        loadVersions( path,
                      new Callback<List<VersionRecord>>() {
                          @Override
                          public void callback( final List<VersionRecord> records ) {
                              // After a reload, select the newest record.
                              setVersion( records.get( records.size() - 1 ).id() );
                          }
                      } );
    }

    private void loadVersions( final Path path,
                               final Callback<List<VersionRecord>> callback ) {
        versionService.call( new RemoteCallback<List<VersionRecord>>() {
            @Override
            public void callback( final List<VersionRecord> records ) {
                String uri = path.toURI();
                // We should not recreate the path to latest,
                // since the new path instance will not have version support
                if ( !path.equals( pathToLatest ) ) {
                    setPathToLatest( restoreUtil.createObservablePath( path,
                                                                       uri ) );
                }
                setVersions( records );
                callback.callback( records );
            }
        } ).getVersions( path );
    }

    private void onRestore( final @Observes RestoreEvent restore ) {
        // A restore turned the selected version into the latest; flip the button back to "Save".
        if ( getCurrentPath() != null
                && getCurrentPath().equals( restore.getPath() )
                && saveButton != null ) {
            saveButton.setTextToSave();
        }
    }

    public void clear() {
        selectionCallback = null;
        versions = null;
        pathToLatest = null;
        version = null;
        versionMenuDropDownButton.resetVersions();
    }
}
package com.smartdevicelink.test.rpc.responses;

import com.smartdevicelink.marshal.JsonRPCMarshaller;
import com.smartdevicelink.protocol.enums.FunctionID;
import com.smartdevicelink.proxy.RPCMessage;
import com.smartdevicelink.proxy.rpc.AudioPassThruCapabilities;
import com.smartdevicelink.proxy.rpc.ButtonCapabilities;
import com.smartdevicelink.proxy.rpc.DisplayCapabilities;
import com.smartdevicelink.proxy.rpc.PresetBankCapabilities;
import com.smartdevicelink.proxy.rpc.RegisterAppInterfaceResponse;
import com.smartdevicelink.proxy.rpc.SdlMsgVersion;
import com.smartdevicelink.proxy.rpc.SoftButtonCapabilities;
import com.smartdevicelink.proxy.rpc.VehicleType;
import com.smartdevicelink.proxy.rpc.enums.HmiZoneCapabilities;
import com.smartdevicelink.proxy.rpc.enums.Language;
import com.smartdevicelink.proxy.rpc.enums.PrerecordedSpeech;
import com.smartdevicelink.proxy.rpc.enums.SpeechCapabilities;
import com.smartdevicelink.proxy.rpc.enums.VrCapabilities;
import com.smartdevicelink.test.BaseRpcTests;
import com.smartdevicelink.test.JsonUtils;
import com.smartdevicelink.test.TestValues;
import com.smartdevicelink.test.Validator;
import com.smartdevicelink.test.json.rpc.JsonFileReader;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.junit.Test;

import java.util.ArrayList;
import java.util.Hashtable;
import java.util.List;

import static androidx.test.platform.app.InstrumentationRegistry.getInstrumentation;
import static junit.framework.TestCase.assertEquals;
import static junit.framework.TestCase.assertNotNull;
import static junit.framework.TestCase.assertNull;
import static junit.framework.TestCase.assertTrue;
import static junit.framework.TestCase.fail;

/**
 * This is a unit test class for the SmartDeviceLink library project class :
 * {@link com.smartdevicelink.proxy.rpc.RegisterAppInterfaceResponse}
 */
public class RegisterAppInterfaceResponseTest extends BaseRpcTests {

    /**
     * Builds the reference response populated with the shared TestValues constants; the base
     * class stores it in {@code msg} for use by the tests below.
     */
    @Override
    protected RPCMessage createMessage() {
        RegisterAppInterfaceResponse msg = new RegisterAppInterfaceResponse();

        msg.setSdlMsgVersion(TestValues.GENERAL_SDLMSGVERSION);
        msg.setLanguage(TestValues.GENERAL_LANGUAGE);
        msg.setHmiDisplayLanguage(TestValues.GENERAL_LANGUAGE);
        msg.setDisplayCapabilities(TestValues.GENERAL_DISPLAYCAPABILITIES);
        msg.setPresetBankCapabilities(TestValues.GENERAL_PRESETBANKCAPABILITIES);
        msg.setVehicleType(TestValues.GENERAL_VEHICLETYPE);
        msg.setButtonCapabilities(TestValues.GENERAL_BUTTONCAPABILITIES_LIST);
        msg.setSoftButtonCapabilities(TestValues.GENERAL_SOFTBUTTONCAPABILITIES_LIST);
        msg.setAudioPassThruCapabilities(TestValues.GENERAL_AUDIOPASSTHRUCAPABILITIES_LIST);
        msg.setPcmStreamingCapabilities(TestValues.GENERAL_AUDIOPASSTHRUCAPABILITIES);
        msg.setHmiZoneCapabilities(TestValues.GENERAL_HMIZONECAPABILITIES_LIST);
        msg.setSpeechCapabilities(TestValues.GENERAL_SPEECHCAPABILITIES_LIST);
        msg.setVrCapabilities(TestValues.GENERAL_VRCAPABILITIES_LIST);
        msg.setPrerecordedSpeech(TestValues.GENERAL_PRERECORDEDSPEECH_LIST);
        msg.setSupportedDiagModes(TestValues.GENERAL_INTEGER_LIST);
        msg.setIconResumed(TestValues.GENERAL_BOOLEAN);

        return msg;
    }

    @Override
    protected String getMessageType() {
        return RPCMessage.KEY_RESPONSE;
    }

    @Override
    protected String getCommandType() {
        return FunctionID.REGISTER_APP_INTERFACE.toString();
    }

    /**
     * Expected JSON parameters for the message built in {@link #createMessage()}; the base class
     * compares this against the serialized form of {@code msg}.
     */
    @Override
    protected JSONObject getExpectedParameters(int sdlVersion) {
        JSONObject result = new JSONObject();

        try {
            result.put(RegisterAppInterfaceResponse.KEY_LANGUAGE, TestValues.GENERAL_LANGUAGE);
            result.put(RegisterAppInterfaceResponse.KEY_HMI_DISPLAY_LANGUAGE, TestValues.GENERAL_LANGUAGE);
            result.put(RegisterAppInterfaceResponse.KEY_SUPPORTED_DIAG_MODES, JsonUtils.createJsonArray(TestValues.GENERAL_INTEGER_LIST));
            result.put(RegisterAppInterfaceResponse.KEY_SDL_MSG_VERSION, TestValues.JSON_SDLMSGVERSION);
            result.put(RegisterAppInterfaceResponse.KEY_VEHICLE_TYPE, TestValues.GENERAL_VEHICLETYPE.serializeJSON());
            result.put(RegisterAppInterfaceResponse.KEY_PRESET_BANK_CAPABILITIES, TestValues.JSON_PRESETBANKCAPABILITIES);
            result.put(RegisterAppInterfaceResponse.KEY_DISPLAY_CAPABILITIES, TestValues.JSON_DISPLAYCAPABILITIES);
            result.put(RegisterAppInterfaceResponse.KEY_BUTTON_CAPABILITIES, TestValues.JSON_BUTTONCAPABILITIES);
            result.put(RegisterAppInterfaceResponse.KEY_SOFT_BUTTON_CAPABILITIES, TestValues.JSON_SOFTBUTTONCAPABILITIES);
            result.put(RegisterAppInterfaceResponse.KEY_AUDIO_PASS_THRU_CAPABILITIES, TestValues.JSON_AUDIOPASSTHRUCAPABILITIES);
            result.put(RegisterAppInterfaceResponse.KEY_PCM_STREAM_CAPABILITIES, TestValues.JSON_PCMSTREAMCAPABILITIES);
            result.put(RegisterAppInterfaceResponse.KEY_SPEECH_CAPABILITIES, JsonUtils.createJsonArray(TestValues.GENERAL_SPEECHCAPABILITIES_LIST));
            result.put(RegisterAppInterfaceResponse.KEY_VR_CAPABILITIES, JsonUtils.createJsonArray(TestValues.GENERAL_VRCAPABILITIES_LIST));
            result.put(RegisterAppInterfaceResponse.KEY_HMI_ZONE_CAPABILITIES, JsonUtils.createJsonArray(TestValues.GENERAL_HMIZONECAPABILITIES_LIST));
            result.put(RegisterAppInterfaceResponse.KEY_PRERECORDED_SPEECH, JsonUtils.createJsonArray(TestValues.GENERAL_PRERECORDEDSPEECH_LIST));
            result.put(RegisterAppInterfaceResponse.KEY_ICON_RESUMED, TestValues.GENERAL_BOOLEAN);
        } catch (JSONException e) {
            fail(TestValues.JSON_FAIL);
        }

        return result;
    }

    /**
     * Tests the expected values of the RPC message.
     */
    @Test
    public void testRpcValues() {
        // Test Values — read every field back from the message built in createMessage().
        List<Integer> testSupportedDiagModes = ((RegisterAppInterfaceResponse) msg).getSupportedDiagModes();
        List<PrerecordedSpeech> testPrerecordedSpeech = ((RegisterAppInterfaceResponse) msg).getPrerecordedSpeech();
        List<VrCapabilities> testVrCapabilities = ((RegisterAppInterfaceResponse) msg).getVrCapabilities();
        List<SpeechCapabilities> testSpeechCapabilities = ((RegisterAppInterfaceResponse) msg).getSpeechCapabilities();
        List<HmiZoneCapabilities> testHmiZoneCapabilities = ((RegisterAppInterfaceResponse) msg).getHmiZoneCapabilities();
        List<SoftButtonCapabilities> testSoftButtonCapabilities = ((RegisterAppInterfaceResponse) msg).getSoftButtonCapabilities();
        List<ButtonCapabilities> testButtonCapabilities = ((RegisterAppInterfaceResponse) msg).getButtonCapabilities();
        VehicleType testVehicleType = ((RegisterAppInterfaceResponse) msg).getVehicleType();
        PresetBankCapabilities testPbc = ((RegisterAppInterfaceResponse) msg).getPresetBankCapabilities();
        DisplayCapabilities testDc = ((RegisterAppInterfaceResponse) msg).getDisplayCapabilities();
        Language testHmiLang = ((RegisterAppInterfaceResponse) msg).getHmiDisplayLanguage();
        Language testLang = ((RegisterAppInterfaceResponse) msg).getLanguage();
        SdlMsgVersion testMsgVersion = ((RegisterAppInterfaceResponse) msg).getSdlMsgVersion();
        List<AudioPassThruCapabilities> testAptc = ((RegisterAppInterfaceResponse) msg).getAudioPassThruCapabilities();
        AudioPassThruCapabilities testPcmStream = ((RegisterAppInterfaceResponse) msg).getPcmStreamingCapabilities();
        Boolean testIconResumed = ((RegisterAppInterfaceResponse) msg).getIconResumed();

        // Valid Tests — complex types go through Validator deep-compare helpers.
        assertEquals(TestValues.MATCH, TestValues.GENERAL_INTEGER_LIST, testSupportedDiagModes);
        assertEquals(TestValues.MATCH, TestValues.GENERAL_PRERECORDEDSPEECH_LIST, testPrerecordedSpeech);
        assertEquals(TestValues.MATCH, TestValues.GENERAL_VRCAPABILITIES_LIST, testVrCapabilities);
        assertEquals(TestValues.MATCH, TestValues.GENERAL_SPEECHCAPABILITIES_LIST, testSpeechCapabilities);
        assertEquals(TestValues.MATCH, TestValues.GENERAL_HMIZONECAPABILITIES_LIST, testHmiZoneCapabilities);
        assertTrue(TestValues.TRUE, Validator.validateSoftButtonCapabilities(TestValues.GENERAL_SOFTBUTTONCAPABILITIES_LIST, testSoftButtonCapabilities));
        assertTrue(TestValues.TRUE, Validator.validateButtonCapabilities(TestValues.GENERAL_BUTTONCAPABILITIES_LIST, testButtonCapabilities));
        assertTrue(TestValues.TRUE, Validator.validateVehicleType(TestValues.GENERAL_VEHICLETYPE, testVehicleType));
        assertTrue(TestValues.TRUE, Validator.validatePresetBankCapabilities(TestValues.GENERAL_PRESETBANKCAPABILITIES, testPbc));
        assertTrue(TestValues.TRUE, Validator.validateDisplayCapabilities(TestValues.GENERAL_DISPLAYCAPABILITIES, testDc));
        assertEquals(TestValues.MATCH, TestValues.GENERAL_LANGUAGE, testHmiLang);
        assertEquals(TestValues.MATCH, TestValues.GENERAL_LANGUAGE, testLang);
        assertTrue(TestValues.TRUE, Validator.validateSdlMsgVersion(TestValues.GENERAL_SDLMSGVERSION, testMsgVersion));
        assertTrue(TestValues.TRUE, Validator.validateAudioPassThruCapabilities(TestValues.GENERAL_AUDIOPASSTHRUCAPABILITIES_LIST, testAptc));
        assertTrue(TestValues.TRUE, Validator.validatePcmStreamCapabilities(TestValues.GENERAL_AUDIOPASSTHRUCAPABILITIES, testPcmStream));
        assertEquals(TestValues.MATCH, (Boolean) TestValues.GENERAL_BOOLEAN, testIconResumed);

        // Invalid/Null Tests — a freshly constructed response must return null for every field.
        RegisterAppInterfaceResponse msg = new RegisterAppInterfaceResponse();
        assertNotNull(TestValues.NOT_NULL, msg);
        testNullBase(msg);

        assertNull(TestValues.NULL, msg.getSdlMsgVersion());
        assertNull(TestValues.NULL, msg.getLanguage());
        assertNull(TestValues.NULL, msg.getHmiDisplayLanguage());
        assertNull(TestValues.NULL, msg.getDisplayCapabilities());
        assertNull(TestValues.NULL, msg.getPresetBankCapabilities());
        assertNull(TestValues.NULL, msg.getVehicleType());
        assertNull(TestValues.NULL, msg.getButtonCapabilities());
        assertNull(TestValues.NULL, msg.getSoftButtonCapabilities());
        assertNull(TestValues.NULL, msg.getAudioPassThruCapabilities());
        assertNull(TestValues.NULL, msg.getPcmStreamingCapabilities());
        assertNull(TestValues.NULL, msg.getHmiZoneCapabilities());
        assertNull(TestValues.NULL, msg.getSpeechCapabilities());
        assertNull(TestValues.NULL, msg.getVrCapabilities());
        assertNull(TestValues.NULL, msg.getPrerecordedSpeech());
        assertNull(TestValues.NULL, msg.getSupportedDiagModes());
        assertNull(TestValues.NULL, msg.getIconResumed());
    }

    /**
     * Tests a valid JSON construction of this RPC message.
     */
    @Test
    public void testJsonConstructor() {
        // Reference JSON fixture loaded from the instrumentation context's assets.
        JSONObject commandJson = JsonFileReader.readId(getInstrumentation().getContext(), getCommandType(), getMessageType());
        assertNotNull(TestValues.NOT_NULL, commandJson);

        try {
            Hashtable<String, Object> hash = JsonRPCMarshaller.deserializeJSONObject(commandJson);
            RegisterAppInterfaceResponse cmd = new RegisterAppInterfaceResponse(hash);

            JSONObject body = JsonUtils.readJsonObjectFromJsonObject(commandJson, getMessageType());
            assertNotNull(TestValues.NOT_NULL, body);

            // Test everything in the json body.
            assertEquals(TestValues.MATCH, JsonUtils.readStringFromJsonObject(body, RPCMessage.KEY_FUNCTION_NAME), cmd.getFunctionName());
            assertEquals(TestValues.MATCH, JsonUtils.readIntegerFromJsonObject(body, RPCMessage.KEY_CORRELATION_ID), cmd.getCorrelationID());

            JSONObject parameters = JsonUtils.readJsonObjectFromJsonObject(body, RPCMessage.KEY_PARAMETERS);

            JSONObject vehicleTypeObj = JsonUtils.readJsonObjectFromJsonObject(parameters, RegisterAppInterfaceResponse.KEY_VEHICLE_TYPE);
            VehicleType vehicleType = new VehicleType(JsonRPCMarshaller.deserializeJSONObject(vehicleTypeObj));
            assertTrue(TestValues.TRUE, Validator.validateVehicleType(vehicleType, cmd.getVehicleType()));

            JSONObject pcmStreamObj = JsonUtils.readJsonObjectFromJsonObject(parameters, RegisterAppInterfaceResponse.KEY_PCM_STREAM_CAPABILITIES);
            AudioPassThruCapabilities pcmStreamCap = new AudioPassThruCapabilities(JsonRPCMarshaller.deserializeJSONObject(pcmStreamObj));
            assertTrue(TestValues.TRUE, Validator.validatePcmStreamCapabilities(pcmStreamCap, cmd.getPcmStreamingCapabilities()));

            // Enum arrays are compared element-by-element via valueForString round-trips.
            JSONArray speechCapabilitiesArray = JsonUtils.readJsonArrayFromJsonObject(parameters, RegisterAppInterfaceResponse.KEY_SPEECH_CAPABILITIES);
            for (int index = 0; index < speechCapabilitiesArray.length(); index++) {
                SpeechCapabilities speechCapability = SpeechCapabilities.valueForString(speechCapabilitiesArray.get(index).toString());
                assertEquals(TestValues.MATCH, speechCapability, cmd.getSpeechCapabilities().get(index));
            }

            JSONArray vrCapabilitiesArray = JsonUtils.readJsonArrayFromJsonObject(parameters, RegisterAppInterfaceResponse.KEY_VR_CAPABILITIES);
            for (int index = 0; index < vrCapabilitiesArray.length(); index++) {
                VrCapabilities vrCapability = VrCapabilities.valueForString(vrCapabilitiesArray.get(index).toString());
                assertEquals(TestValues.MATCH, vrCapability, cmd.getVrCapabilities().get(index));
            }

            JSONArray audioPassThruCapabilitiesArray = JsonUtils.readJsonArrayFromJsonObject(parameters, RegisterAppInterfaceResponse.KEY_AUDIO_PASS_THRU_CAPABILITIES);
            List<AudioPassThruCapabilities> audioPassThruCapabilitiesList = new ArrayList<AudioPassThruCapabilities>();
            for (int index = 0; index < audioPassThruCapabilitiesArray.length(); index++) {
                AudioPassThruCapabilities audioPassThruCapability = new AudioPassThruCapabilities(JsonRPCMarshaller.deserializeJSONObject((JSONObject) audioPassThruCapabilitiesArray.get(index)));
                audioPassThruCapabilitiesList.add(audioPassThruCapability);
            }
            assertTrue(TestValues.TRUE, Validator.validateAudioPassThruCapabilities(audioPassThruCapabilitiesList, cmd.getAudioPassThruCapabilities()));

            JSONArray hmiZoneCapabilitiesArray = JsonUtils.readJsonArrayFromJsonObject(parameters, RegisterAppInterfaceResponse.KEY_HMI_ZONE_CAPABILITIES);
            for (int index = 0; index < hmiZoneCapabilitiesArray.length(); index++) {
                HmiZoneCapabilities hmiZoneCapability = HmiZoneCapabilities.valueForString(hmiZoneCapabilitiesArray.get(index).toString());
                assertEquals(TestValues.MATCH, hmiZoneCapability, cmd.getHmiZoneCapabilities().get(index));
            }

            JSONArray prerecordedSpeechArray = JsonUtils.readJsonArrayFromJsonObject(parameters, RegisterAppInterfaceResponse.KEY_PRERECORDED_SPEECH);
            for (int index = 0; index < prerecordedSpeechArray.length(); index++) {
                PrerecordedSpeech prerecordedSpeech = PrerecordedSpeech.valueForString(prerecordedSpeechArray.get(index).toString());
                assertEquals(TestValues.MATCH, prerecordedSpeech, cmd.getPrerecordedSpeech().get(index));
            }

            List<Integer> supportedDiagnosticModesList = JsonUtils.readIntegerListFromJsonObject(parameters, RegisterAppInterfaceResponse.KEY_SUPPORTED_DIAG_MODES);
            List<Integer> testDiagnosticModesList = cmd.getSupportedDiagModes();
            assertEquals(TestValues.MATCH, supportedDiagnosticModesList.size(), testDiagnosticModesList.size());
            assertTrue(TestValues.TRUE, Validator.validateIntegerList(supportedDiagnosticModesList, testDiagnosticModesList));

            JSONObject sdlMsgVersionObj = JsonUtils.readJsonObjectFromJsonObject(parameters, RegisterAppInterfaceResponse.KEY_SDL_MSG_VERSION);
            SdlMsgVersion sdlMsgVersion = new SdlMsgVersion(JsonRPCMarshaller.deserializeJSONObject(sdlMsgVersionObj));
            assertTrue(TestValues.TRUE, Validator.validateSdlMsgVersion(sdlMsgVersion, cmd.getSdlMsgVersion()));

            assertEquals(TestValues.MATCH, JsonUtils.readStringFromJsonObject(parameters, RegisterAppInterfaceResponse.KEY_LANGUAGE), cmd.getLanguage().toString());

            JSONArray buttonCapabilitiesArray = JsonUtils.readJsonArrayFromJsonObject(parameters, RegisterAppInterfaceResponse.KEY_BUTTON_CAPABILITIES);
            List<ButtonCapabilities> buttonCapabilitiesList = new ArrayList<ButtonCapabilities>();
            for (int index = 0; index < buttonCapabilitiesArray.length(); index++) {
                ButtonCapabilities buttonCapability = new ButtonCapabilities(JsonRPCMarshaller.deserializeJSONObject((JSONObject) buttonCapabilitiesArray.get(index)));
                buttonCapabilitiesList.add(buttonCapability);
            }
            assertTrue(TestValues.TRUE, Validator.validateButtonCapabilities(buttonCapabilitiesList, cmd.getButtonCapabilities()));

            JSONObject displayCapabilitiesObj = JsonUtils.readJsonObjectFromJsonObject(parameters, RegisterAppInterfaceResponse.KEY_DISPLAY_CAPABILITIES);
            DisplayCapabilities displayCapabilities = new DisplayCapabilities(JsonRPCMarshaller.deserializeJSONObject(displayCapabilitiesObj));
            assertTrue(TestValues.TRUE, Validator.validateDisplayCapabilities(displayCapabilities, cmd.getDisplayCapabilities()));

            assertEquals(TestValues.MATCH, JsonUtils.readStringFromJsonObject(parameters, RegisterAppInterfaceResponse.KEY_HMI_DISPLAY_LANGUAGE), cmd.getHmiDisplayLanguage().toString());

            JSONArray softButtonCapabilitiesArray = JsonUtils.readJsonArrayFromJsonObject(parameters, RegisterAppInterfaceResponse.KEY_SOFT_BUTTON_CAPABILITIES);
            List<SoftButtonCapabilities> softButtonCapabilitiesList = new ArrayList<SoftButtonCapabilities>();
            for (int index = 0; index < softButtonCapabilitiesArray.length(); index++) {
                SoftButtonCapabilities softButtonCapability = new SoftButtonCapabilities(JsonRPCMarshaller.deserializeJSONObject((JSONObject) softButtonCapabilitiesArray.get(index)));
                softButtonCapabilitiesList.add(softButtonCapability);
            }
            assertTrue(TestValues.TRUE, Validator.validateSoftButtonCapabilities(softButtonCapabilitiesList, cmd.getSoftButtonCapabilities()));

            JSONObject presetBankCapabilitiesObj = JsonUtils.readJsonObjectFromJsonObject(parameters, RegisterAppInterfaceResponse.KEY_PRESET_BANK_CAPABILITIES);
            PresetBankCapabilities presetBankCapabilities = new PresetBankCapabilities(JsonRPCMarshaller.deserializeJSONObject(presetBankCapabilitiesObj));
            assertTrue(TestValues.TRUE, Validator.validatePresetBankCapabilities(presetBankCapabilities, cmd.getPresetBankCapabilities()));

            Boolean iconResumed = JsonUtils.readBooleanFromJsonObject(parameters, RegisterAppInterfaceResponse.KEY_ICON_RESUMED);
            assertEquals(TestValues.MATCH, iconResumed, cmd.getIconResumed());

        } catch (JSONException e) {
            e.printStackTrace();
        }
    }
}
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package com.pratesis.scyllasync;

import com.form.frmSync;
import com.google.gson.Gson;
import com.models.MblUpload;
import com.models.Mblfaktivitas;
import com.models.Mblfarcndn;
import com.models.Mblfarretur;
import com.models.Mblfbotol;
import com.models.Mblfbrand;
import com.models.Mblfcheq;
import com.models.Mblfcustmst;
import com.models.Mblfdiscd5;
import com.models.Mblfdiscd6;
import com.models.Mblfdisch;
import com.models.Mblfembtipe;
import com.models.Mblfgdisc;
import com.models.Mblfgharga;
import com.models.Mblfgrowth;
import com.models.Mblfgrupout;
import com.models.Mblfinfo;
import com.models.Mblfkompcust;
import com.models.Mblfkompetitor;
import com.models.Mblfkpl;
import com.models.Mblfmaster;
import com.models.Mblfnote;
import com.models.Mblfnotec;
import com.models.Mblfparam;
import com.models.Mblfpckdisc;
import com.models.Mblfpckompt;
import com.models.Mblfpiutang;
import com.models.Mblfprice;
import com.models.Mblfpriced;
import com.models.Mblfprlin;
import com.models.Mblfreasonb;
import com.models.Mblfrute;
import com.models.Mblfstcust;
import com.models.Mblftarget;
import com.models.Mblftpr3d1;
import com.models.Mblftpr3d3;
import com.models.Mblftpr3d6;
import com.models.Mblftpr3h;
import com.models.Mblftransfer;
import com.models.Mblftretur;
import com.models.Mblftypeout;
import com.models.Mblfvdisinvd;
import com.models.Mblfvdisinvh;
import com.models.Mbltbiaya;
import com.models.Person;
import com.services.RestClient;
import com.utils.HibernateUtil;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.ListIterator;
// NOTE(review): removed the accidental IDE auto-import
// "import static javafx.scene.input.KeyCode.T;" — it was unused and pulled
// in a JavaFX dependency for nothing.
import org.hibernate.Query;
import org.hibernate.Session;
import retrofit.Callback;
import retrofit.RetrofitError;
import retrofit.client.Response;

/**
 * Command-line sync uploader: reads the local mobile sync tables through
 * Hibernate, packs them into a single {@link MblUpload} and prints the
 * resulting JSON payload to stdout. The actual REST upload is still
 * disabled (see the commented block at the end of {@link #main}).
 *
 * @author pratesis
 */
public class MainActivity {

    /**
     * Returns the distinct elements of {@code list} as a new
     * {@code ArrayList}, preserving first-seen order.
     *
     * <p>Fixes two defects of the previous implementation: it no longer
     * destructively clears the caller's list, and it no longer scrambles
     * element order (a plain {@code HashSet} has no defined iteration
     * order). Like the old version, it relies on {@code Mblfaktivitas}
     * implementing {@code equals}/{@code hashCode} consistently — TODO
     * confirm the entity defines both.
     *
     * @param list source list; never modified
     * @return deduplicated copy, in first-seen order
     */
    public static ArrayList<Mblfaktivitas> removeDuplicate(List<Mblfaktivitas> list) {
        // LinkedHashSet drops duplicates while keeping insertion order.
        return new ArrayList<>(new LinkedHashSet<>(list));
    }

    /**
     * Loads the sync tables, builds the upload payload and prints it as JSON.
     *
     * @param args the command line arguments (unused)
     */
    public static void main(String[] args) {
        // new frmSync().setVisible(true);
        MblUpload upload = new MblUpload();
        Gson gson = new Gson();
        Session session = HibernateUtil.getSessionFactory().openSession();
        try {
            // Each result is fetched once and cached in a local: the old code
            // called query.list() up to three times per table, re-running the
            // query every time.
            Query query = session.createQuery("from Mblfaktivitas");
            @SuppressWarnings("unchecked")
            List<Mblfaktivitas> aktivitas = (List<Mblfaktivitas>) query.list();
            if (!aktivitas.isEmpty()) {
                upload.setAktifitas(removeDuplicate(aktivitas));
            }

            query = session.createQuery("from Mblfarcndn");
            @SuppressWarnings("unchecked")
            List<Mblfarcndn> cndn = (List<Mblfarcndn>) query.list();
            if (!cndn.isEmpty()) {
                upload.setCndn(cndn);
            }

            // NOTE(review): the original file carried ~40 further commented-out
            // blocks loading the remaining tables (Mblfmaster, Mblfarretur,
            // Mblfbotol, ..., Mbltbiaya) into the upload. They were dead code
            // and have been removed; restore any table by following the two
            // patterns above.
        } catch (Exception e) {
            // Keep the original console message, but also dump the stack
            // trace: the message alone loses the failure location entirely.
            System.out.println("Error " + e.getMessage());
            e.printStackTrace();
        } finally {
            // Release the session unconditionally (moved into finally so a
            // runtime error in the try block can never leak it).
            session.close();
        }
        String json = gson.toJson(upload);
        System.out.println("json " + json);

        // Upload is still disabled, exactly as in the original:
        // RestClient.get().uploadData(upload, new Callback<String>() {
        //     @Override
        //     public void success(String t, Response rspns) {
        //         System.out.println("sukses " + t);
        //     }
        //     @Override
        //     public void failure(RetrofitError re) {
        //         System.out.println("RetrofitError " + re.getMessage());
        //     }
        // });
    }

    /**
     * Order-preserving dedup kept for backward compatibility with existing
     * callers; equivalent to {@link #removeDuplicate(List)}.
     *
     * @param list source list; never modified
     * @return distinct elements in first-seen order
     */
    static ArrayList<Mblfaktivitas> removeDuplicatesActivity(ArrayList<Mblfaktivitas> list) {
        // Store unique items in result; record encountered items in a set.
        ArrayList<Mblfaktivitas> result = new ArrayList<>();
        HashSet<Mblfaktivitas> seen = new HashSet<>();
        for (Mblfaktivitas item : list) {
            // First occurrence wins; later duplicates are skipped.
            if (!seen.contains(item)) {
                result.add(item);
                seen.add(item);
            }
        }
        return result;
    }
}
package com.google.cloud.speech.v1p1beta1;

import static io.grpc.stub.ClientCalls.asyncUnaryCall;
import static io.grpc.stub.ClientCalls.asyncServerStreamingCall;
import static io.grpc.stub.ClientCalls.asyncClientStreamingCall;
import static io.grpc.stub.ClientCalls.asyncBidiStreamingCall;
import static io.grpc.stub.ClientCalls.blockingUnaryCall;
import static io.grpc.stub.ClientCalls.blockingServerStreamingCall;
import static io.grpc.stub.ClientCalls.futureUnaryCall;
import static io.grpc.MethodDescriptor.generateFullMethodName;
import static io.grpc.stub.ServerCalls.asyncUnaryCall;
import static io.grpc.stub.ServerCalls.asyncServerStreamingCall;
import static io.grpc.stub.ServerCalls.asyncClientStreamingCall;
import static io.grpc.stub.ServerCalls.asyncBidiStreamingCall;
import static io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall;
import static io.grpc.stub.ServerCalls.asyncUnimplementedStreamingCall;

/**
 * <pre>
 * Service that implements Google Cloud Speech API.
 * </pre>
 */
// NOTE(review): this class is produced by the gRPC proto compiler (see the
// @Generated annotation). Do not hand-edit; regenerate from
// cloud_speech.proto instead.
@javax.annotation.Generated(
    value = "by gRPC proto compiler (version 1.2.0)",
    comments = "Source: google/cloud/speech/v1_1beta1/cloud_speech.proto")
public final class SpeechGrpc {

  // Static holder class; never instantiated.
  private SpeechGrpc() {}

  public static final String SERVICE_NAME = "google.cloud.speech.v1p1beta1.Speech";

  // Static method descriptors that strictly reflect the proto.
  @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901")
  public static final io.grpc.MethodDescriptor<com.google.cloud.speech.v1p1beta1.RecognizeRequest,
      com.google.cloud.speech.v1p1beta1.RecognizeResponse> METHOD_RECOGNIZE =
      io.grpc.MethodDescriptor.create(
          io.grpc.MethodDescriptor.MethodType.UNARY,
          generateFullMethodName(
              "google.cloud.speech.v1p1beta1.Speech", "Recognize"),
          io.grpc.protobuf.ProtoUtils.marshaller(com.google.cloud.speech.v1p1beta1.RecognizeRequest.getDefaultInstance()),
          io.grpc.protobuf.ProtoUtils.marshaller(com.google.cloud.speech.v1p1beta1.RecognizeResponse.getDefaultInstance()));
  @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901")
  public static final io.grpc.MethodDescriptor<com.google.cloud.speech.v1p1beta1.LongRunningRecognizeRequest,
      com.google.longrunning.Operation> METHOD_LONG_RUNNING_RECOGNIZE =
      io.grpc.MethodDescriptor.create(
          io.grpc.MethodDescriptor.MethodType.UNARY,
          generateFullMethodName(
              "google.cloud.speech.v1p1beta1.Speech", "LongRunningRecognize"),
          io.grpc.protobuf.ProtoUtils.marshaller(com.google.cloud.speech.v1p1beta1.LongRunningRecognizeRequest.getDefaultInstance()),
          io.grpc.protobuf.ProtoUtils.marshaller(com.google.longrunning.Operation.getDefaultInstance()));
  @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901")
  public static final io.grpc.MethodDescriptor<com.google.cloud.speech.v1p1beta1.StreamingRecognizeRequest,
      com.google.cloud.speech.v1p1beta1.StreamingRecognizeResponse> METHOD_STREAMING_RECOGNIZE =
      io.grpc.MethodDescriptor.create(
          io.grpc.MethodDescriptor.MethodType.BIDI_STREAMING,
          generateFullMethodName(
              "google.cloud.speech.v1p1beta1.Speech", "StreamingRecognize"),
          io.grpc.protobuf.ProtoUtils.marshaller(com.google.cloud.speech.v1p1beta1.StreamingRecognizeRequest.getDefaultInstance()),
          io.grpc.protobuf.ProtoUtils.marshaller(com.google.cloud.speech.v1p1beta1.StreamingRecognizeResponse.getDefaultInstance()));

  /**
   * Creates a new async stub that supports all call types for the service
   */
  public static SpeechStub newStub(io.grpc.Channel channel) {
    return new SpeechStub(channel);
  }

  /**
   * Creates a new blocking-style stub that supports unary and streaming output calls on the service
   */
  public static SpeechBlockingStub newBlockingStub(
      io.grpc.Channel channel) {
    return new SpeechBlockingStub(channel);
  }

  /**
   * Creates a new ListenableFuture-style stub that supports unary and streaming output calls on the service
   */
  public static SpeechFutureStub newFutureStub(
      io.grpc.Channel channel) {
    return new SpeechFutureStub(channel);
  }

  /**
   * <pre>
   * Service that implements Google Cloud Speech API.
   * </pre>
   */
  // Server-side base class: every method defaults to UNIMPLEMENTED until
  // overridden by a concrete service implementation.
  public static abstract class SpeechImplBase implements io.grpc.BindableService {

    /**
     * <pre>
     * Performs synchronous speech recognition: receive results after all audio
     * has been sent and processed.
     * </pre>
     */
    public void recognize(com.google.cloud.speech.v1p1beta1.RecognizeRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.speech.v1p1beta1.RecognizeResponse> responseObserver) {
      asyncUnimplementedUnaryCall(METHOD_RECOGNIZE, responseObserver);
    }

    /**
     * <pre>
     * Performs asynchronous speech recognition: receive results via the
     * google.longrunning.Operations interface. Returns either an
     * `Operation.error` or an `Operation.response` which contains
     * a `LongRunningRecognizeResponse` message.
     * </pre>
     */
    public void longRunningRecognize(com.google.cloud.speech.v1p1beta1.LongRunningRecognizeRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      asyncUnimplementedUnaryCall(METHOD_LONG_RUNNING_RECOGNIZE, responseObserver);
    }

    /**
     * <pre>
     * Performs bidirectional streaming speech recognition: receive results while
     * sending audio. This method is only available via the gRPC API (not REST).
     * </pre>
     */
    public io.grpc.stub.StreamObserver<com.google.cloud.speech.v1p1beta1.StreamingRecognizeRequest> streamingRecognize(
        io.grpc.stub.StreamObserver<com.google.cloud.speech.v1p1beta1.StreamingRecognizeResponse> responseObserver) {
      return asyncUnimplementedStreamingCall(METHOD_STREAMING_RECOGNIZE, responseObserver);
    }

    // Wires each proto method descriptor to the matching handler so the
    // server runtime can dispatch incoming calls to this implementation.
    @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() {
      return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
          .addMethod(
            METHOD_RECOGNIZE,
            asyncUnaryCall(
              new MethodHandlers<
                com.google.cloud.speech.v1p1beta1.RecognizeRequest,
                com.google.cloud.speech.v1p1beta1.RecognizeResponse>(
                  this, METHODID_RECOGNIZE)))
          .addMethod(
            METHOD_LONG_RUNNING_RECOGNIZE,
            asyncUnaryCall(
              new MethodHandlers<
                com.google.cloud.speech.v1p1beta1.LongRunningRecognizeRequest,
                com.google.longrunning.Operation>(
                  this, METHODID_LONG_RUNNING_RECOGNIZE)))
          .addMethod(
            METHOD_STREAMING_RECOGNIZE,
            asyncBidiStreamingCall(
              new MethodHandlers<
                com.google.cloud.speech.v1p1beta1.StreamingRecognizeRequest,
                com.google.cloud.speech.v1p1beta1.StreamingRecognizeResponse>(
                  this, METHODID_STREAMING_RECOGNIZE)))
          .build();
    }
  }

  /**
   * <pre>
   * Service that implements Google Cloud Speech API.
   * </pre>
   */
  // Client-side async stub: callback-based, supports all call types.
  public static final class SpeechStub extends io.grpc.stub.AbstractStub<SpeechStub> {
    private SpeechStub(io.grpc.Channel channel) {
      super(channel);
    }

    private SpeechStub(io.grpc.Channel channel,
        io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected SpeechStub build(io.grpc.Channel channel,
        io.grpc.CallOptions callOptions) {
      return new SpeechStub(channel, callOptions);
    }

    /**
     * <pre>
     * Performs synchronous speech recognition: receive results after all audio
     * has been sent and processed.
     * </pre>
     */
    public void recognize(com.google.cloud.speech.v1p1beta1.RecognizeRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.speech.v1p1beta1.RecognizeResponse> responseObserver) {
      asyncUnaryCall(
          getChannel().newCall(METHOD_RECOGNIZE, getCallOptions()), request, responseObserver);
    }

    /**
     * <pre>
     * Performs asynchronous speech recognition: receive results via the
     * google.longrunning.Operations interface. Returns either an
     * `Operation.error` or an `Operation.response` which contains
     * a `LongRunningRecognizeResponse` message.
     * </pre>
     */
    public void longRunningRecognize(com.google.cloud.speech.v1p1beta1.LongRunningRecognizeRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      asyncUnaryCall(
          getChannel().newCall(METHOD_LONG_RUNNING_RECOGNIZE, getCallOptions()), request, responseObserver);
    }

    /**
     * <pre>
     * Performs bidirectional streaming speech recognition: receive results while
     * sending audio. This method is only available via the gRPC API (not REST).
     * </pre>
     */
    public io.grpc.stub.StreamObserver<com.google.cloud.speech.v1p1beta1.StreamingRecognizeRequest> streamingRecognize(
        io.grpc.stub.StreamObserver<com.google.cloud.speech.v1p1beta1.StreamingRecognizeResponse> responseObserver) {
      return asyncBidiStreamingCall(
          getChannel().newCall(METHOD_STREAMING_RECOGNIZE, getCallOptions()), responseObserver);
    }
  }

  /**
   * <pre>
   * Service that implements Google Cloud Speech API.
   * </pre>
   */
  // Blocking stub: unary calls only (streaming is gRPC/async-only here).
  public static final class SpeechBlockingStub extends io.grpc.stub.AbstractStub<SpeechBlockingStub> {
    private SpeechBlockingStub(io.grpc.Channel channel) {
      super(channel);
    }

    private SpeechBlockingStub(io.grpc.Channel channel,
        io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected SpeechBlockingStub build(io.grpc.Channel channel,
        io.grpc.CallOptions callOptions) {
      return new SpeechBlockingStub(channel, callOptions);
    }

    /**
     * <pre>
     * Performs synchronous speech recognition: receive results after all audio
     * has been sent and processed.
     * </pre>
     */
    public com.google.cloud.speech.v1p1beta1.RecognizeResponse recognize(com.google.cloud.speech.v1p1beta1.RecognizeRequest request) {
      return blockingUnaryCall(
          getChannel(), METHOD_RECOGNIZE, getCallOptions(), request);
    }

    /**
     * <pre>
     * Performs asynchronous speech recognition: receive results via the
     * google.longrunning.Operations interface. Returns either an
     * `Operation.error` or an `Operation.response` which contains
     * a `LongRunningRecognizeResponse` message.
     * </pre>
     */
    public com.google.longrunning.Operation longRunningRecognize(com.google.cloud.speech.v1p1beta1.LongRunningRecognizeRequest request) {
      return blockingUnaryCall(
          getChannel(), METHOD_LONG_RUNNING_RECOGNIZE, getCallOptions(), request);
    }
  }

  /**
   * <pre>
   * Service that implements Google Cloud Speech API.
   * </pre>
   */
  // Future-style stub: unary calls returning ListenableFuture.
  public static final class SpeechFutureStub extends io.grpc.stub.AbstractStub<SpeechFutureStub> {
    private SpeechFutureStub(io.grpc.Channel channel) {
      super(channel);
    }

    private SpeechFutureStub(io.grpc.Channel channel,
        io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected SpeechFutureStub build(io.grpc.Channel channel,
        io.grpc.CallOptions callOptions) {
      return new SpeechFutureStub(channel, callOptions);
    }

    /**
     * <pre>
     * Performs synchronous speech recognition: receive results after all audio
     * has been sent and processed.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.cloud.speech.v1p1beta1.RecognizeResponse> recognize(
        com.google.cloud.speech.v1p1beta1.RecognizeRequest request) {
      return futureUnaryCall(
          getChannel().newCall(METHOD_RECOGNIZE, getCallOptions()), request);
    }

    /**
     * <pre>
     * Performs asynchronous speech recognition: receive results via the
     * google.longrunning.Operations interface. Returns either an
     * `Operation.error` or an `Operation.response` which contains
     * a `LongRunningRecognizeResponse` message.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation> longRunningRecognize(
        com.google.cloud.speech.v1p1beta1.LongRunningRecognizeRequest request) {
      return futureUnaryCall(
          getChannel().newCall(METHOD_LONG_RUNNING_RECOGNIZE, getCallOptions()), request);
    }
  }

  // Numeric ids used by MethodHandlers to select the target service method.
  private static final int METHODID_RECOGNIZE = 0;
  private static final int METHODID_LONG_RUNNING_RECOGNIZE = 1;
  private static final int METHODID_STREAMING_RECOGNIZE = 2;

  // Single generic handler class that dispatches on methodId; unary methods
  // go through invoke(Req, observer), streaming ones through the
  // observer-returning invoke overload.
  private static final class MethodHandlers<Req, Resp> implements
      io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
      io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final SpeechImplBase serviceImpl;
    private final int methodId;

    MethodHandlers(SpeechImplBase serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_RECOGNIZE:
          serviceImpl.recognize((com.google.cloud.speech.v1p1beta1.RecognizeRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.speech.v1p1beta1.RecognizeResponse>) responseObserver);
          break;
        case METHODID_LONG_RUNNING_RECOGNIZE:
          serviceImpl.longRunningRecognize((com.google.cloud.speech.v1p1beta1.LongRunningRecognizeRequest) request,
              (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
          break;
        default:
          // Unreachable for correctly generated code.
          throw new AssertionError();
      }
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_STREAMING_RECOGNIZE:
          return (io.grpc.stub.StreamObserver<Req>) serviceImpl.streamingRecognize(
              (io.grpc.stub.StreamObserver<com.google.cloud.speech.v1p1beta1.StreamingRecognizeResponse>) responseObserver);
        default:
          // Unreachable for correctly generated code.
          throw new AssertionError();
      }
    }
  }

  // Supplies the proto file descriptor so reflection/debug tooling can
  // resolve the service schema.
  private static final class SpeechDescriptorSupplier implements io.grpc.protobuf.ProtoFileDescriptorSupplier {
    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.cloud.speech.v1p1beta1.SpeechProto.getDescriptor();
    }
  }

  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;

  // Lazily builds the service descriptor using double-checked locking on the
  // volatile field above.
  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (SpeechGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
              .setSchemaDescriptor(new SpeechDescriptorSupplier())
              .addMethod(METHOD_RECOGNIZE)
              .addMethod(METHOD_LONG_RUNNING_RECOGNIZE)
              .addMethod(METHOD_STREAMING_RECOGNIZE)
              .build();
        }
      }
    }
    return result;
  }
}
/* * Copyright 2006 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import com.google.common.base.Preconditions; import com.google.common.base.Predicates; import com.google.common.collect.Iterables; import com.google.javascript.jscomp.GlobalNamespace.AstChange; import com.google.javascript.jscomp.GlobalNamespace.Name; import com.google.javascript.jscomp.GlobalNamespace.Ref; import com.google.javascript.jscomp.GlobalNamespace.Ref.Type; import com.google.javascript.jscomp.ReferenceCollectingCallback.Reference; import com.google.javascript.jscomp.ReferenceCollectingCallback.ReferenceCollection; import com.google.javascript.rhino.IR; import com.google.javascript.rhino.JSDocInfo; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token; import com.google.javascript.rhino.TokenStream; import com.google.javascript.rhino.TypeI; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Deque; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; /** * Flattens global objects/namespaces by replacing each '.' with '$' in * their names. This reduces the number of property lookups the browser has * to do and allows the {@link RenameVars} pass to shorten namespaced names. * For example, goog.events.handleEvent() -> goog$events$handleEvent() -> Za(). 
* * <p>If a global object's name is assigned to more than once, or if a property * is added to the global object in a complex expression, then none of its * properties will be collapsed (for safety/correctness). * * <p>If, after a global object is declared, it is never referenced except when * its properties are read or set, then the object will be removed after its * properties have been collapsed. * * <p>Uninitialized variable stubs are created at a global object's declaration * site for any of its properties that are added late in a local scope. * * <p> Static properties of constructors are always collapsed, unsafely! * For other objects: if, after an object is declared, it is referenced directly * in a way that might create an alias for it, then none of its properties will * be collapsed. * This behavior is a safeguard to prevent the values associated with the * flattened names from getting out of sync with the object's actual property * values. For example, in the following case, an alias a$b, if created, could * easily keep the value 0 even after a.b became 5: * <code> a = {b: 0}; c = a; c.b = 5; </code>. * * <p>This pass doesn't flatten property accesses of the form: a[b]. * * <p>For lots of examples, see the unit test. 
* */
class CollapseProperties implements CompilerPass {

  // Warnings
  static final DiagnosticType UNSAFE_NAMESPACE_WARNING =
      DiagnosticType.warning(
          "JSC_UNSAFE_NAMESPACE",
          "incomplete alias created for namespace {0}");

  static final DiagnosticType NAMESPACE_REDEFINED_WARNING =
      DiagnosticType.warning(
          "JSC_NAMESPACE_REDEFINED",
          "namespace {0} should not be redefined");

  static final DiagnosticType UNSAFE_THIS = DiagnosticType.warning(
      "JSC_UNSAFE_THIS",
      "dangerous use of 'this' in static method {0}");

  static final DiagnosticType UNSAFE_CTOR_ALIASING =
      DiagnosticType.warning(
          "JSC_UNSAFE_CTOR_ALIASING",
          "Variable {0} aliases a constructor, "
              + "so it cannot be assigned multiple times");

  private AbstractCompiler compiler;

  /** Global namespace tree */
  private List<Name> globalNames;

  /** Maps names (e.g. "a.b.c") to nodes in the global namespace tree */
  private Map<String, Name> nameMap;

  CollapseProperties(AbstractCompiler compiler) {
    this.compiler = compiler;
  }

  @Override
  public void process(Node externs, Node root) {
    GlobalNamespace namespace;
    namespace = new GlobalNamespace(compiler, root);

    // Aliases are inlined first so that the namespace index built below
    // reflects the post-inlining tree.
    inlineAliases(namespace);
    nameMap = namespace.getNameIndex();
    globalNames = namespace.getNameForest();
    checkNamespaces();

    for (Name name : globalNames) {
      flattenReferencesToCollapsibleDescendantNames(name, name.getBaseName());
    }

    // We collapse property definitions after collapsing property references
    // because this step can alter the parse tree above property references,
    // invalidating the node ancestry stored with each reference.
    for (Name name : globalNames) {
      collapseDeclarationOfNameAndDescendants(name, name.getBaseName());
    }
  }

  /**
   * For each qualified name N in the global scope, we check if:
   * (a) No ancestor of N is ever aliased or assigned an unknown value type.
   *     (If N = "a.b.c", "a" and "a.b" are never aliased).
   * (b) N has exactly one write, and it lives in the global scope.
   * (c) N is aliased in a local scope.
   * (d) N is aliased in global scope
   *
   * If (a) is true, then GlobalNamespace must know all the writes to N.
   * If (a) and (b) are true, then N cannot change during the execution of
   * a local scope.
   * If (a) and (b) and (c) are true, then the alias can be inlined if the
   * alias obeys the usual rules for how we decide whether a variable is
   * inlineable.
   * If (a) and (b) and (d) are true, then inline the alias if possible (if
   * it is assigned exactly once unconditionally).
   * @see InlineVariables
   */
  private void inlineAliases(GlobalNamespace namespace) {
    // Invariant: All the names in the worklist meet condition (a).
    Deque<Name> workList = new ArrayDeque<>(namespace.getNameForest());
    while (!workList.isEmpty()) {
      Name name = workList.pop();

      // Don't attempt to inline a getter or setter property as a variable.
      if (name.type == Name.Type.GET || name.type == Name.Type.SET) {
        continue;
      }

      if (!name.inExterns && name.globalSets == 1 && name.localSets == 0
          && name.aliasingGets > 0) {
        // {@code name} meets condition (b). Find all of its local aliases
        // and try to inline them.
        List<Ref> refs = new ArrayList<>(name.getRefs());
        for (Ref ref : refs) {
          if (ref.type == Type.ALIASING_GET && ref.scope.isLocal()) {
            // {@code name} meets condition (c). Try to inline it.
            // TODO(johnlenz): consider picking up new aliases at the end
            // of the pass instead of immediately like we do for global
            // inlines.
            if (inlineAliasIfPossible(name, ref, namespace)) {
              name.removeRef(ref);
            }
          } else if (ref.type == Type.ALIASING_GET && ref.scope.isGlobal()
              && ref.getTwin() == null) { // ignore aliases in chained assignments
            if (inlineGlobalAliasIfPossible(name, ref, namespace)) {
              name.removeRef(ref);
            }
          }
        }
      }

      // Check if {@code name} has any aliases left after the
      // local-alias-inlining above.
      if ((name.type == Name.Type.OBJECTLIT || name.type == Name.Type.FUNCTION)
          && name.aliasingGets == 0 && name.props != null) {
        // All of {@code name}'s children meet condition (a), so they can be
        // added to the worklist.
        workList.addAll(name.props);
      }
    }
  }

  /**
   * Attempts to inline a global alias of a global name. This requires that
   * the name is well defined: assigned unconditionally, and assigned exactly
   * once. It is assumed that the name for which it is an alias must already
   * meet these same requirements.
   *
   * @param alias The alias to inline
   * @return Whether the alias was inlined.
   */
  private boolean inlineGlobalAliasIfPossible(
      Name name, Ref alias, GlobalNamespace namespace) {
    // Ensure that the alias is assigned to the global name at the
    // declaration.
    Node aliasParent = alias.node.getParent();
    if ((aliasParent.isAssign() || aliasParent.isName())
        && NodeUtil.isExecutedExactlyOnce(aliasParent)
        // We special-case for constructors here, to inline constructor aliases
        // more aggressively in global scope.
        // We do this because constructor properties are always collapsed,
        // so we want to inline the aliases also to avoid breakages.
        // TODO(tbreisacher): Do we still need this special case?
        || aliasParent.isName() && name.isConstructor()) {
      Node lvalue = aliasParent.isName() ?
          aliasParent : aliasParent.getFirstChild();
      if (!lvalue.isQualifiedName()) {
        return false;
      }
      if (lvalue.isName()
          && compiler.getCodingConvention().isExported(
              lvalue.getString(), /* local */ false)) {
        return false;
      }
      name = namespace.getSlot(lvalue.getQualifiedName());
      if (name != null && name.isInlinableGlobalAlias()) {
        Set<AstChange> newNodes = new LinkedHashSet<>();

        List<Ref> refs = new ArrayList<>(name.getRefs());
        for (Ref ref : refs) {
          switch (ref.type) {
            case SET_FROM_GLOBAL:
              continue;
            case DIRECT_GET:
            case ALIASING_GET:
            case PROTOTYPE_GET:
            case CALL_GET:
              // Replace the read of the alias with a copy of the aliased
              // expression.
              Node newNode = alias.node.cloneTree();
              Node node = ref.node;
              node.getParent().replaceChild(node, newNode);
              newNodes.add(new AstChange(ref.module, ref.scope, newNode));
              name.removeRef(ref);
              break;
            default:
              throw new IllegalStateException();
          }
        }

        rewriteAliasProps(name, alias.node, 0, newNodes);

        // just set the original alias to null.
        aliasParent.replaceChild(alias.node, IR.nullNode());
        compiler.reportCodeChange();

        // Inlining the variable may have introduced new references
        // to descendants of {@code name}. So those need to be collected now.
        namespace.scanNewNodes(newNodes);

        return true;
      }
    }
    return false;
  }

  /**
   * Rewrites the prefix of every reference to a property of {@code name} so
   * that it reads off of {@code value} instead.
   *
   * @param name The Name whose properties references should be updated.
   * @param value The value to use when rewriting.
   * @param depth The chain depth.
   * @param newNodes Expression nodes that have been updated.
   */
  private static void rewriteAliasProps(
      Name name, Node value, int depth, Set<AstChange> newNodes) {
    if (name.props == null) {
      return;
    }
    Preconditions.checkState(
        !value.matchesQualifiedName(name.getFullName()),
        "%s should not match name %s", value, name.getFullName());
    for (Name prop : name.props) {
      rewriteAliasProps(prop, value, depth + 1, newNodes);
      List<Ref> refs = new ArrayList<>(prop.getRefs());
      for (Ref ref : refs) {
        // Walk up {@code depth + 1} levels of the reference's qualified-name
        // chain to find the node that corresponds to {@code name} itself.
        Node target = ref.node;
        for (int i = 0; i <= depth; i++) {
          if (target.isGetProp()) {
            target = target.getFirstChild();
          } else if (NodeUtil.isObjectLitKey(target)) {
            // Object literal key definitions are a little trickier, as we
            // need to find the assignment target
            Node gparent = target.getParent().getParent();
            if (gparent.isAssign()) {
              target = gparent.getFirstChild();
            } else {
              Preconditions.checkState(NodeUtil.isObjectLitKey(gparent));
              target = gparent;
            }
          } else {
            throw new IllegalStateException("unexpected: " + target);
          }
        }
        Preconditions.checkState(target.isGetProp() || target.isName());
        target.getParent().replaceChild(target, value.cloneTree());
        prop.removeRef(ref);
        // Rescan the expression root.
        newNodes.add(new AstChange(ref.module, ref.scope, ref.node));
      }
    }
  }

  /**
   * Attempts to inline a local-scope alias of a global name (condition (c) in
   * {@link #inlineAliases}). Only inlines when the aliasing local variable is
   * well defined and assigned exactly once in its lifetime.
   *
   * @param alias The aliasing reference to inline
   * @return Whether the alias was inlined.
   */
  private boolean inlineAliasIfPossible(
      Name name, Ref alias, GlobalNamespace namespace) {
    // Ensure that the alias is assigned to a local variable at that
    // variable's declaration. If the alias's parent is a NAME,
    // then the NAME must be the child of a VAR node, and we must
    // be in a VAR assignment.
    Node aliasParent = alias.node.getParent();
    if (aliasParent.isName()) {
      // Ensure that the local variable is well defined and never reassigned.
      Scope scope = alias.scope;
      String aliasVarName = aliasParent.getString();
      Var aliasVar = scope.getVar(aliasVarName);

      ReferenceCollectingCallback collector =
          new ReferenceCollectingCallback(compiler,
              ReferenceCollectingCallback.DO_NOTHING_BEHAVIOR,
              Predicates.equalTo(aliasVar));
      collector.processScope(scope);

      ReferenceCollection aliasRefs = collector.getReferences(aliasVar);
      Set<AstChange> newNodes = new LinkedHashSet<>();

      if (aliasRefs.isWellDefined()
          && aliasRefs.firstReferenceIsAssigningDeclaration()) {
        if (!aliasRefs.isAssignedOnceInLifetime()) {
          // Static properties of constructors are always collapsed.
          // So, if a constructor is aliased and its properties are accessed from
          // the alias, we would like to inline the alias here to access the
          // properties correctly.
          // But if the aliased variable is assigned more than once, we can't
          // inline, so we warn.
          if (name.isConstructor()) {
            boolean accessPropsAfterAliasing = false;
            for (Reference ref : aliasRefs.references) {
              if (ref.getNode().getParent().isGetProp()) {
                accessPropsAfterAliasing = true;
                break;
              }
            }
            if (accessPropsAfterAliasing) {
              compiler.report(
                  JSError.make(aliasParent, UNSAFE_CTOR_ALIASING, aliasVarName));
            }
          }
          return false;
        }

        // The alias is well-formed, so do the inlining now.
        // Reference 0 is the assigning declaration itself, so start at 1.
        int size = aliasRefs.references.size();
        for (int i = 1; i < size; i++) {
          ReferenceCollectingCallback.Reference aliasRef =
              aliasRefs.references.get(i);

          Node newNode = alias.node.cloneTree();
          aliasRef.getParent().replaceChild(aliasRef.getNode(), newNode);
          newNodes.add(new AstChange(
              getRefModule(aliasRef), aliasRef.getScope(), newNode));
        }

        // just set the original alias to null.
        aliasParent.replaceChild(alias.node, IR.nullNode());
        compiler.reportCodeChange();

        // Inlining the variable may have introduced new references
        // to descendants of {@code name}. So those need to be collected now.
        namespace.scanNewNodes(newNodes);

        return true;
      }
    }
    return false;
  }

  /**
   * Returns the module containing {@code ref}'s input, or null if the
   * input cannot be resolved.
   */
  JSModule getRefModule(ReferenceCollectingCallback.Reference ref) {
    CompilerInput input = compiler.getInput(ref.getInputId());
    return input == null ? null : input.getModule();
  }

  /**
   * Runs through all namespaces (prefixes of classes and enums), and checks if
   * any of them have been used in an unsafe way.
   */
  private void checkNamespaces() {
    for (Name name : nameMap.values()) {
      if (name.isNamespaceObjectLit()
          && (name.aliasingGets > 0 || name.localSets + name.globalSets > 1
              || name.deleteProps > 0)) {
        boolean initialized = name.getDeclaration() != null;
        for (Ref ref : name.getRefs()) {
          if (ref == name.getDeclaration()) {
            continue;
          }

          if (ref.type == Ref.Type.DELETE_PROP) {
            if (initialized) {
              warnAboutNamespaceRedefinition(name, ref);
            }
          } else if (
              ref.type == Ref.Type.SET_FROM_GLOBAL
              || ref.type == Ref.Type.SET_FROM_LOCAL) {
            if (initialized && !isSafeNamespaceReinit(ref)) {
              warnAboutNamespaceRedefinition(name, ref);
            }
            initialized = true;
          } else if (ref.type == Ref.Type.ALIASING_GET) {
            warnAboutNamespaceAliasing(name, ref);
          }
        }
      }
    }
  }

  /** Returns whether a namespace assignment is the idiomatic safe re-init. */
  private boolean isSafeNamespaceReinit(Ref ref) {
    // allow "a = a || {}" or "var a = a || {}"
    Node valParent = getValueParent(ref);
    Node val = valParent.getLastChild();
    if (val.getType() == Token.OR) {
      Node maybeName = val.getFirstChild();
      if (ref.node.matchesQualifiedName(maybeName)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Gets the parent node of the value for any assignment to a Name.
   * For example, in the assignment
   * {@code var x = 3;}
   * the parent would be the NAME node.
   */
  private static Node getValueParent(Ref ref) {
    // there are two types of declarations: VARs and ASSIGNs
    return (ref.node.getParent() != null && ref.node.getParent().isVar())
        ? ref.node : ref.node.getParent();
  }

  /**
   * Reports a warning because a namespace was aliased.
   *
   * @param nameObj A namespace that is being aliased
   * @param ref The reference that forced the alias
   */
  private void warnAboutNamespaceAliasing(Name nameObj, Ref ref) {
    compiler.report(
        JSError.make(ref.node, UNSAFE_NAMESPACE_WARNING, nameObj.getFullName()));
  }

  /**
   * Reports a warning because a namespace was redefined.
   *
   * @param nameObj A namespace that is being redefined
   * @param ref The reference that set the namespace
   */
  private void warnAboutNamespaceRedefinition(Name nameObj, Ref ref) {
    compiler.report(
        JSError.make(ref.node, NAMESPACE_REDEFINED_WARNING, nameObj.getFullName()));
  }

  /**
   * Flattens all references to collapsible properties of a global name except
   * their initial definitions. Recurs on subnames.
   *
   * @param n An object representing a global name
   * @param alias The flattened name for {@code n}
   */
  private void flattenReferencesToCollapsibleDescendantNames(
      Name n, String alias) {
    if (n.props == null || n.isCollapsingExplicitlyDenied()) {
      return;
    }

    for (Name p : n.props) {
      String propAlias = appendPropForAlias(alias, p.getBaseName());
      if (p.canCollapse()) {
        flattenReferencesTo(p, propAlias);
      } else if (p.isSimpleStubDeclaration()
          && !p.isCollapsingExplicitlyDenied()) {
        flattenSimpleStubDeclaration(p, propAlias);
      }

      flattenReferencesToCollapsibleDescendantNames(p, propAlias);
    }
  }

  /**
   * Flattens a stub declaration.
   * This is mostly a hack to support legacy users.
   */
  private void flattenSimpleStubDeclaration(Name name, String alias) {
    Ref ref = Iterables.getOnlyElement(name.getRefs());
    Node nameNode = NodeUtil.newName(
        compiler, alias, ref.node, name.getFullName());
    Node varNode = IR.var(nameNode).useSourceInfoIfMissingFrom(nameNode);

    // The stub must be a bare expression statement (e.g. "a.b;"); replace
    // the whole statement with "var a$b;".
    Preconditions.checkState(ref.node.getParent().isExprResult());
    Node parent = ref.node.getParent();
    Node grandparent = parent.getParent();
    grandparent.replaceChild(parent, varNode);
    compiler.reportCodeChange();
  }

  /**
   * Flattens all references to a collapsible property of a global name except
   * its initial definition.
   *
   * @param n A global property name (e.g. "a.b" or "a.b.c.d")
   * @param alias The flattened name (e.g. "a$b" or "a$b$c$d")
   */
  private void flattenReferencesTo(Name n, String alias) {
    String originalName = n.getFullName();
    for (Ref r : n.getRefs()) {
      if (r == n.getDeclaration()) {
        // Declarations are handled separately.
        continue;
      }
      Node rParent = r.node.getParent();
      // There are two cases when we shouldn't flatten a reference:
      // 1) Object literal keys, because duplicate keys show up as refs.
      // 2) References inside a complex assign. (a = x.y = 0). These are
      //    called TWIN references, because they show up twice in the
      //    reference list. Only collapse the set, not the alias.
      if (!NodeUtil.isObjectLitKey(r.node)
          && (r.getTwin() == null || r.isSet())) {
        flattenNameRef(alias, r.node, rParent, originalName);
      }
    }

    // Flatten all occurrences of a name as a prefix of its subnames. For
    // example, if {@code n} corresponds to the name "a.b", then "a.b" will be
    // replaced with "a$b" in all occurrences of "a.b.c", "a.b.c.d", etc.
    if (n.props != null) {
      for (Name p : n.props) {
        flattenPrefixes(alias, p, 1);
      }
    }
  }

  /**
   * Flattens all occurrences of a name as a prefix of subnames beginning
   * with a particular subname.
   *
   * @param n A global property name (e.g. "a.b.c.d")
   * @param alias A flattened prefix name (e.g. "a$b")
   * @param depth The difference in depth between the property name and
   *     the prefix name (e.g. 2)
   */
  private void flattenPrefixes(String alias, Name n, int depth) {
    // Only flatten the prefix of a name declaration if the name being
    // initialized is fully qualified (i.e. not an object literal key).
    String originalName = n.getFullName();
    Ref decl = n.getDeclaration();
    if (decl != null && decl.node != null && decl.node.isGetProp()) {
      flattenNameRefAtDepth(alias, decl.node, depth, originalName);
    }

    for (Ref r : n.getRefs()) {
      if (r == decl) {
        // Declarations are handled separately.
        continue;
      }
      // References inside a complex assign (a = x.y = 0)
      // have twins. We should only flatten one of the twins.
      if (r.getTwin() == null || r.isSet()) {
        flattenNameRefAtDepth(alias, r.node, depth, originalName);
      }
    }

    if (n.props != null) {
      for (Name p : n.props) {
        flattenPrefixes(alias, p, depth + 1);
      }
    }
  }

  /**
   * Flattens a particular prefix of a single name reference.
   *
   * @param alias A flattened prefix name (e.g. "a$b")
   * @param n The node corresponding to a subproperty name (e.g. "a.b.c.d")
   * @param depth The difference in depth between the property name and
   *     the prefix name (e.g. 2)
   * @param originalName String version of the property name.
   */
  private void flattenNameRefAtDepth(String alias, Node n, int depth,
      String originalName) {
    // This method has to work for both GETPROP chains and, in rare cases,
    // OBJLIT keys, possibly nested. That's why we check for children before
    // proceeding. In the OBJLIT case, we don't need to do anything.
    int nType = n.getType();
    boolean isQName = nType == Token.NAME || nType == Token.GETPROP;
    boolean isObjKey = NodeUtil.isObjectLitKey(n);
    Preconditions.checkState(isObjKey || isQName);
    if (isQName) {
      for (int i = 1; i < depth && n.hasChildren(); i++) {
        n = n.getFirstChild();
      }
      if (n.isGetProp() && n.getFirstChild().isGetProp()) {
        flattenNameRef(alias, n.getFirstChild(), n, originalName);
      }
    }
  }

  /**
   * Replaces a GETPROP a.b.c with a NAME a$b$c.
   *
   * @param alias A flattened prefix name (e.g. "a$b")
   * @param n The GETPROP node corresponding to the original name (e.g. "a.b")
   * @param parent {@code n}'s parent
   * @param originalName String version of the property name.
   */
  private void flattenNameRef(String alias, Node n, Node parent,
      String originalName) {
    Preconditions.checkArgument(n.isGetProp(),
        "Expected GETPROP, found %s. Node: %s", Token.name(n.getType()), n);

    // BEFORE:
    //   getprop
    //     getprop
    //       name a
    //       string b
    //     string c
    // AFTER:
    //   name a$b$c
    Node ref = NodeUtil.newName(compiler, alias, n, originalName);
    NodeUtil.copyNameAnnotations(n.getLastChild(), ref);
    if (parent.isCall() && n == parent.getFirstChild()) {
      // The node was a call target. We deliberately flatten these, since we
      // know the "this" isn't provided by the namespace. Mark it as such:
      parent.putBooleanProp(Node.FREE_CALL, true);
    }

    TypeI type = n.getTypeI();
    if (type != null) {
      ref.setTypeI(type);
    }

    parent.replaceChild(n, ref);
    compiler.reportCodeChange();
  }

  /**
   * Collapses definitions of the collapsible properties of a global name.
   * Recurs on subnames that also represent JavaScript objects with
   * collapsible properties.
   *
   * @param n A node representing a global name
   * @param alias The flattened name for {@code n}
   */
  private void collapseDeclarationOfNameAndDescendants(Name n, String alias) {
    boolean canCollapseChildNames = n.canCollapseUnannotatedChildNames();

    // Handle this name first so that nested object literals get unrolled.
    if (n.canCollapse()) {
      updateObjLitOrFunctionDeclaration(n, alias, canCollapseChildNames);
    }

    if (n.props == null) {
      return;
    }
    for (Name p : n.props) {
      // Recur first so that saved node ancestries are intact when needed.
      collapseDeclarationOfNameAndDescendants(
          p, appendPropForAlias(alias, p.getBaseName()));
      if (!p.inExterns && canCollapseChildNames && p.getDeclaration() != null
          && p.canCollapse() && p.getDeclaration().node != null
          && p.getDeclaration().node.getParent() != null
          && p.getDeclaration().node.getParent().isAssign()) {
        updateSimpleDeclaration(
            appendPropForAlias(alias, p.getBaseName()), p, p.getDeclaration());
      }
    }
  }

  /**
   * Updates the initial assignment to a collapsible property at global scope
   * by changing it to a variable declaration (e.g. a.b = 1 -> var a$b = 1).
   * The property's value may either be a primitive or an object literal or
   * function whose properties aren't collapsible.
   *
   * @param alias The flattened property name (e.g. "a$b")
   * @param refName The name for the reference being updated.
   * @param ref An object containing information about the assignment getting
   *     updated
   */
  private void updateSimpleDeclaration(String alias, Name refName, Ref ref) {
    Node rvalue = ref.node.getNext();
    Node parent = ref.node.getParent();
    Node grandparent = parent.getParent();
    Node greatGrandparent = grandparent.getParent();

    if (rvalue != null && rvalue.isFunction()) {
      checkForHosedThisReferences(rvalue, refName.docInfo, refName);
    }

    // Create the new alias node.
    Node nameNode = NodeUtil.newName(compiler, alias,
        grandparent.getFirstChild(), refName.getFullName());
    NodeUtil.copyNameAnnotations(ref.node.getLastChild(), nameNode);

    if (grandparent.isExprResult()) {
      // BEFORE: a.b.c = ...;
      //   exprstmt
      //     assign
      //       getprop
      //         getprop
      //           name a
      //           string b
      //         string c
      //       NODE
      // AFTER: var a$b$c = ...;
      //   var
      //     name a$b$c
      //       NODE

      // Remove the r-value (NODE).
      parent.removeChild(rvalue);
      nameNode.addChildToFront(rvalue);

      Node varNode = IR.var(nameNode);
      greatGrandparent.replaceChild(grandparent, varNode);
    } else {
      // This must be a complex assignment.
      Preconditions.checkNotNull(ref.getTwin());

      // BEFORE:
      // ... (x.y = 3);
      //
      // AFTER:
      // var x$y;
      // ... (x$y = 3);

      // Walk up to the enclosing statement so the stub var can be inserted
      // at statement level.
      Node current = grandparent;
      Node currentParent = grandparent.getParent();
      for (; !currentParent.isScript() && !currentParent.isBlock();
           current = currentParent,
           currentParent = currentParent.getParent()) {}

      // Create a stub variable declaration right
      // before the current statement.
      Node stubVar = IR.var(nameNode.cloneTree())
          .useSourceInfoIfMissingFrom(nameNode);
      currentParent.addChildBefore(stubVar, current);

      parent.replaceChild(ref.node, nameNode);
    }

    compiler.reportCodeChange();
  }

  /**
   * Updates the first initialization (a.k.a "declaration") of a global name.
   * This involves flattening the global name (if it's not just a global
   * variable name already), collapsing object literal keys into global
   * variables, declaring stub global variables for properties added later
   * in a local scope.
   *
   * It may seem odd that this function also takes care of declaring stubs
   * for direct children. The ultimate goal of this function is to eliminate
   * the global name entirely (when possible), so that "middlemen" namespaces
   * disappear, and to do that we need to make sure that all the direct children
   * will be collapsed as well.
   *
   * @param n An object representing a global name (e.g. "a", "a.b.c")
   * @param alias The flattened name for {@code n} (e.g. "a", "a$b$c")
   * @param canCollapseChildNames Whether it's possible to collapse children of
   *     this name. (This is mostly passed for convenience; it's equivalent to
   *     n.canCollapseChildNames()).
   */
  private void updateObjLitOrFunctionDeclaration(
      Name n, String alias, boolean canCollapseChildNames) {
    Ref decl = n.getDeclaration();
    if (decl == null) {
      // Some names do not have declarations, because they
      // are only defined in local scopes.
      return;
    }

    if (decl.getTwin() != null) {
      // Twin declarations will get handled when normal references
      // are handled.
      return;
    }

    // Declarations with any other parent type are deliberately left alone.
    switch (decl.node.getParent().getType()) {
      case Token.ASSIGN:
        updateObjLitOrFunctionDeclarationAtAssignNode(
            n, alias, canCollapseChildNames);
        break;
      case Token.VAR:
        updateObjLitOrFunctionDeclarationAtVarNode(n, canCollapseChildNames);
        break;
      case Token.FUNCTION:
        updateFunctionDeclarationAtFunctionNode(n, canCollapseChildNames);
        break;
    }
  }

  /**
   * Updates the first initialization (a.k.a "declaration") of a global name
   * that occurs at an ASSIGN node. See comment for
   * {@link #updateObjLitOrFunctionDeclaration}.
   *
   * @param n An object representing a global name (e.g. "a", "a.b.c")
   * @param alias The flattened name for {@code n} (e.g. "a", "a$b$c")
   */
  private void updateObjLitOrFunctionDeclarationAtAssignNode(
      Name n, String alias, boolean canCollapseChildNames) {
    // NOTE: It's important that we don't add additional nodes
    // (e.g. a var node before the exprstmt) because the exprstmt might be
    // the child of an if statement that's not inside a block).
    Ref ref = n.getDeclaration();
    Node rvalue = ref.node.getNext();
    Node varNode = new Node(Token.VAR);
    Node varParent = ref.node.getAncestor(3);
    Node grandparent = ref.node.getAncestor(2);
    boolean isObjLit = rvalue.isObjectLit();
    boolean insertedVarNode = false;

    if (isObjLit && n.canEliminate()) {
      // Eliminate the object literal altogether.
      varParent.replaceChild(grandparent, varNode);
      ref.node = null;
      insertedVarNode = true;
    } else if (!n.isSimpleName()) {
      // Create a VAR node to declare the name.
      if (rvalue.isFunction()) {
        checkForHosedThisReferences(rvalue, n.docInfo, n);
      }

      ref.node.getParent().removeChild(rvalue);

      Node nameNode = NodeUtil.newName(compiler,
          alias, ref.node.getAncestor(2), n.getFullName());

      JSDocInfo info = NodeUtil.getBestJSDocInfo(ref.node.getParent());
      if (ref.node.getLastChild().getBooleanProp(Node.IS_CONSTANT_NAME)
          || (info != null && info.isConstant())) {
        nameNode.putBooleanProp(Node.IS_CONSTANT_NAME, true);
      }

      if (info != null) {
        varNode.setJSDocInfo(info);
      }
      varNode.addChildToBack(nameNode);
      nameNode.addChildToFront(rvalue);
      varParent.replaceChild(grandparent, varNode);

      // Update the node ancestry stored in the reference.
      ref.node = nameNode;
      insertedVarNode = true;
    }

    if (canCollapseChildNames) {
      if (isObjLit) {
        declareVarsForObjLitValues(
            n, alias, rvalue,
            varNode, varParent.getChildBefore(varNode), varParent);
      }

      addStubsForUndeclaredProperties(n, alias, varParent, varNode);
    }

    if (insertedVarNode) {
      if (!varNode.hasChildren()) {
        varParent.removeChild(varNode);
      }
      compiler.reportCodeChange();
    }
  }

  /**
   * Warns about any references to "this" in the given FUNCTION. The function
   * is getting collapsed, so the references will change.
   */
  private void checkForHosedThisReferences(Node function, JSDocInfo docInfo,
      final Name name) {
    // A function is getting collapsed. Make sure that if it refers to
    // "this", it must be a constructor or documented with @this.
    if (docInfo == null ||
        (!docInfo.isConstructor() && !docInfo.hasThisType())) {
      NodeTraversal.traverseEs6(compiler, function.getLastChild(),
          new NodeTraversal.AbstractShallowCallback() {
            @Override
            public void visit(NodeTraversal t, Node n, Node parent) {
              if (n.isThis()) {
                compiler.report(
                    JSError.make(n, UNSAFE_THIS, name.getFullName()));
              }
            }
          });
    }
  }

  /**
   * Updates the first initialization (a.k.a "declaration") of a global name
   * that occurs at a VAR node. See comment for
   * {@link #updateObjLitOrFunctionDeclaration}.
   *
   * @param n An object representing a global name (e.g. "a")
   */
  private void updateObjLitOrFunctionDeclarationAtVarNode(
      Name n, boolean canCollapseChildNames) {
    if (!canCollapseChildNames) {
      return;
    }

    Ref ref = n.getDeclaration();
    String name = ref.node.getString();
    Node rvalue = ref.node.getFirstChild();
    Node varNode = ref.node.getParent();
    Node grandparent = varNode.getParent();

    boolean isObjLit = rvalue.isObjectLit();
    int numChanges = 0;

    if (isObjLit) {
      numChanges += declareVarsForObjLitValues(
          n, name, rvalue, varNode, grandparent.getChildBefore(varNode),
          grandparent);
    }

    numChanges += addStubsForUndeclaredProperties(n, name, grandparent, varNode);

    if (isObjLit && n.canEliminate()) {
      varNode.removeChild(ref.node);
      if (!varNode.hasChildren()) {
        grandparent.removeChild(varNode);
      }
      numChanges++;

      // Clear out the object reference, since we've eliminated it from the
      // parse tree.
      ref.node = null;
    }

    if (numChanges > 0) {
      compiler.reportCodeChange();
    }
  }

  /**
   * Updates the first initialization (a.k.a "declaration") of a global name
   * that occurs at a FUNCTION node. See comment for
   * {@link #updateObjLitOrFunctionDeclaration}.
   *
   * @param n An object representing a global name (e.g.
   *     "a")
   */
  private void updateFunctionDeclarationAtFunctionNode(
      Name n, boolean canCollapseChildNames) {
    if (!canCollapseChildNames || !n.canCollapse()) {
      return;
    }

    Ref ref = n.getDeclaration();
    String fnName = ref.node.getString();
    addStubsForUndeclaredProperties(
        n, fnName, ref.node.getAncestor(2), ref.node.getParent());
  }

  /**
   * Declares global variables to serve as aliases for the values in an object
   * literal, optionally removing all of the object literal's keys and values.
   *
   * @param objlitName The global name whose declaration is the object literal
   * @param alias The object literal's flattened name (e.g. "a$b$c")
   * @param objlit The OBJLIT node
   * @param varNode The VAR node to which new global variables should be added
   *     as children
   * @param nameToAddAfter The child of {@code varNode} after which new
   *     variables should be added (may be null)
   * @param varParent {@code varNode}'s parent
   * @return The number of variables added
   */
  private int declareVarsForObjLitValues(
      Name objlitName, String alias, Node objlit, Node varNode,
      Node nameToAddAfter, Node varParent) {
    int numVars = 0;
    int arbitraryNameCounter = 0;
    boolean discardKeys = !objlitName.shouldKeepKeys();
    for (Node key = objlit.getFirstChild(), nextKey; key != null;
         key = nextKey) {
      Node value = key.getFirstChild();
      nextKey = key.getNext();

      // A get or a set can not be rewritten as a VAR.
      if (key.isGetterDef() || key.isSetterDef()) {
        continue;
      }

      // We generate arbitrary names for keys that aren't valid JavaScript
      // identifiers, since those keys are never referenced. (If they were,
      // this object literal's child names wouldn't be collapsible.) The only
      // reason that we don't eliminate them entirely is the off chance that
      // their values are expressions that have side effects.
      boolean isJsIdentifier = !key.isNumber()
          && TokenStream.isJSIdentifier(key.getString());
      String propName = isJsIdentifier ?
          key.getString() : String.valueOf(++arbitraryNameCounter);

      // If the name cannot be collapsed, skip it.
      String qName = objlitName.getFullName() + '.' + propName;
      Name p = nameMap.get(qName);
      if (p != null && !p.canCollapse()) {
        continue;
      }

      String propAlias = appendPropForAlias(alias, propName);
      Node refNode = null;
      if (discardKeys) {
        objlit.removeChild(key);
        value.detachFromParent();
      } else {
        // Substitute a reference for the value.
        refNode = IR.name(propAlias);
        if (key.getBooleanProp(Node.IS_CONSTANT_NAME)) {
          refNode.putBooleanProp(Node.IS_CONSTANT_NAME, true);
        }
        key.replaceChild(value, refNode);
      }

      // Declare the collapsed name as a variable with the original value.
      Node nameNode = IR.name(propAlias);
      nameNode.addChildToFront(value);
      if (key.getBooleanProp(Node.IS_CONSTANT_NAME)) {
        nameNode.putBooleanProp(Node.IS_CONSTANT_NAME, true);
      }
      Node newVar = IR.var(nameNode)
          .useSourceInfoIfMissingFromForTree(key);
      if (nameToAddAfter != null) {
        varParent.addChildAfter(newVar, nameToAddAfter);
      } else {
        varParent.addChildBefore(newVar, varNode);
      }
      compiler.reportCodeChange();
      nameToAddAfter = newVar;

      // Update the global name's node ancestry if it hasn't already been
      // done. (Duplicate keys in an object literal can bring us here twice
      // for the same global name.)
      if (isJsIdentifier && p != null) {
        if (!discardKeys) {
          Ref newAlias =
              p.getDeclaration().cloneAndReclassify(Ref.Type.ALIASING_GET);
          newAlias.node = refNode;
          p.addRef(newAlias);
        }

        p.getDeclaration().node = nameNode;

        if (value.isFunction()) {
          checkForHosedThisReferences(value, key.getJSDocInfo(), p);
        }
      }

      numVars++;
    }
    return numVars;
  }

  /**
   * Adds global variable "stubs" for any properties of a global name that are
   * only set in a local scope or read but never set.
   *
   * @param n An object representing a global name (e.g. "a", "a.b.c")
   * @param alias The flattened name of the object whose properties we are
   *     adding stubs for (e.g. "a$b$c")
   * @param parent The node to which new global variables should be added
   *     as children
   * @param addAfter The child of {@code parent} after which new
   *     variables should be added
   * @return The number of variables added
   */
  private int addStubsForUndeclaredProperties(
      Name n, String alias, Node parent, Node addAfter) {
    Preconditions.checkState(n.canCollapseUnannotatedChildNames(), n);
    Preconditions.checkArgument(NodeUtil.isStatementBlock(parent), parent);
    Preconditions.checkNotNull(addAfter);
    if (n.props == null) {
      return 0;
    }
    int numStubs = 0;
    for (Name p : n.props) {
      if (p.needsToBeStubbed()) {
        String propAlias = appendPropForAlias(alias, p.getBaseName());
        Node nameNode = IR.name(propAlias);
        Node newVar = IR.var(nameNode)
            .useSourceInfoIfMissingFromForTree(addAfter);
        parent.addChildAfter(newVar, addAfter);
        addAfter = newVar;
        numStubs++;
        compiler.reportCodeChange();

        // Determine if this is a constant var by checking the first
        // reference to it. Don't check the declaration, as it might be null.
        if (p.getRefs().get(0).node.getLastChild().getBooleanProp(
                Node.IS_CONSTANT_NAME)) {
          nameNode.putBooleanProp(Node.IS_CONSTANT_NAME, true);
        }
      }
    }
    return numStubs;
  }

  /**
   * Builds the flattened alias for a property, disambiguating against any
   * name that already exists in the global namespace index.
   */
  private String appendPropForAlias(String root, String prop) {
    if (prop.indexOf('$') != -1) {
      // Encode '$' in a property as '$0'. Because '0' cannot be the
      // start of an identifier, this will never conflict with our
      // encoding from '.' -> '$'.
      prop = prop.replace("$", "$0");
    }
    String result = root + '$' + prop;
    int id = 1;
    while (nameMap.containsKey(result)) {
      result = root + '$' + prop + '$' + id;
      id++;
    }
    return result;
  }
}
/**********************************************************************************
 * $URL$
 * $Id$
 ***********************************************************************************
 *
 * Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008 Sakai Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.opensource.org/licenses/ECL-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 **********************************************************************************/

package org.sakaiproject.site.impl;

import net.sf.ehcache.CacheException;
import net.sf.ehcache.Ehcache;
import net.sf.ehcache.Element;
import net.sf.ehcache.event.CacheEventListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.sakaiproject.component.api.ServerConfigurationService;
import org.sakaiproject.memory.api.Cache;
import org.sakaiproject.memory.api.MemoryService;
import org.sakaiproject.site.api.Group;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.site.api.SitePage;
import org.sakaiproject.site.api.ToolConfiguration;

import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * <p>
 * SiteCacheImpl is a cache tuned for Site (and page / tool) access.
 * </p>
 * @deprecated after 10, remove this for Sakai 11
 */
public class SiteCacheImpl implements CacheEventListener, SiteCache
{
	private static Logger M_log = LoggerFactory.getLogger(SiteCacheImpl.class);

	/** Map of a tool id to a cached site's tool configuration instance. */
	protected Map<String, ToolConfiguration> m_tools = new ConcurrentHashMap<String, ToolConfiguration>();

	/** Map of a page id to a cached site's SitePage instance. */
	protected Map<String, SitePage> m_pages = new ConcurrentHashMap<String, SitePage>();

	/** Map of a group id to a cached site's Group instance. */
	protected Map<String, Group> m_groups = new ConcurrentHashMap<String, Group>();

	/** The base cache. */
	protected Cache m_cache = null;

	/** Used to read the cache-metrics configuration (see the constructor). */
	ServerConfigurationService serverConfigurationService = null;

	/*** Variables to implement site cache specific metrics. The usual Ehcache metrics are not
	 * sufficient because we handle the page / tool / group caching outside of Ehcache. ***/
	/* Count number of cache event callbacks to the site cache implementation */
	private int cacheEventCount = 0;
	/* Set event interval at which to report the current status of the site cache */
	private int cacheEventReportInterval = 0;

	/**
	 * Construct the Cache. No automatic refresh: expire only, from time and events.
	 *
	 * @param memoryService
	 *        The memory service that supplies the underlying cache.
	 * @param sleep
	 *        The number of seconds to sleep between expiration checks.
	 * @param pattern
	 *        The "startsWith()" string for all resources that may be in this cache - if null, don't watch events for updates.
	 * @param serverConfigurationService
	 *        Source of the cacheEventReportInterval setting.
	 */
	// Modify constructor to allow injecting the server configuration service.
	public SiteCacheImpl(MemoryService memoryService, long sleep, String pattern, ServerConfigurationService serverConfigurationService)
	{
		m_cache = memoryService.newCache(
				"org.sakaiproject.site.impl.SiteCacheImpl.cache", pattern);
		// Provide an instance of the server configuration service.
		this.serverConfigurationService = serverConfigurationService;
		cacheEventReportInterval = serverConfigurationService.getInt("org.sakaiproject.site.impl.SiteCacheImpl.cache.cacheEventReportInterval", cacheEventReportInterval);
	}

	// Supply a default server configuration service if it is not supplied.
public SiteCacheImpl(MemoryService memoryService, long sleep, String pattern) { this(memoryService,sleep,pattern, (ServerConfigurationService)org.sakaiproject.component.cover.ServerConfigurationService.getInstance()); } /** * Cache an object * * @param key * The key with which to find the object. * @param payload * The object to cache. */ @Override public void put(String key, Object payload) { m_cache.put(key, payload); } /** * Test for a non expired entry in the cache. * * 2016-09-07 - Mitch Golden - The key may in fact be expired! * Be careful if you use this method * * @param key * The cache key. * @return true if the key maps to a cache entry, false if not. */ @Override public boolean containsKey(String key) { return m_cache.containsKey(key); } /** * Get the non expired entry, or null if not there (or expired) * * @param key * The cache key. * @return The payload, or null if the payload is null, the key is not found, or the entry has expired (Note: use containsKey() to remove this ambiguity). */ @Override public Object get(String key) { return m_cache.get(key); } /** * Clear all entries. */ @Override public void clear() { m_cache.clear(); } /** * Remove this entry from the cache. * * @param key * The cache key. */ @Override public boolean remove(String key) { return m_cache.remove(key); } /** * Access the tool that is part of a cached site, by tool Id. * * @param toolId * The tool's id. * @return The ToolConfiguration that has this id, from a cached site. */ @Override public ToolConfiguration getTool(String toolId) { return (ToolConfiguration) m_tools.get(toolId); } /** * Access the page that is part of a cached site, by page Id. * * @param pageId * The page's id. * @return The SitePage that has this id, from a cached site. */ @Override public SitePage getPage(String pageId) { return (SitePage) m_pages.get(pageId); } /** * Access the group that is part of a cached site, by group Id. * * @param id * The group id. 
* @return The Group that has this id, from a cached site. */ @Override public Group getGroup(String groupId) { return (Group) m_groups.get(groupId); } private void notifyCacheClear() { // clear the tool ids m_tools.clear(); // clear the pages m_pages.clear(); // clear the groups m_groups.clear(); } private void notifyCachePut(String key, Object payload) { // add the payload (Site) tool ids if (payload instanceof Site) { Site site = (Site) payload; Collection<SitePage> sitePages; Collection<Group> siteGroups; // TODO: If the boolean versions of getPages and getGroups are // added to the Site interface, this check should be removed. if (site instanceof BaseSite) { sitePages = ((BaseSite) site).getPages(false); siteGroups = ((BaseSite) site).getGroups(false); } else { sitePages = site.getPages(); siteGroups = site.getGroups(); } // add the pages and tools to the cache for (Iterator<SitePage> pages = sitePages.iterator(); pages.hasNext();) { SitePage page = (SitePage) pages.next(); m_pages.put(page.getId(), page); for (Iterator<ToolConfiguration> tools = page.getTools().iterator(); tools.hasNext();) { ToolConfiguration tool = (ToolConfiguration) tools.next(); m_tools.put(tool.getId(), tool); } } // add the groups to the cache for (Iterator<Group> groups = siteGroups.iterator(); groups.hasNext();) { Group group = (Group) groups.next(); m_groups.put(group.getId(), group); } } } private void notifyCacheRemove(String key, Object payload) { // clear the tool ids for this site if ((payload != null) && (payload instanceof Site)) { Site site = (Site) payload; for (Iterator<SitePage> pages = site.getPages().iterator(); pages.hasNext();) { SitePage page = (SitePage) pages.next(); m_pages.remove(page.getId()); for (Iterator<ToolConfiguration> tools = page.getTools().iterator(); tools.hasNext();) { ToolConfiguration tool = (ToolConfiguration) tools.next(); m_tools.remove(tool.getId()); } } for (Iterator<Group> groups = site.getGroups().iterator(); groups.hasNext();) { Group group 
= (Group) groups.next(); m_groups.remove(group.getId()); } } } /*********** * Implement routines for Ehcache event notification. This is to allow explicitly cleaning the * tool, page, group maps. ***********/ public int getCacheEventReportInterval() { return cacheEventReportInterval; } public void setCacheEventReportInterval(int cacheEventReportInterval) { this.cacheEventReportInterval = cacheEventReportInterval; } /* Note that events happen only when there is a change to the contents of the cache * so with an efficient cache configuration the tracking of the events will not be expensive. * If the cache configuration is not efficient then you want to know about it. */ protected void updateSiteCacheStatistics() { if (cacheEventReportInterval == 0) { return; } ++cacheEventCount; if (cacheEventCount % cacheEventReportInterval != 0) { return; } if (M_log.isDebugEnabled()) M_log.debug("SiteCacheSafe:" + " eventCount: " + cacheEventCount + " tools: " + m_tools.size() + " pages: " + m_pages.size() + " groups: " + m_groups.size() ); } public void dispose() { M_log.debug("ehcache event: dispose"); } public void notifyElementEvicted(Ehcache cache, Element element) { if (M_log.isDebugEnabled()) { M_log.debug("ehcache event: notifyElementEvicted: "+element.getKey()); } notifyCacheRemove(element.getObjectKey().toString(), element.getObjectValue()); updateSiteCacheStatistics(); } public void notifyElementExpired(Ehcache cache, Element element) { if (M_log.isDebugEnabled()) { M_log.debug("ehcache event: notifyElementExpired: "+element.getKey()); } notifyCacheRemove(element.getObjectKey().toString(), element.getObjectValue()); updateSiteCacheStatistics(); } public void notifyElementPut(Ehcache cache, Element element) throws CacheException { if (M_log.isDebugEnabled()) { M_log.debug("ehcache event: notifyElementPut: "+element.getKey()); } notifyCachePut(element.getObjectKey().toString(), element.getObjectValue()); updateSiteCacheStatistics(); } public void 
notifyElementRemoved(Ehcache cache, Element element) throws CacheException { if (M_log.isDebugEnabled()) { M_log.debug("ehcache event: notifyElementRemoved: "+element.getKey()); } notifyCacheRemove(element.getObjectKey().toString(), element.getObjectValue()); updateSiteCacheStatistics(); } public void notifyElementUpdated(Ehcache cache, Element element) throws CacheException { if (M_log.isDebugEnabled()) { M_log.debug("ehcache event: notifyElementUpdated: "+element.getKey()); } updateSiteCacheStatistics(); } public void notifyRemoveAll(Ehcache cache) { if (M_log.isDebugEnabled()) { M_log.debug("ehcache event: notifyRemoveAll"); } notifyCacheClear(); updateSiteCacheStatistics(); } @Override public Object clone() throws CloneNotSupportedException { M_log.debug("ehcache event: clone()"); // Creates a clone of this listener. This method will only be called by ehcache before a cache is initialized. // This may not be possible for listeners after they have been initialized. Implementations should throw CloneNotSupportedException if they do not support clone. throw new CloneNotSupportedException( "CacheEventListener implementations should throw CloneNotSupportedException if they do not support clone"); } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.quotas;

import java.util.concurrent.TimeUnit;

import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;

/**
 * Simple rate limiter.
 *
 * Tracks a pool of available resource units that is periodically refilled
 * (refill strategy supplied by subclasses) and drained by callers.
 *
 * Usage Example:
 *    // Start with an unlimited (bypass) limiter, then cap it:
 *    RateLimiter limiter = new AverageIntervalRateLimiter();
 *                          // or new FixedIntervalRateLimiter();
 *    limiter.set(10, TimeUnit.SECONDS);       // 10 resources/sec
 *
 *    while (true) {
 *      // check availability before doing the resource-consuming work
 *      bool canExecute = limiter.canExecute();
 *      // when nothing is available, wait until a unit is refilled
 *      if (!canExecute) Thread.sleep(limiter.waitInterval());
 *      // ...execute the work and consume the resource...
 *      limiter.consume();
 *    }
 */
@InterfaceAudience.Private
@InterfaceStability.Evolving
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="IS2_INCONSISTENT_SYNC",
  justification="FindBugs seems confused; says limit and tlimit " +
  "are mostly synchronized...but to me it looks like they are totally synchronized")
public abstract class RateLimiter {
  public static final String QUOTA_RATE_LIMITER_CONF_KEY = "hbase.quota.rate.limiter";

  /** Time-unit factor for translating to ms. */
  private long tunit = 1000;
  /** The max value available resource units can be refilled to. */
  private long limit = Long.MAX_VALUE;
  /** Currently available resource units. */
  private long avail = Long.MAX_VALUE;

  /**
   * Refill the available units w.r.t the elapsed time.
   * @param limit Maximum available resource units that can be refilled to.
   * @return how many resource units may be refilled ?
   */
  abstract long refill(long limit);

  /**
   * Time in milliseconds to wait for before requesting to consume 'amount' resource.
   * @param limit Maximum available resource units that can be refilled to.
   * @param available Currently available resource units
   * @param amount Resources for which time interval to calculate for
   * @return estimate of the ms required to wait before being able to provide 'amount' resources.
   */
  abstract long getWaitInterval(long limit, long available, long amount);

  /**
   * Set the RateLimiter max available resources and refill period.
   * @param limit The max value available resource units can be refilled to.
   * @param timeUnit Timeunit factor for translating to ms.
   * @throws RuntimeException for units finer than milliseconds.
   */
  public synchronized void set(final long limit, final TimeUnit timeUnit) {
    long millisPerUnit;
    switch (timeUnit) {
    case MILLISECONDS:
      millisPerUnit = 1;
      break;
    case SECONDS:
      millisPerUnit = 1000;
      break;
    case MINUTES:
      millisPerUnit = 60 * 1000;
      break;
    case HOURS:
      millisPerUnit = 60 * 60 * 1000;
      break;
    case DAYS:
      millisPerUnit = 24 * 60 * 60 * 1000;
      break;
    default:
      throw new RuntimeException("Unsupported " + timeUnit.name() + " TimeUnit.");
    }
    this.tunit = millisPerUnit;
    this.limit = limit;
    this.avail = limit;
  }

  @Override
  public String toString() {
    final String name = this.getClass().getSimpleName();
    if (getLimit() == Long.MAX_VALUE) {
      return name + "(Bypass)";
    }
    return name + "(avail=" + getAvailable() + " limit=" + getLimit() +
        " tunit=" + getTimeUnitInMillis() + ")";
  }

  /**
   * Sets the current instance of RateLimiter to a new values.
   *
   * if current limit is smaller than the new limit, bump up the available resources.
   * Otherwise allow clients to use up the previously available resources.
   */
  public synchronized void update(final RateLimiter other) {
    this.tunit = other.tunit;
    if (this.limit < other.limit) {
      // Grant the difference between the old and new cap, guarding against
      // overflow: if avail is capped to this.limit it can never overflow,
      // otherwise clamp straight to the new cap.
      final long extra = other.limit - this.limit;
      this.avail = (this.avail <= Long.MAX_VALUE - extra)
          ? Math.min(this.avail + extra, other.limit)
          : other.limit;
    }
    this.limit = other.limit;
  }

  /** @return true when no cap is configured (limit is Long.MAX_VALUE). */
  public synchronized boolean isBypass() {
    return getLimit() == Long.MAX_VALUE;
  }

  /** @return the configured cap on available resource units. */
  public synchronized long getLimit() {
    return limit;
  }

  /** @return the resource units currently available. */
  public synchronized long getAvailable() {
    return avail;
  }

  /** @return the refill period expressed in milliseconds. */
  protected synchronized long getTimeUnitInMillis() {
    return tunit;
  }

  /**
   * Is there at least one resource available to allow execution?
   * @return true if there is at least one resource available, otherwise false
   */
  public boolean canExecute() {
    return canExecute(1);
  }

  /**
   * Are there enough available resources to allow execution?
   * @param amount the number of required resources, a non-negative number
   * @return true if there are enough available resources, otherwise false
   */
  public synchronized boolean canExecute(final long amount) {
    if (isBypass()) {
      return true;
    }

    final long refilled = refill(limit);
    // Nothing was refilled and not enough on hand: fail fast without touching avail.
    if (refilled == 0 && avail < amount) {
      return false;
    }
    // Fold the refill into avail, clamping to [0, limit] and guarding
    // against positive overflow of avail + refilled.
    avail = (avail <= Long.MAX_VALUE - refilled)
        ? Math.max(0, Math.min(avail + refilled, limit))
        : Math.max(0, limit);
    return avail >= amount;
  }

  /**
   * consume one available unit.
   */
  public void consume() {
    consume(1);
  }

  /**
   * consume amount available units, amount could be a negative number
   * @param amount the number of units to consume
   */
  public synchronized void consume(final long amount) {
    if (isBypass()) {
      return;
    }

    if (amount >= 0) {
      // Drain, flooring at zero.
      this.avail = Math.max(0, this.avail - amount);
    } else if (this.avail <= Long.MAX_VALUE + amount) {
      // Negative amount gives units back; cap at limit and guard overflow.
      this.avail = Math.min(this.avail - amount, this.limit);
    } else {
      this.avail = this.limit;
    }
  }

  /**
   * @return estimate of the ms required to wait before being able to provide 1 resource.
   */
  public long waitInterval() {
    return waitInterval(1);
  }

  /**
   * @return estimate of the ms required to wait before being able to provide "amount" resources.
   */
  public synchronized long waitInterval(final long amount) {
    // TODO Handle over quota?
    if (amount <= avail) {
      return 0;
    }
    return getWaitInterval(getLimit(), avail, amount);
  }

  // These two method are for strictly testing purpose only
  public abstract void setNextRefillTime(long nextRefillTime);

  public abstract long getNextRefillTime();
}
///////////////////////////////////////////////////////////////////////////////
// Copyright (c) 2002, Eric D. Friedman All Rights Reserved.
// Copyright (c) 2009, Robert D. Eden All Rights Reserved.
// Copyright (c) 2009, Jeff Randall All Rights Reserved.
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
///////////////////////////////////////////////////////////////////////////////

package gnu.trove.decorator;

import gnu.trove.map.TCharLongMap;
import gnu.trove.iterator.TCharLongIterator;

import java.io.*;
import java.util.*;


//////////////////////////////////////////////////
// THIS IS A GENERATED CLASS. DO NOT HAND EDIT! //
//////////////////////////////////////////////////


/**
 * Wrapper class to make a TCharLongMap conform to the <tt>java.util.Map</tt> API.
 * This class simply decorates an underlying TCharLongMap and translates the Object-based
 * APIs into their Trove primitive analogs.
 * <p/>
 * Null keys/values are represented by the underlying map's "no entry" key/value
 * sentinels; consequently a stored value equal to the no-entry value is
 * indistinguishable from an absent mapping (see {@link #get} / {@link #remove}).
 * <p/>
 * Note that wrapping and unwrapping primitive values is extremely inefficient. If
 * possible, users of this class should override the appropriate methods in this class
 * and use a table of canonical values.
 * <p/>
 * Created: Mon Sep 23 22:07:40 PDT 2002
 *
 * @author Eric D. Friedman
 * @author Robert D. Eden
 * @author Jeff Randall
 */
public class TCharLongMapDecorator extends AbstractMap<Character, Long>
	implements Map<Character, Long>, Externalizable, Cloneable {

	static final long serialVersionUID = 1L;

	/** the wrapped primitive map */
	protected TCharLongMap _map;


	/**
	 * FOR EXTERNALIZATION ONLY!!
	 */
	public TCharLongMapDecorator() {}


	/**
	 * Creates a wrapper that decorates the specified primitive map.
	 *
	 * @param map the <tt>TCharLongMap</tt> to wrap.
	 */
	public TCharLongMapDecorator( TCharLongMap map ) {
		super();
		this._map = map;
	}


	/**
	 * Returns a reference to the map wrapped by this decorator.
	 *
	 * @return the wrapped <tt>TCharLongMap</tt> instance.
	 */
	public TCharLongMap getMap() {
		return _map;
	}


	/**
	 * Inserts a key/value pair into the map.
	 *
	 * @param key an <code>Object</code> value
	 * @param value an <code>Object</code> value
	 * @return the previous value associated with <tt>key</tt>,
	 * or Long(0) if none was found.
	 */
	public Long put( Character key, Long value ) {
		char k;
		long v;
		// null key/value map to the primitive map's "no entry" sentinels
		if ( key == null ) {
			k = _map.getNoEntryKey();
		} else {
			k = unwrapKey( key );
		}
		if ( value == null ) {
			v = _map.getNoEntryValue();
		} else {
			v = unwrapValue( value );
		}
		long retval = _map.put( k, v );
		if ( retval == _map.getNoEntryValue() ) {
			return null;
		}
		return wrapValue( retval );
	}


	/**
	 * Retrieves the value for <tt>key</tt>
	 *
	 * @param key an <code>Object</code> value
	 * @return the value of <tt>key</tt> or null if no such mapping exists.
	 */
	public Long get( Object key ) {
		char k;
		if ( key != null ) {
			if ( key instanceof Character ) {
				k = unwrapKey( key );
			} else {
				// wrong key type can never be present
				return null;
			}
		} else {
			k = _map.getNoEntryKey();
		}
		long v = _map.get( k );
		// There may be a false positive since primitive maps
		// cannot return null, so we have to do an extra
		// check here.
		if ( v == _map.getNoEntryValue() ) {
			return null;
		} else {
			return wrapValue( v );
		}
	}


	/**
	 * Empties the map.
	 */
	public void clear() {
		this._map.clear();
	}


	/**
	 * Deletes a key/value pair from the map.
	 *
	 * @param key an <code>Object</code> value
	 * @return the removed value, or null if it was not found in the map
	 */
	public Long remove( Object key ) {
		char k;
		if ( key != null ) {
			if ( key instanceof Character ) {
				k = unwrapKey( key );
			} else {
				return null;
			}
		} else {
			k = _map.getNoEntryKey();
		}
		long v = _map.remove( k );
		// There may be a false positive since primitive maps
		// cannot return null, so we have to do an extra
		// check here.
		if ( v == _map.getNoEntryValue() ) {
			return null;
		} else {
			return wrapValue( v );
		}
	}


	/**
	 * Returns a Set view on the entries of the map.
	 * The view is live: structural changes made through it (remove/clear)
	 * write through to the underlying primitive map.
	 *
	 * @return a <code>Set</code> value
	 */
	public Set<Map.Entry<Character,Long>> entrySet() {
		return new AbstractSet<Map.Entry<Character,Long>>() {

			public int size() {
				return _map.size();
			}

			public boolean isEmpty() {
				return TCharLongMapDecorator.this.isEmpty();
			}

			public boolean contains( Object o ) {
				if (o instanceof Map.Entry) {
					Object k = ( ( Map.Entry ) o ).getKey();
					Object v = ( ( Map.Entry ) o ).getValue();
					return TCharLongMapDecorator.this.containsKey(k)
						&& TCharLongMapDecorator.this.get(k).equals(v);
				} else {
					return false;
				}
			}

			public Iterator<Map.Entry<Character,Long>> iterator() {
				// adapts the primitive TCharLongIterator to Map.Entry objects
				return new Iterator<Map.Entry<Character,Long>>() {
					private final TCharLongIterator it = _map.iterator();

					public Map.Entry<Character,Long> next() {
						it.advance();
						final Character key = wrapKey( it.key() );
						final Long v = wrapValue( it.value() );
						return new Map.Entry<Character,Long>() {
							private Long val = v;

							public boolean equals( Object o ) {
								return o instanceof Map.Entry
									&& ( ( Map.Entry ) o ).getKey().equals(key)
									&& ( ( Map.Entry ) o ).getValue().equals(val);
							}

							public Character getKey() {
								return key;
							}

							public Long getValue() {
								return val;
							}

							public int hashCode() {
								return key.hashCode() + val.hashCode();
							}

							public Long setValue( Long value ) {
								// writes through to the decorated map
								val = value;
								return put( key, value );
							}
						};
					}

					public boolean hasNext() {
						return it.hasNext();
					}

					public void remove() {
						it.remove();
					}
				};
			}

			public boolean add( Map.Entry<Character,Long> o ) {
				throw new UnsupportedOperationException();
			}

			public boolean remove( Object o ) {
				boolean modified = false;
				if ( contains( o ) ) {
					//noinspection unchecked
					Character key = ( ( Map.Entry<Character,Long> ) o ).getKey();
					_map.remove( unwrapKey( key ) );
					modified = true;
				}
				return modified;
			}

			public boolean addAll( Collection<? extends Map.Entry<Character, Long>> c ) {
				throw new UnsupportedOperationException();
			}

			public void clear() {
				TCharLongMapDecorator.this.clear();
			}
		};
	}


	/**
	 * Checks for the presence of <tt>val</tt> in the values of the map.
	 *
	 * @param val an <code>Object</code> value
	 * @return a <code>boolean</code> value
	 */
	public boolean containsValue( Object val ) {
		return val instanceof Long && _map.containsValue( unwrapValue( val ) );
	}


	/**
	 * Checks for the present of <tt>key</tt> in the keys of the map.
	 *
	 * @param key an <code>Object</code> value
	 * @return a <code>boolean</code> value
	 */
	public boolean containsKey( Object key ) {
		// a null key is translated to the no-entry key sentinel
		if ( key == null ) return _map.containsKey( _map.getNoEntryKey() );
		return key instanceof Character && _map.containsKey( unwrapKey( key ) );
	}


	/**
	 * Returns the number of entries in the map.
	 *
	 * @return the map's size.
	 */
	public int size() {
		return this._map.size();
	}


	/**
	 * Indicates whether map has any entries.
	 *
	 * @return true if the map is empty
	 */
	public boolean isEmpty() {
		return size() == 0;
	}


	/**
	 * Copies the key/value mappings in <tt>map</tt> into this map.
	 * Note that this will be a <b>deep</b> copy, as storage is by
	 * primitive value.
	 *
	 * @param map a <code>Map</code> value
	 */
	public void putAll( Map<? extends Character, ? extends Long> map ) {
		Iterator<? extends Entry<? extends Character,? extends Long>> it = map.entrySet().iterator();
		for ( int i = map.size(); i-- > 0; ) {
			Entry<? extends Character,? extends Long> e = it.next();
			this.put( e.getKey(), e.getValue() );
		}
	}


	/**
	 * Wraps a key
	 *
	 * @param k key in the underlying map
	 * @return an Object representation of the key
	 */
	protected Character wrapKey( char k ) {
		return Character.valueOf( k );
	}


	/**
	 * Unwraps a key
	 *
	 * @param key wrapped key
	 * @return an unwrapped representation of the key
	 */
	protected char unwrapKey( Object key ) {
		return ( ( Character ) key ).charValue();
	}


	/**
	 * Wraps a value
	 *
	 * @param k value in the underlying map
	 * @return an Object representation of the value
	 */
	protected Long wrapValue( long k ) {
		return Long.valueOf( k );
	}


	/**
	 * Unwraps a value
	 *
	 * @param value wrapped value
	 * @return an unwrapped representation of the value
	 */
	protected long unwrapValue( Object value ) {
		return ( ( Long ) value ).longValue();
	}


	// Implements Externalizable
	public void readExternal( ObjectInput in )
		throws IOException, ClassNotFoundException {

		// VERSION
		in.readByte();

		// MAP
		_map = ( TCharLongMap ) in.readObject();
	}


	// Implements Externalizable
	public void writeExternal( ObjectOutput out ) throws IOException {
		// VERSION
		out.writeByte(0);

		// MAP
		out.writeObject( _map );
	}

} // TCharLongHashMapDecorator
/* * Copyright (c) 2010-2015 Evolveum * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.evolveum.midpoint.schema; import java.util.ArrayList; import java.util.Collection; import java.util.List; import javax.xml.bind.JAXBException; import javax.xml.namespace.QName; import com.evolveum.midpoint.prism.*; import com.evolveum.prism.xml.ns._public.types_3.PolyStringType; import org.apache.commons.lang.Validate; import com.evolveum.midpoint.prism.delta.ChangeType; import com.evolveum.midpoint.prism.delta.ItemDelta; import com.evolveum.midpoint.prism.delta.ObjectDelta; import com.evolveum.midpoint.prism.path.ItemPath; import com.evolveum.midpoint.prism.util.RawTypeUtil; import com.evolveum.midpoint.prism.xnode.XNode; import com.evolveum.midpoint.schema.constants.SchemaConstants; import com.evolveum.midpoint.schema.result.OperationResult; import com.evolveum.midpoint.util.exception.SchemaException; import com.evolveum.midpoint.util.exception.SystemException; import com.evolveum.midpoint.xml.ns._public.common.api_types_3.ObjectDeltaListType; import com.evolveum.midpoint.xml.ns._public.common.api_types_3.ObjectModificationType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectDeltaOperationType; import com.evolveum.prism.xml.ns._public.types_3.ChangeTypeType; import com.evolveum.prism.xml.ns._public.types_3.ItemDeltaType; import com.evolveum.prism.xml.ns._public.types_3.ItemPathType; import com.evolveum.prism.xml.ns._public.types_3.ModificationTypeType; import 
com.evolveum.prism.xml.ns._public.types_3.ObjectDeltaType; import com.evolveum.prism.xml.ns._public.types_3.ObjectType; import com.evolveum.prism.xml.ns._public.types_3.RawType; import org.jetbrains.annotations.NotNull; /** * @author semancik * */ public class DeltaConvertor { public static final QName PATH_ELEMENT_NAME = new QName(PrismConstants.NS_TYPES, "path"); public static <T extends Objectable> ObjectDelta<T> createObjectDelta(ObjectModificationType objectModification, Class<T> type, PrismContext prismContext) throws SchemaException { Validate.notNull(prismContext, "No prismContext in DeltaConvertor.createObjectDelta call"); PrismObjectDefinition<T> objectDefinition = prismContext.getSchemaRegistry().findObjectDefinitionByCompileTimeClass(type); if (objectDefinition == null) { throw new SchemaException("No object definition for class "+type); } return createObjectDelta(objectModification, objectDefinition); } public static <T extends Objectable> ObjectDelta<T> createObjectDelta(ObjectModificationType objectModification, PrismObjectDefinition<T> objDef) throws SchemaException { ObjectDelta<T> objectDelta = new ObjectDelta<T>(objDef.getCompileTimeClass(), ChangeType.MODIFY, objDef.getPrismContext()); objectDelta.setOid(objectModification.getOid()); for (ItemDeltaType propMod : objectModification.getItemDelta()) { ItemDelta itemDelta = createItemDelta(propMod, objDef); objectDelta.addModification(itemDelta); } return objectDelta; } public static <T extends Objectable> ObjectDelta<T> createObjectDelta(ObjectDeltaType objectDeltaType, PrismContext prismContext, boolean allowRawValues) throws SchemaException { Validate.notNull(prismContext, "No prismContext in DeltaConvertor.createObjectDelta call"); QName objectType = objectDeltaType.getObjectType(); if (objectType == null) { throw new SchemaException("No objectType specified"); } PrismObjectDefinition<T> objDef = prismContext.getSchemaRegistry().findObjectDefinitionByType(objectType); Class<T> type = 
objDef.getCompileTimeClass(); if (objectDeltaType.getChangeType() == ChangeTypeType.ADD) { ObjectDelta<T> objectDelta = new ObjectDelta<T>(type, ChangeType.ADD, prismContext); objectDelta.setOid(objectDeltaType.getOid()); ObjectType objectToAddElement = objectDeltaType.getObjectToAdd(); // PrismObject<T> objectToAdd = prismContext.getXnodeProcessor().parseObject(objectToAddElement.getXnode()); // PrismObject<T> objectToAdd = prismContext.getJaxbDomHack().parseObjectFromJaxb(objectToAddElement); if (objectToAddElement != null) { objectDelta.setObjectToAdd(objectToAddElement.asPrismObject()); } return objectDelta; } else if (objectDeltaType.getChangeType() == ChangeTypeType.MODIFY) { ObjectDelta<T> objectDelta = new ObjectDelta<T>(type, ChangeType.MODIFY, prismContext); objectDelta.setOid(objectDeltaType.getOid()); for (ItemDeltaType propMod : objectDeltaType.getItemDelta()) { ItemDelta itemDelta = createItemDelta(propMod, objDef, allowRawValues); if (itemDelta != null){ objectDelta.addModification(itemDelta); } } return objectDelta; } else if (objectDeltaType.getChangeType() == ChangeTypeType.DELETE) { ObjectDelta<T> objectDelta = new ObjectDelta<T>(type, ChangeType.DELETE, prismContext); objectDelta.setOid(objectDeltaType.getOid()); return objectDelta; } else { throw new SchemaException("Unknown change type "+objectDeltaType.getChangeType()); } } public static <T extends Objectable> ObjectDelta<T> createObjectDelta(ObjectDeltaType objectDeltaType, PrismContext prismContext) throws SchemaException { return createObjectDelta(objectDeltaType, prismContext, false); } public static ObjectDeltaOperation createObjectDeltaOperation(ObjectDeltaOperationType objectDeltaOperationType, PrismContext prismContext) throws SchemaException { ObjectDeltaOperation retval = new ObjectDeltaOperation(createObjectDelta(objectDeltaOperationType.getObjectDelta(), prismContext)); if (objectDeltaOperationType.getExecutionResult() != null) { 
retval.setExecutionResult(OperationResult.createOperationResult(objectDeltaOperationType.getExecutionResult())); } if (objectDeltaOperationType.getObjectName() != null) { retval.setObjectName(objectDeltaOperationType.getObjectName().toPolyString()); } retval.setResourceOid(objectDeltaOperationType.getResourceOid()); if (objectDeltaOperationType.getResourceName() != null) { retval.setObjectName(objectDeltaOperationType.getResourceName().toPolyString()); } return retval; } public static <T extends Objectable> Collection<? extends ItemDelta> toModifications(ObjectModificationType objectModification, Class<T> type, PrismContext prismContext) throws SchemaException { Validate.notNull(prismContext, "No prismContext in DeltaConvertor.toModifications call"); PrismObjectDefinition<T> objectDefinition = prismContext.getSchemaRegistry().findObjectDefinitionByCompileTimeClass(type); if (objectDefinition == null) { throw new SchemaException("No object definition for class "+type); } return toModifications(objectModification, objectDefinition); } public static <T extends Objectable> Collection<? extends ItemDelta> toModifications(ObjectModificationType objectModification, PrismObjectDefinition<T> objDef) throws SchemaException { return toModifications(objectModification.getItemDelta(), objDef); } public static <T extends Objectable> Collection<? extends ItemDelta> toModifications(Collection<ItemDeltaType> itemDeltaTypes, PrismObjectDefinition<T> objDef) throws SchemaException { Collection<ItemDelta> modifications = new ArrayList<ItemDelta>(); for (ItemDeltaType propMod : itemDeltaTypes) { ItemDelta itemDelta = createItemDelta(propMod, objDef); modifications.add(itemDelta); } return modifications; } /** * Converts this delta to ObjectModificationType (XML). 
*/ public static <T extends Objectable> ObjectModificationType toObjectModificationType(ObjectDelta<T> delta) throws SchemaException { if (delta.getChangeType() != ChangeType.MODIFY) { throw new IllegalStateException("Cannot produce ObjectModificationType from delta of type " + delta.getChangeType()); } ObjectModificationType modType = new ObjectModificationType(); modType.setOid(delta.getOid()); List<ItemDeltaType> propModTypes = modType.getItemDelta(); for (ItemDelta<?,?> propDelta : delta.getModifications()) { Collection<ItemDeltaType> propPropModTypes; try { propPropModTypes = toItemDeltaTypes(propDelta); } catch (SchemaException e) { throw new SchemaException(e.getMessage() + " in " + delta.toString(), e); } propModTypes.addAll(propPropModTypes); } return modType; } public static ObjectDeltaType toObjectDeltaType(ObjectDelta<? extends ObjectType> objectDelta) throws SchemaException { return toObjectDeltaType(objectDelta, null); } public static ObjectDeltaType toObjectDeltaType(ObjectDelta<? extends ObjectType> objectDelta, DeltaConversionOptions options) throws SchemaException { Validate.notNull(objectDelta.getPrismContext(), "ObjectDelta without prismContext cannot be converted to ObjectDeltaType"); ObjectDeltaType objectDeltaType = new ObjectDeltaType(); objectDeltaType.setChangeType(convertChangeType(objectDelta.getChangeType())); Class<? extends Objectable> type = objectDelta.getObjectTypeClass(); PrismObjectDefinition<? extends Objectable> objDef = objectDelta.getPrismContext().getSchemaRegistry().findObjectDefinitionByCompileTimeClass(type); if (objDef == null) { throw new SchemaException("Unknown compile time class: " + type); } objectDeltaType.setObjectType(objDef.getTypeName()); objectDeltaType.setOid(objectDelta.getOid()); if (objectDelta.getChangeType() == ChangeType.ADD) { PrismObject<? 
extends ObjectType> prismObject = objectDelta.getObjectToAdd(); if (prismObject != null) { objectDeltaType.setObjectToAdd(prismObject.asObjectable()); } } else if (objectDelta.getChangeType() == ChangeType.MODIFY) { ObjectModificationType modType = new ObjectModificationType(); modType.setOid(objectDelta.getOid()); for (ItemDelta<?,?> propDelta : objectDelta.getModifications()) { Collection<ItemDeltaType> propPropModTypes; try { propPropModTypes = toItemDeltaTypes(propDelta, options); } catch (SchemaException e) { throw new SchemaException(e.getMessage() + " in " + objectDelta.toString(), e); } objectDeltaType.getItemDelta().addAll(propPropModTypes); } } else if (objectDelta.getChangeType() == ChangeType.DELETE) { // Nothing to do } else { throw new SystemException("Unknown changetype "+objectDelta.getChangeType()); } return objectDeltaType; } public static String toObjectDeltaTypeXml(ObjectDelta<? extends ObjectType> delta) throws SchemaException, JAXBException { return toObjectDeltaTypeXml(delta, null); } public static String toObjectDeltaTypeXml(ObjectDelta<? 
extends ObjectType> delta, DeltaConversionOptions options) throws SchemaException, JAXBException { Validate.notNull(delta.getPrismContext(), "ObjectDelta without prismContext cannot be converted to XML"); ObjectDeltaType objectDeltaType = toObjectDeltaType(delta, options); SerializationOptions serializationOptions = new SerializationOptions(); serializationOptions.setSerializeReferenceNames(DeltaConversionOptions.isSerializeReferenceNames(options)); return delta.getPrismContext().xmlSerializer().options(serializationOptions).serializeRealValue(objectDeltaType, SchemaConstants.T_OBJECT_DELTA); } public static ObjectDeltaOperationType toObjectDeltaOperationType(ObjectDeltaOperation objectDeltaOperation) throws SchemaException { return toObjectDeltaOperationType(objectDeltaOperation, null); } public static ObjectDeltaOperationType toObjectDeltaOperationType(ObjectDeltaOperation objectDeltaOperation, DeltaConversionOptions options) throws SchemaException { ObjectDeltaOperationType rv = new ObjectDeltaOperationType(); toObjectDeltaOperationType(objectDeltaOperation, rv, options); return rv; } public static void toObjectDeltaOperationType(ObjectDeltaOperation delta, ObjectDeltaOperationType odo, DeltaConversionOptions options) throws SchemaException { odo.setObjectDelta(DeltaConvertor.toObjectDeltaType(delta.getObjectDelta(), options)); if (delta.getExecutionResult() != null){ odo.setExecutionResult(delta.getExecutionResult().createOperationResultType()); } if (delta.getObjectName() != null) { odo.setObjectName(new PolyStringType(delta.getObjectName())); } odo.setResourceOid(delta.getResourceOid()); if (delta.getResourceName() != null) { odo.setResourceName(new PolyStringType(delta.getResourceName())); } } private static ChangeTypeType convertChangeType(ChangeType changeType) { if (changeType == ChangeType.ADD) { return ChangeTypeType.ADD; } if (changeType == ChangeType.MODIFY) { return ChangeTypeType.MODIFY; } if (changeType == ChangeType.DELETE) { return 
ChangeTypeType.DELETE; } throw new SystemException("Unknown changetype "+changeType); } /** * Creates delta from PropertyModificationType (XML). The values inside the PropertyModificationType are converted to java. * That's the reason this method needs schema and objectType (to locate the appropriate definitions). */ public static <IV extends PrismValue,ID extends ItemDefinition> ItemDelta<IV,ID> createItemDelta(ItemDeltaType propMod, Class<? extends Objectable> objectType, PrismContext prismContext) throws SchemaException { Validate.notNull("No prismContext in DeltaConvertor.createItemDelta call"); PrismObjectDefinition<? extends Objectable> objectDefinition = prismContext.getSchemaRegistry(). findObjectDefinitionByCompileTimeClass(objectType); return createItemDelta(propMod, objectDefinition); } public static <IV extends PrismValue,ID extends ItemDefinition> ItemDelta<IV,ID> createItemDelta(ItemDeltaType propMod, PrismContainerDefinition<?> pcDef, boolean allowRawValues) throws SchemaException { ItemPathType parentPathType = propMod.getPath(); ItemPath parentPath = null; if (parentPathType != null){ parentPath = parentPathType.getItemPath(); } else { throw new IllegalStateException("Path argument in the itemDelta HAVE TO BE specified."); } if (propMod.getValue() == null) { throw new IllegalArgumentException("No value in item delta (path: " + parentPath + ") while creating a property delta"); } ItemDefinition containingPcd = pcDef.findItemDefinition(parentPath); PrismContainerDefinition containerDef = null; if (containingPcd == null) { containerDef = pcDef.findContainerDefinition(parentPath.allUpToLastNamed()); if (containerDef == null){ if (allowRawValues){ return null; } throw new SchemaException("No definition for " + parentPath.allUpToLastNamed().lastNamed().getName() + " (while creating delta for " + pcDef + ")"); } } QName elementName = parentPath.lastNamed().getName(); Item item = RawTypeUtil.getParsedItem(containingPcd, propMod.getValue(), elementName, 
containerDef);//propMod.getValue().getParsedValue(containingPcd); ItemDelta<IV,ID> itemDelta = item.createDelta(parentPath); if (propMod.getModificationType() == ModificationTypeType.ADD) { itemDelta.addValuesToAdd(PrismValue.resetParentCollection(PrismValue.cloneCollection(item.getValues()))); } else if (propMod.getModificationType() == ModificationTypeType.DELETE) { itemDelta.addValuesToDelete(PrismValue.resetParentCollection(PrismValue.cloneCollection(item.getValues()))); } else if (propMod.getModificationType() == ModificationTypeType.REPLACE) { itemDelta.setValuesToReplace(PrismValue.resetParentCollection(PrismValue.cloneCollection(item.getValues()))); } if (!propMod.getEstimatedOldValue().isEmpty()) { Item oldItem = RawTypeUtil.getParsedItem(containingPcd, propMod.getEstimatedOldValue(), elementName, containerDef); itemDelta.addEstimatedOldValues(PrismValue.resetParentCollection(PrismValue.cloneCollection(oldItem.getValues()))); } return itemDelta; } public static <IV extends PrismValue,ID extends ItemDefinition> ItemDelta<IV,ID> createItemDelta(ItemDeltaType propMod, PrismContainerDefinition<?> pcDef) throws SchemaException { return createItemDelta(propMod, pcDef, false); } /** * Converts this delta to PropertyModificationType (XML). 
*/ public static Collection<ItemDeltaType> toItemDeltaTypes(ItemDelta delta) throws SchemaException { return toItemDeltaTypes(delta, null); } public static Collection<ItemDeltaType> toItemDeltaTypes(ItemDelta delta, DeltaConversionOptions options) throws SchemaException { delta.checkConsistence(); if (!delta.isEmpty() && delta.getPrismContext() == null) { throw new IllegalStateException("Non-empty ItemDelta with no prismContext cannot be converted to ItemDeltaType."); } Collection<ItemDeltaType> mods = new ArrayList<>(); ItemPathType path = new ItemPathType(delta.getPath()); if (delta.getValuesToReplace() != null) { ItemDeltaType mod = new ItemDeltaType(); mod.setPath(path); mod.setModificationType(ModificationTypeType.REPLACE); try { addModValues(delta, mod, delta.getValuesToReplace(), options); } catch (SchemaException e) { throw new SchemaException(e.getMessage() + " while converting property " + delta.getElementName(), e); } addOldValues(delta, mod, delta.getEstimatedOldValues(), options); mods.add(mod); } if (delta.getValuesToAdd() != null) { ItemDeltaType mod = new ItemDeltaType(); mod.setPath(path); mod.setModificationType(ModificationTypeType.ADD); try { addModValues(delta, mod, delta.getValuesToAdd(), options); } catch (SchemaException e) { throw new SchemaException(e.getMessage() + " while converting property " + delta.getElementName(), e); } addOldValues(delta, mod, delta.getEstimatedOldValues(), options); mods.add(mod); } if (delta.getValuesToDelete() != null) { ItemDeltaType mod = new ItemDeltaType(); mod.setPath(path); mod.setModificationType(ModificationTypeType.DELETE); try { addModValues(delta, mod, delta.getValuesToDelete(), options); } catch (SchemaException e) { throw new SchemaException(e.getMessage() + " while converting property " + delta.getElementName(), e); } addOldValues(delta, mod, delta.getEstimatedOldValues(), options); mods.add(mod); } return mods; } // requires delta.prismContext to be set private static void addModValues(ItemDelta 
delta, ItemDeltaType mod, Collection<PrismValue> values, DeltaConversionOptions options) throws SchemaException { if (values == null || values.isEmpty()) { RawType modValue = new RawType(delta.getPrismContext()); mod.getValue().add(modValue); } else { for (PrismValue value : values) { XNode xnode = toXNode(delta, value, options); RawType modValue = new RawType(xnode, value.getPrismContext()); mod.getValue().add(modValue); } } } private static void addOldValues(ItemDelta delta, ItemDeltaType mod, Collection<PrismValue> values, DeltaConversionOptions options) throws SchemaException { if (values == null || values.isEmpty()) { RawType modValue = new RawType(delta.getPrismContext()); mod.getEstimatedOldValue().add(modValue); } else { for (PrismValue value : values) { XNode xnode = toXNode(delta, value, options); RawType modValue = new RawType(xnode, delta.getPrismContext()); mod.getEstimatedOldValue().add(modValue); } } } private static XNode toXNode(ItemDelta delta, @NotNull PrismValue value, DeltaConversionOptions options) throws SchemaException{ XNode node = delta.getPrismContext().xnodeSerializer() .definition(delta.getDefinition()) .options(DeltaConversionOptions.isSerializeReferenceNames(options) ? SerializationOptions.createSerializeReferenceNames() : null) .serialize(value) .getSubnode(); // TODO solve this within serializer! // if (delta.getDefinition() != null) { // node.setTypeQName(delta.getDefinition().getTypeName()); // node.setExplicitTypeDeclaration(true); // } return node; } public static Collection<ObjectDelta> createObjectDeltas(ObjectDeltaListType deltaList, PrismContext prismContext) throws SchemaException { List<ObjectDelta> retval = new ArrayList<>(); for (ObjectDeltaType deltaType : deltaList.getDelta()) { retval.add(createObjectDelta(deltaType, prismContext)); } return retval; } }
package com.example.qmain;

import android.content.Context;
import android.support.v7.app.AppCompatActivity;
import org.w3c.dom.NodeList;
import org.w3c.dom.Node;
import org.w3c.dom.Element;
import android.text.TextWatcher;
import android.view.View;
import android.widget.CompoundButton;
import android.widget.EditText;
import android.widget.TextView;
import android.text.Editable;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.List;
import android.widget.Button;
import android.widget.RadioGroup;
import android.widget.RadioButton;
import android.widget.LinearLayout;
import android.app.AlertDialog;
import android.widget.CheckBox;
import android.graphics.drawable.Drawable;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.ActionBar.LayoutParams;

/**
 * Builds questions
 */
public class Questionnaire extends AppCompatActivity {

    // Shared dialog builder used by the info ("?") buttons; assigned from PVQ.builder in build_question.
    public static AlertDialog.Builder builder = null;

    /**
     * Builds new question LinearLayout given information and updates passed data structures with question
     *
     * @param nNode XML node containing question and its parts
     * @param list2 list that needs to be updated with question (question must be added to list)
     * @param layout1 layout to add question to
     * @param context context of current activity
     * @param qns hash map of questions mapped to their numbers to be updated with new question being built
     * @param ds hash map mapping number of choices a question is dependent on that are positively answered to the question's id
     * @return new question LinearLayout containing all component views of question
     */
    public static LinearLayout build_question(Node nNode, List<LinearLayout> list2, LinearLayout layout1, Context context, HashMap<String, LinearLayout> qns, HashMap<String, Integer> ds){
        LinearLayout q = null;
        Element eElement = (Element) nNode;
        String text;
        String type = eElement.getElementsByTagName("qtype").item(0).getTextContent(); // type of question
        String hint;
        String parent;
        TextView reqQuestionTextView = null; // TextView for conditionally required questions with version of question text as required
        String req;
        String qid = eElement.getElementsByTagName("q").item(0).getTextContent(); // question number
        // see if question is required
        // NOTE(review): the broad catch handles the NPE thrown when the <req> element is absent;
        // "F" (not required) is the default.
        try{
            req = eElement.getElementsByTagName("req").item(0).getTextContent();
        }catch(Exception e){
            req = "F";
        }
        // question hint (empty string when no <qhint> element exists)
        try{
            hint = eElement.getElementsByTagName("qhint").item(0).getTextContent();
        }catch(Exception e){
            hint = "";
        }
        if(req.equals("T")){
            // adds "*" to required question's question text
            text = eElement.getElementsByTagName("qtext").item(0).getTextContent()+"*";
        } else if(req.equals("C")){
            // sets up required question version of question text
            // (kept hidden; shown later if the question becomes required)
            text = eElement.getElementsByTagName("qtext").item(0).getTextContent();
            String req_text = eElement.getElementsByTagName("qtext").item(0).getTextContent()+"*";
            reqQuestionTextView = new TextView(context);
            reqQuestionTextView.setText(req_text);
            reqQuestionTextView.setVisibility(View.GONE);
        }else{
            // reads and saves question text unchanged from XML node
            text = eElement.getElementsByTagName("qtext").item(0).getTextContent();
        }
        try{
            // check for parent, save if it exists
            parent = eElement.getElementsByTagName("parent").item(0).getTextContent();
        }catch(Exception e){
            parent = null;
        }
        String qlimit;
        try{
            // check for limiting questions, save if exists
            qlimit = eElement.getElementsByTagName("qlimit").item(0).getTextContent();
        }catch(Exception e){
            qlimit = "";
        }
        if (type.equals("T")) {
            // if text entry, build question with TextQ()
            q = TextQ(text, hint, context, parent, qns);
        } else if (type.equals("N")) {
            // if numerical entry, build question with NumQ()
            q = NumQ(text, hint, context, parent, qns, qlimit);
        } else if (type.equals("SC")) {
            // if single choice question, prepare list of choices and build question with SingleChoice()
            List<String> c = new ArrayList<>(); // array list of choices
            NodeList choices = eElement.getElementsByTagName("choice"); // all choice nodes
            HashMap<String, ArrayList<String>> dependencies = new HashMap<>();
            // iterate through choice nodes
            for (int i = 0; i < choices.getLength(); i++) {
                Node choice = choices.item(i);
                Element e = (Element) choice;
                String x = e.getElementsByTagName("ctext").item(0).getTextContent(); // choice text
                String tag = e.getElementsByTagName("ccode").item(0).getTextContent(); // choice code
                // choice text and code are joined with the "~~" delimiter
                x = x+"~~"+tag;
                c.add(x); // adds choice text and code to list of choices
                try {
                    // keeps track of choice's dependents
                    ArrayList<String> dps = new ArrayList<>();
                    NodeList dependents = e.getElementsByTagName("dependents");
                    // iterates through choice's dependents, if any
                    for(int j = 0; j<dependents.getLength(); j++){
                        String dep_string = dependents.item(j).getTextContent();
                        dps.add(dep_string);
                    }
                    // maps choice's dependents to choice
                    dependencies.put(x, dps);
                }catch(Exception ex){
                    //System.out.println("no dependencies")
                    // no dependencies for this choice — deliberately ignored
                }
            }
            builder = PVQ.builder; // dialog builder
            // build question with SingleChoice()
            q = SingleChoice(text, c, hint, context, builder, qns, ds, dependencies, parent);
        } else if (type.equals("MC")) {
            // if multiple choice question, prepare list of choices and build question with MultipleChoice()
            List<String> c = new ArrayList<>(); // array list of choices
            NodeList choices = eElement.getElementsByTagName("choice");
            HashMap<String, ArrayList<String>> dependencies = new HashMap<>();
            // iterates through choice nodes
            for (int i = 0; i < choices.getLength(); i++) {
                Node choice = choices.item(i);
                Element e = (Element) choice;
                String x = e.getElementsByTagName("ctext").item(0).getTextContent(); // choice text
                String code = e.getElementsByTagName("ccode").item(0).getTextContent(); // choice code
                x = x + "~~"+code;
                c.add(x); // adds choice text and code to list of choices
                try {
                    ArrayList<String> dps = new ArrayList<>(); // list of choice's dependents
                    NodeList dependents = e.getElementsByTagName("dependents");
                    // iterates through dependents, if any
                    for(int j = 0; j<dependents.getLength(); j++){
                        String dep_string = dependents.item(j).getTextContent();
                        dps.add(dep_string);
                    }
                    // maps choice's dependents to choice
                    dependencies.put(x, dps);
                }catch(Exception ex){
                    //System.out.println("no dependencies")
                    // no dependencies for this choice — deliberately ignored
                }
            }
            builder = PVQ.builder; // dialog builder
            // build question with MultipleChoice()
            q = MultipleChoice(text, c, hint, context, builder, qns, ds, dependencies, parent);
        } else if (type.equals("M")){
            // method doesn't build map questions
            return null;
        } else if (type.equals("C")){
            // method doesn't build camera questions
            return null;
        } else if (type.equals("S")){
            // if sum question, builds question using SumQ() given list of factor nodes
            NodeList factors = eElement.getElementsByTagName("factor");
            q = SumQ(text,hint,context,factors, parent, qns, qlimit);
        } else if (type.equals("P")){
            // if parent question, builds question using ParentQ()
            q = ParentQ(text, hint, context, parent, qns);
        }
        // if question is conditionally required, adds TextView with required question text
        // so it may be shown if question becomes required
        if(reqQuestionTextView != null && q != null){
            q.addView(reqQuestionTextView);
            reqQuestionTextView.setTag("required");
        }
        // determines whether question is initially invisible or not
        String inv;
        try{
            inv = eElement.getElementsByTagName("inv").item(0).getTextContent();
        }catch(Exception e){
            inv = "F";
        }
        if(inv.equals("T") && q != null){
            q.setVisibility(View.GONE);
        }
        // adds question to LinearLayout specified in parameters
        if(layout1 != null){
            try {
                layout1.addView(q);
            }catch(IllegalStateException e){
                //System.out.println("child question")
                // question already has a parent (child question) — deliberately ignored
            }
        }
        list2.add(q);
        if(qns != null){
            qns.put(qid, q); // maps question to its number in hash map specified in parameters
            ds.put(qid, 0); // number of questions question qid depends on currently answered positively (0 initially for all)
        }
        // add invisible TextView to question containing its qid for future access
        TextView qt = new TextView(context);
        qt.setText(qid);
        qt.setVisibility(View.GONE);
        qt.setTag("qid");
        if(q!=null)
        {
            q.addView(qt);
        }
        return q;
    }

    // Methods for each specific question type that make question linear layout for specific question type

    /**
     * Builds a text entry question with question text and an EditText for answer entry
     *
     * @param questiontext text of question
     * @param hint hint or prompt to go in the answer entry box
     * @param context context of current activity
     * @param parent number (in string format) of parent question, if any
     * @param qns hash map mapping question to question number
     * @return LinearLayout of text entry question with text and answer entry box
     */
    public static LinearLayout TextQ(String questiontext, String hint, Context context, String parent, HashMap qns){
        // sets up question text
        TextView text = new TextView(context);
        text.setTextSize(20);
        text.setText(questiontext);
        text.setPadding(0,0,0,5);
        // sets up box for answer text entry
        final EditText edittext = new EditText(context);
        edittext.setHint(hint);
        // sets up linear layout for question, adds question text and answer text box
        LinearLayout qlayout = new LinearLayout(context);
        qlayout.setOrientation(LinearLayout.VERTICAL);
        text.setTag("text");
        edittext.setTag("answer");
        qlayout.addView(text);
        qlayout.addView(edittext);
        qlayout.setTag("T");
        // adds question to parent question LinearLayout, if one exists
        if(parent != null){
            LinearLayout pl = (LinearLayout)qns.get(parent);
            pl.addView(qlayout);
            // hidden copy of the parent's "parent text" so writers can reconstruct the full question path
            TextView parent_text = new TextView(context);
            parent_text.setText(((TextView)pl.findViewWithTag("parent text")).getText());
            parent_text.setTag("parent text");
            parent_text.setVisibility(View.GONE);
            qlayout.addView(parent_text);
        }
        return qlayout;
    }

    /**
     * Builds a number entry question with question text and an EditText for answer entry
     *
     * @param questiontext text of question
     * @param hint hint or prompt to go in the answer entry box
     * @param context context of current activity
     * @param parent number (in string format) of parent question, if any
     * @param qns hash map mapping question to question number
     *
@param limit number (in string format) of question whose answer is an upper limit
     * @return LinearLayout of number entry question with text and answer entry box
     */
    public static LinearLayout NumQ(String questiontext, String hint, Context context, String parent, HashMap<String, LinearLayout> qns, String limit){
        // sets up question text
        TextView text = new TextView(context);
        text.setTextSize(20);
        text.setText(questiontext);
        text.setPadding(0,0,0,5);
        // sets up box for answer text entry (numerical)
        EditText edittext = new EditText(context);
        edittext.setHint(hint);
        edittext.setInputType(2); // NOTE(review): 2 presumably == InputType.TYPE_CLASS_NUMBER — confirm
        text.setTag("text");
        edittext.setTag("answer");
        // sets up NumQWatcher that verifies answer to this question is less than or equal to
        // answer of limiting question, if one exists
        if(!limit.equals("")) {
            edittext.addTextChangedListener(new NumQWatcher(edittext, limit, qns, context));
        }
        // sets up linear layout for question, adds question text and answer text box
        LinearLayout qlayout = new LinearLayout(context);
        qlayout.setOrientation(LinearLayout.VERTICAL);
        qlayout.addView(text);
        qlayout.addView(edittext);
        qlayout.setTag("N");
        // adds question to parent question LinearLayout, if one exists
        if(parent != null){
            LinearLayout pl = qns.get(parent);
            pl.addView(qlayout);
            // hidden copy of the parent's "parent text" so writers can reconstruct the full question path
            TextView parent_text = new TextView(context);
            parent_text.setText(((TextView)pl.findViewWithTag("parent text")).getText());
            parent_text.setTag("parent text");
            parent_text.setVisibility(View.GONE);
            qlayout.addView(parent_text);
        }
        return qlayout;
    }

    /**
     * Builds a sum question with question text, text and answer box for each factor, and text displaying total
     *
     * @param questiontext text of question
     * @param hint hint or prompt to go in the answer entry box
     * @param context context of current activity
     * @param factors list of factors as XML nodes
     * @param parent number (in string format) of parent question, if any
     * @param qns hash map mapping question to question number
     * @param qlimit number (in string format) of question whose answer is an upper limit
     * @return LinearLayout of sum question with text and sum factor texts and answer entry boxes
     */
    public static LinearLayout SumQ(String questiontext, final String hint,Context context,NodeList factors, String parent, HashMap<String, LinearLayout> qns, String qlimit){
        // sets up question text
        TextView text = new TextView(context);
        text.setTextSize(20);
        text.setText(questiontext);
        text.setTag("text");
        text.setPadding(0,0,0,5);
        // sets up linear layout for question
        LinearLayout qlayout = new LinearLayout(context);
        qlayout.setOrientation(LinearLayout.VERTICAL);
        // sets up info button with hint if hint provided
        if(!hint.equals("")){
            Button bt = new Button(context); // info button layout
            Drawable help = ContextCompat.getDrawable(context, R.drawable.help_circle_outline);
            bt.setBackground(help);
            bt.setLayoutParams(new LinearLayout.LayoutParams(50, 50));
            // creates dialog with hint to be shown when info button clicked
            final AlertDialog.Builder bdr = builder;
            bt.setOnClickListener(new View.OnClickListener() {
                public void onClick(View v) {
                    bdr.setMessage(hint);
                    AlertDialog dialog = bdr.create();
                    dialog.show();
                }
            });
            // formats info button and text to be aligned, adds to layout
            LinearLayout qh = new LinearLayout(context);
            qh.setOrientation(LinearLayout.HORIZONTAL);
            qh.addView(text);
            qh.addView(bt);
            qlayout.addView(qh);
        }else{
            // if no info button, just adds text to layout
            qlayout.addView(text);
        }
        // sets up boxes for answer text entry (numerical)
        TextView tv = new TextView(context);
        List<EditText> to_sum = new ArrayList<>(); // list of EditTexts whose inputs should be summed to get the question total
        // iterates through factor XML nodes
        for(int i = 0;i<factors.getLength();i++){
            Element factor = (Element) factors.item(i);
            String ftext = factor.getElementsByTagName("ftext").item(0).getTextContent(); // text of factor
            EditText et = new EditText(context); // answer entry box for factor
            et.setTextSize(15);
            et.setHint(" ");
            // sets SumWatcher for factor answer entry box
            if(qlimit.equals("")){
                // if no other question serves as a limit
                et.addTextChangedListener(new SumWatcher(et, tv, to_sum, "", qns, null, context));
            }else {
                // if another question serves as a limit
                et.addTextChangedListener(new SumWatcher(et, tv, to_sum, qlimit, qns, ftext+" ", context));
            }
            et.setInputType(2); // NOTE(review): 2 presumably == InputType.TYPE_CLASS_NUMBER — confirm
            to_sum.add(et);
            // formatting factor text and answer box to be in a horizontal bar across from each other
            LinearLayout hbar = new LinearLayout(context);
            hbar.setOrientation(LinearLayout.HORIZONTAL);
            TextView ft = new TextView(context);
            ft.setTextSize(15);
            String ftext_space = ftext+" ";
            ft.setText(ftext_space);
            ft.setTag("ftext");
            et.setTag("fanswer");
            hbar.addView(ft);
            hbar.addView(et);
            hbar.setTag("factor");
            qlayout.addView(hbar);
        }
        // TextView for displaying question total
        qlayout.addView(tv);
        tv.setTag("answer");
        qlayout.setTag("S");
        // adds question to parent question LinearLayout, if one exists
        if(parent != null){
            LinearLayout pl = qns.get(parent);
            pl.addView(qlayout);
            // hidden copy of the parent's "parent text" so writers can reconstruct the full question path
            TextView parent_text = new TextView(context);
            parent_text.setText(((TextView)pl.findViewWithTag("parent text")).getText());
            parent_text.setTag("parent text");
            parent_text.setVisibility(View.GONE);
            qlayout.addView(parent_text);
        }
        return qlayout;
    }

    /**
     * Builds a single choice question with question text and radio button choices
     *
     * @param questiontext text of question
     * @param choices list of choices, each a string of choice text and tag separated by a delimiter
     * @param hint hint or prompt to be shown when info button is clicked
     * @param context context of current activity
     * @param builder dialog builder used to display dialog when info button is clicked
     * @param qns hash map mapping question to question number
     * @param ds hash map mapping number of choices a question is dependent on that are positively answered to the question's id
     * @param lds hash map mapping list of dependent question numbers to choice texts
     * @param parent number (in string format) of parent question,
if any * @return LinearLayout of single choice question with text and choices */ public static LinearLayout SingleChoice(String questiontext, List choices, final String hint, Context context, AlertDialog.Builder builder, HashMap qns, HashMap<String, Integer> ds, HashMap<String, ArrayList<String>> lds, String parent){ // sets up question text TextView text = new TextView(context); text.setTextSize(20); text.setText(questiontext); // creates group of radio buttons, each button being a choice from choices // each button tagged with choice code RadioGroup rg = new RadioGroup(context); for (int i=0; i<choices.size(); i++) { RadioButton rb = new RadioButton(rg.getContext()); String btext = choices.get(i).toString(); rb.setId(i); rb.setText(btext.substring(0, btext.indexOf("~~"))); // choice text in string before "~~" rg.addView(rb); rb.setTag(btext.substring(btext.indexOf("~~")+2)); // choice code in string after "~~" ArrayList<String> options = lds.get(btext); // list of questions (by qid) to set to visible if choice checked if(options == null){ options = new ArrayList<>(); } rb.setOnCheckedChangeListener(new onCheckedChangedB(qns, ds, options)); // sets dependent questions visible when rb checked } text.setTag("text"); text.setPadding(0,0,0,5); rg.setTag("choices"); // sets up info button, builds dialog with hint when clicked on Button bt = new Button(context); Drawable help = ContextCompat.getDrawable(context, R.drawable.help_circle_outline); bt.setBackground(help); bt.setLayoutParams(new LinearLayout.LayoutParams(50, 50)); final AlertDialog.Builder bdr = builder; bt.setOnClickListener(new View.OnClickListener() { public void onClick(View v) { bdr.setMessage(hint); AlertDialog dialog = bdr.create(); dialog.show(); } }); // sets up question linear layout and adds all component views LinearLayout qlayout = new LinearLayout(context); qlayout.setOrientation(LinearLayout.VERTICAL); LinearLayout qh = new LinearLayout(context); qh.setOrientation(LinearLayout.HORIZONTAL); 
qh.addView(text); // adds info button if hint exists if(!hint.equals("")){ qh.addView(bt); } qlayout.addView(qh); qlayout.addView(rg); bt.setTag("button"); qlayout.setTag("SC"); // adds question to parent question LinearLayout, if one exists if(parent != null){ LinearLayout pl = (LinearLayout)qns.get(parent); pl.addView(qlayout); TextView parent_text = new TextView(context); parent_text.setText(((TextView)pl.findViewWithTag("parent text")).getText()); parent_text.setTag("parent text"); parent_text.setVisibility(View.GONE); qlayout.addView(parent_text); } return qlayout; } /** * Builds a multiple choice question with question text and checkbox choices * * @param questiontext text of question * @param choices list of choices, each a string of choice text and tag separated by a delimiter * @param hint hint or prompt to be shown when info button is clicked * @param context context of current activity * @param builder dialog builder used to display dialog when info button is clicked * @param qnums hash map mapping question to question number * @param ds hash map mapping number of currently positive answers question is dependent on to the question's id * @param localds hash map mapping list of dependent question numbers to choice texts * @param parent number (in string format) of parent question, if any * @return LinearLayout of multiple choice question with text and choices */ public static LinearLayout MultipleChoice(String questiontext, List choices, final String hint, Context context, AlertDialog.Builder builder, HashMap qnums, HashMap<String, Integer> ds, HashMap<String, ArrayList<String>> localds, String parent){ // sets up question text TextView text = new TextView(context); text.setTextSize(20); text.setText(questiontext); text.setPadding(0,0,0,5); // sets up question LinearLayout // adds horizontal LinearLayout where text and possibly info button will be added LinearLayout qlayout = new LinearLayout(context); qlayout.setOrientation(LinearLayout.VERTICAL); 
LinearLayout qh = new LinearLayout(context); qh.setOrientation(LinearLayout.HORIZONTAL); qlayout.addView(qh); // for each question choice, adds checkbox to question linear layout for (int i=0; i<choices.size(); i++) { CheckBox cb = new CheckBox(context); String ctext = choices.get(i).toString(); cb.setId(i); cb.setText(ctext.substring(0, ctext.indexOf("~~"))); // choice text in string before "~~" String tag = "choice" + ctext.substring(ctext.indexOf("~~")); // choice code in string after "~~", add code to tag cb.setTag(tag); cb.setLayoutParams(new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT)); ArrayList<String> options = localds.get(ctext); // list of questions (by qid) to set to visible if choice checked if(options == null){ options = new ArrayList<>(); } cb.setOnCheckedChangeListener(new onCheckedChangedB(qnums, ds, options)); // sets dependent questions visible when cb checked qlayout.addView(cb); } text.setTag("text"); // sets up info button, builds dialog with hint when clicked on Button bt = new Button(context); Drawable help = ContextCompat.getDrawable(context, R.drawable.help_circle_outline); bt.setBackground(help); bt.setLayoutParams(new LinearLayout.LayoutParams(50, 50)); final AlertDialog.Builder bdr = builder; bt.setOnClickListener(new View.OnClickListener() { public void onClick(View v) { bdr.setMessage(hint); AlertDialog dialog = bdr.create(); dialog.show(); } }); bt.setTag("button"); // adds info button if hint exists qh.addView(text); if(!hint.equals("")){ qh.addView(bt); } qh.setTag("qh"); qlayout.setTag("MC"); // adds question to parent question LinearLayout, if one exists if(parent != null){ LinearLayout pl = (LinearLayout)qnums.get(parent); pl.addView(qlayout); TextView parent_text = new TextView(context); parent_text.setText(((TextView)pl.findViewWithTag("parent text")).getText()); parent_text.setTag("parent text"); parent_text.setVisibility(View.GONE); qlayout.addView(parent_text); } return qlayout; } /** * Builds a 
parent text LinearLayout with parent question text.
     *
     * @param questiontext text of question
     * @param hint hint or prompt to be shown when info button is clicked
     * @param context context of current activity
     * @param parent number (in string format) of parent question, if any
     * @param qns hash map mapping question to question number
     * @return LinearLayout with parent question text that child questions can be added to
     */
    public static LinearLayout ParentQ(String questiontext, final String hint, Context context, String parent, HashMap qns){
        // sets up question LinearLayout
        LinearLayout qlayout = new LinearLayout(context);
        qlayout.setOrientation(LinearLayout.VERTICAL);
        // sets up question text
        TextView text = new TextView(context);
        text.setTextSize(20);
        text.setText(questiontext);
        text.setTag("text");
        text.setPadding(0,0,0,5);
        qlayout.setTag("P"); // "P" tags this layout as a parent question
        // formats question text as parent text for use in writing questions/answers;
        // added as an invisible TextView so it can be accessed by other functions via its tag
        TextView parent_text = new TextView(context);
        String pp = questiontext+" - ";
        parent_text.setText(pp);
        parent_text.setTag("parent text");
        parent_text.setVisibility(View.GONE);
        qlayout.addView(parent_text);
        // adds question to parent question LinearLayout, if one exists;
        // the hidden label is prefixed with the grandparent's text so nested chains concatenate
        if(parent != null){
            LinearLayout pl = (LinearLayout)qns.get(parent);
            pl.addView(qlayout);
            parent_text.setText(((TextView)pl.findViewWithTag("parent text")).getText()+questiontext+" - ");
        }
        if(!hint.equals("")) {
            // sets up info button, builds dialog with instructions when clicked on
            Button bt = new Button(context);
            Drawable help = ContextCompat.getDrawable(context, R.drawable.help_circle_outline);
            bt.setBackground(help);
            bt.setLayoutParams(new LinearLayout.LayoutParams(50, 50));
            // NOTE(review): `builder` is not a parameter of ParentQ (unlike MultipleChoice) --
            // presumably an enclosing-class field; confirm it is initialized before this runs
            final AlertDialog.Builder bdr = builder;
            bt.setOnClickListener(new View.OnClickListener() {
                public void onClick(View v) {
                    bdr.setMessage(hint);
                    AlertDialog dialog = bdr.create();
                    dialog.show();
                }
            });
            // add text and button to layout
// in horizontal bar
            LinearLayout qh = new LinearLayout(context);
            qh.setOrientation(LinearLayout.HORIZONTAL);
            qh.addView(text);
            qh.addView(bt);
            qlayout.addView(qh);
        }else{
            // if no hint, just add text to layout
            qlayout.addView(text);
        }
        return qlayout;
    }

    /**
     * Returns Integer total of answer(s) from given limiting question(s).
     *
     * @param qnum number(s) (in string format) of limiting question(s), comma-separated when several
     * @param numqs hash map mapping question to question number
     * @param factor text of factor whose limit is being queried (null if none)
     * @return current Integer answer or total answer of limiting question(s)
     */
    public static int NumLimit(String qnum, HashMap<String, LinearLayout> numqs, String factor){
        qnum = qnum.replace(" ","");
        // if qnum contains multiple question numbers split by ",", lists them and iterates through them
        if(qnum.contains(",")){
            String[] qnums = qnum.split(",");
            int total = 0; // total of limiting questions' answers
            for(String qn : qnums){
                try{
                    LinearLayout qll = numqs.get(qn); // limiting question
                    total += NumAnswer(qll, factor);
                }catch(Exception e){
                    // a missing/malformed question number contributes nothing to the total
                    e.printStackTrace();
                }
            }
            return total;
        }else{
            LinearLayout qll = numqs.get(qnum); // limiting question
            return NumAnswer(qll, factor);
        }
    }

    /**
     * Returns Integer answer of given question/question factor.
     *
     * @param qll LinearLayout of question
     * @param factor String factor text of question factor (null if none)
     * @return current Integer answer of given question or factor
     */
    public static int NumAnswer(LinearLayout qll, String factor){
        if(factor!=null){ // looks for matching factor if factor is specified
            // iterates through all child views of question LinearLayout
            for(int v = 0; v<qll.getChildCount(); v++){
                // checks for factors by checking tags (all factors are tagged "factor")
                if(qll.getChildAt(v).getTag().equals("factor")){
                    try{
                        // checks whether factor is factor specified by comparing factor texts
                        TextView ftext = (TextView) qll.getChildAt(v).findViewWithTag("ftext");
if(ftext.getText().toString().equals(factor)){ // returns Integer version of current factor answer EditText et = (EditText) qll.getChildAt(v).findViewWithTag("fanswer"); if(et.getText().toString().equals("") || et.getText().toString().equals(" ")){ return 0; }else{ System.out.println(et.getText().toString()); } return Integer.parseInt(et.getText().toString()); } }catch(Exception e1){ e1.printStackTrace(); } } } } // if a factor is specified but not found in the limiting question, function moves on // returns limit on total question answer as limit on factor try{ // tries finding answer in a TextView (answers are in TextViews for sum questions) TextView answ = (TextView) qll.findViewWithTag("answer"); // sum questions answers are in format "Total: "+answer String s = answ.getText().toString(); s = s.substring(s.indexOf(" ")+1); // retrieves answer portion of string // parses and returns answer if(s.equals("")){ return 0; } s = s.replace(" ",""); return Integer.parseInt(s); }catch(Exception e){ e.printStackTrace(); // if answer not in TextView, other option is EditText // checks for EditText answer, parses and returns it EditText answ = (EditText) qll.findViewWithTag("answer"); String s = answ.getText().toString(); s = s.replace(" ",""); if(s.equals("")){ return 0; } return Integer.parseInt(s); } } } /** * SumWatcher helps keep track of sum question factor EditTexts, whether they are within their limits, and their totals */ class SumWatcher implements TextWatcher{ private TextView tv; // TextView of sum question displaying total private List factors; // list of sum question's factor EditTexts private String qlim; // number(s) of limiting question(s) in string format private HashMap<String, LinearLayout> qns; // hash map mapping questions to question numbers private String f; // text of specific question factor to be monitored (null if none) private AlertDialog dialog; // dialog to show in case of error private EditText et; // EditText of factor in question (null if 
// none)

    /**
     * Initializes SumWatcher variables.
     *
     * @param et EditText of factor in question (null if none)
     * @param tv TextView of sum question displaying total
     * @param factors list of sum question's factor EditTexts
     * @param qlimit number(s) of limiting question(s) in string format
     * @param qns hash map mapping questions to question numbers
     * @param factor text of specific question factor to be monitored (null if none)
     * @param context context of current activity
     */
    SumWatcher(EditText et, TextView tv, List factors, String qlimit, HashMap<String, LinearLayout> qns, String factor, Context context){
        this.tv = tv;
        this.factors = factors;
        this.qlim = qlimit;
        this.qns = qns;
        this.f = factor;
        this.et = et;
        // sets up error message dialog
        AlertDialog.Builder newbuilder = new AlertDialog.Builder(context);
        String msg = "Value too large, contradicts answer to question "+qlimit;
        newbuilder.setMessage(msg);
        this.dialog = newbuilder.create();
    }

    /**
     * On changed answer in EditText, checks to see whether new value is valid,
     * updates sum question total if valid.
     *
     * @param s Editable answer in EditText that has just changed
     */
    public void afterTextChanged(Editable s) {
        int flim = -1; // limit of factor (-1 = no limit known)
        int tlim = -1; // limit of sum question total (-1 = no limit known)
        if(!qlim.equals("") && f!=null) {
            // gets factor limit if limiting questions and a factor were specified
            flim = Questionnaire.NumLimit(qlim, qns, f);
        }
        if(!qlim.equals("")){
            // gets total limit if limiting questions were specified
            tlim = Questionnaire.NumLimit(qlim, qns, null);
        }
        // parses integer value of current EditText answer
        // NOTE(review): non-numeric input would throw NumberFormatException here --
        // presumably the field is numeric-only; confirm the EditText input type
        int sval;
        if(s.toString().equals("")){
            sval = 0;
        }else {
            sval = Integer.parseInt(s.toString());
        }
        // checks value against factor limit if factor limit exists
        if(sval > flim && flim >= 0){
            dialog.show();
            if(et!=null) {
                et.setText(""); // reject the entry by clearing the field
            }
            return;
        }
        // iterates through factor EditTexts and sums their answers
        int sum = 0;
        boolean zero = false; // if 0 is from user entry (true) or default and no answer (false)
        for(int
i = 0;i<factors.size();i++){
            String value = ((EditText)factors.get(i)).getText().toString();
            if(value.equals("0")){
                // zero is true if user entered a 0 and didn't just miss the question
                zero = true;
            }
            try {
                // adds factor answer to sum of sum question parts
                int v = Integer.parseInt(value);
                sum += v;
                // checks to make sure sum does not exceed limit on question answer total
                // NOTE(review): uses `tlim > 0` here but the factor check uses `flim >= 0`;
                // a limit of exactly 0 is not enforced on the total -- confirm intended
                if(sum > tlim && tlim > 0){
                    ((EditText)factors.get(i)).setText("");
                    dialog.show();
                    break;
                }
            }catch(Exception e){
                // blank/non-numeric factor contributes nothing to the sum
                e.printStackTrace();
            }
        }
        // updates answer TextView display either with blank answer or updated sum
        if(sum == 0 && !zero){
            String settext = "Total: ";
            tv.setText(settext);
            tv.setTextSize(17);
        }
        else{
            String total = Integer.toString(sum);
            String settext = "Total: "+total;
            tv.setText(settext);
            tv.setTextSize(17);
        }
    }

    public void beforeTextChanged(CharSequence s, int start, int count, int after) {}

    public void onTextChanged(CharSequence s, int start, int before, int count) {}
}

/**
 * onCheckedChangedB manages showing in and removing from view questions dependent on specific
 * answer choices.
 */
class onCheckedChangedB implements RadioButton.OnCheckedChangeListener{
    private HashMap questions; // hash map mapping questions to question numbers
    private HashMap<String, Integer> dependents_map; // maps number of currently positive answers question is dependent on to the question's id
    private ArrayList<String> dependents; // list of qids of questions dependent on choice instance of onCheckedChangedB is monitoring

    /**
     * Initializes onCheckedChangedB variables.
     *
     * @param qns hash map mapping questions to question numbers
     * @param deps mapping current number of choices a question is dependent on that are answered positively to the question's id
     * @param dependents list of qids of questions dependent on choice button instance of onCheckedChangedB is monitoring
     */
    onCheckedChangedB(HashMap qns, HashMap<String, Integer> deps, ArrayList<String> dependents){
        this.questions = qns;
        this.dependents_map = deps;
this.dependents = dependents;
    }

    /**
     * Adds or removes questions dependent on an answer choice when choice answer changes.
     *
     * @param b choice button instance of onCheckedChangedB is monitoring
     * @param isChecked whether or not choice button is checked
     */
    @Override
    public void onCheckedChanged(CompoundButton b, boolean isChecked){
        if(isChecked){ // when button is checked
            // iterates through choice's dependents
            for(String dependent: dependents){
                // each dependent increases by one the number of answers it depends on answered positively
                dependent = dependent.replace(" ","");
                int u = dependents_map.get(dependent);
                dependents_map.put(dependent, u+1);
                // dependent is set visible
                ((LinearLayout)questions.get(dependent)).setVisibility(View.VISIBLE);
                LinearLayout q = ((LinearLayout)questions.get(dependent));
                try{
                    // if dependent is conditionally required:
                    // sets required question text as main question text, making question now required
                    TextView rtv = (TextView)q.findViewWithTag("required");
                    TextView tv = (TextView) q.findViewWithTag("text");
                    String sw = tv.getText().toString();
                    tv.setText(rtv.getText());
                    rtv.setText(sw);
                }catch(Exception e){
                    // no "required" TextView -> dependent is not conditionally required
                    System.out.println(e.getStackTrace()[0]);
                }
            }
        }else{
            // NOTE(review): unlike the checked branch, `dependent` is not stripped of
            // spaces here -- confirm dependent ids never contain spaces
            for(String dependent: dependents){
                // each dependent decreases by one the number of answers it depends on answered positively
                int u = dependents_map.get(dependent);
                dependents_map.put(dependent, u-1);
                if(dependents_map.get(dependent).equals(0)) {
                    // if a dependent has 0 answers it depends on answered positively,
                    // it is removed from view and made not required (switching required text for main text)
                    LinearLayout q = ((LinearLayout)questions.get(dependent));
                    q.setVisibility(View.GONE);
                    try{
                        TextView rtv = (TextView)q.findViewWithTag("required");
                        TextView tv = (TextView) q.findViewWithTag("text");
                        String sw = tv.getText().toString();
                        tv.setText(rtv.getText());
                        rtv.setText(sw);
                    }catch(Exception e){
                        System.out.println(e.getStackTrace()[0]);
                    }
                }
            }
        }
    }
}

/**
 * NumQWatcher checks whether
a number question's answer is within specified limits every time it changes.
 */
class NumQWatcher implements TextWatcher {
    private String q; // number(s) (in string format) of limiting question(s)
    private EditText et; // EditText whose answers are to be monitored
    private HashMap<String, LinearLayout> qns; // hash map mapping questions to question numbers
    private AlertDialog dialog; // dialog to show in case of error

    /**
     * Initializes NumQWatcher variables.
     *
     * @param et EditText whose answers are to be monitored
     * @param l number(s) (in string format) of limiting question(s)
     * @param qns hash map mapping questions to question numbers
     * @param context context of current activity
     */
    NumQWatcher(EditText et, String l, HashMap<String, LinearLayout> qns, Context context){
        this.et = et;
        this.q = l;
        this.qns = qns;
        // sets up error message dialog
        AlertDialog.Builder newbuilder = new AlertDialog.Builder(context);
        String msg = "Value too large, contradicts answer to question "+l;
        newbuilder.setMessage(msg);
        this.dialog = newbuilder.create();
    }

    /**
     * Checks whether current answer in EditText is within specified limits.
     *
     * @param s Editable answer in EditText that was changed
     */
    public void afterTextChanged(Editable s) {
        this.checkValid(et, q, qns, s);
    }

    public void beforeTextChanged(CharSequence s, int start, int count, int after) {}

    public void onTextChanged(CharSequence s, int start, int before, int count) {}

    /**
     * Checks whether current answer in EditText is less than or equal to limit set by limiting question(s).
     *
     * @param et EditText being monitored
     * @param q number(s) (in string format) of limiting question(s)
     * @param qns hash map mapping questions to question numbers
     * @param s current Editable answer in EditText that was changed
     */
    private void checkValid(EditText et, String q, HashMap<String, LinearLayout> qns, Editable s){
        int lim = Questionnaire.NumLimit(q, qns, null); // gets limit from Questionnaire.NumLimit()
        int val; // Integer value of EditText answer
if(s.toString().equals("")){
            // if no answer, trivially does not exceed the limit
            return;
        }
        try{
            // parses Integer value of answer
            // (removed leftover debug System.out.println("successfully parsed"))
            val = Integer.parseInt(s.toString());
            if(val > lim){
                // if greater than limit, shows error message and resets EditText to blank
                dialog.show();
                et.setText("");
            }
        }catch(Exception e){
            // non-numeric input is rejected by clearing the field
            et.setText("");
        }
    }
}
/* * Copyright 2012-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.rules; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThat; import com.facebook.buck.core.build.buildable.context.BuildableContext; import com.facebook.buck.core.build.context.BuildContext; import com.facebook.buck.core.description.BuildRuleParams; import com.facebook.buck.core.exceptions.HumanReadableException; import com.facebook.buck.core.model.BuildTarget; import com.facebook.buck.core.rulekey.AddToRuleKey; import com.facebook.buck.core.rulekey.AddsToRuleKey; import com.facebook.buck.core.rulekey.RuleKey; import com.facebook.buck.core.rulekey.RuleKeyAppendable; import com.facebook.buck.core.rulekey.RuleKeyObjectSink; import com.facebook.buck.core.rules.BuildRule; import com.facebook.buck.core.rules.BuildRuleResolver; import com.facebook.buck.core.rules.SourcePathRuleFinder; import com.facebook.buck.core.rules.resolver.impl.TestBuildRuleResolver; import com.facebook.buck.core.sourcepath.ArchiveMemberSourcePath; import com.facebook.buck.core.sourcepath.ExplicitBuildTargetSourcePath; import com.facebook.buck.core.sourcepath.NonHashableSourcePathContainer; import 
com.facebook.buck.core.sourcepath.PathSourcePath; import com.facebook.buck.core.sourcepath.SourcePath; import com.facebook.buck.core.sourcepath.resolver.SourcePathResolver; import com.facebook.buck.core.sourcepath.resolver.impl.DefaultSourcePathResolver; import com.facebook.buck.core.util.immutables.BuckStyleImmutable; import com.facebook.buck.core.util.immutables.BuckStylePackageVisibleImmutable; import com.facebook.buck.core.util.immutables.BuckStylePackageVisibleTuple; import com.facebook.buck.core.util.immutables.BuckStyleTuple; import com.facebook.buck.io.ArchiveMemberPath; import com.facebook.buck.io.filesystem.ProjectFilesystem; import com.facebook.buck.jvm.java.JavaLibraryBuilder; import com.facebook.buck.log.ConsoleHandler; import com.facebook.buck.model.BuildTargetFactory; import com.facebook.buck.rules.keys.AbstractRuleKeyBuilder; import com.facebook.buck.rules.keys.DefaultRuleKeyFactory; import com.facebook.buck.rules.keys.RuleKeyBuilder; import com.facebook.buck.rules.keys.RuleKeyDiagnostics.Result; import com.facebook.buck.rules.keys.RuleKeyFactory; import com.facebook.buck.rules.keys.RuleKeyResult; import com.facebook.buck.rules.keys.TestDefaultRuleKeyFactory; import com.facebook.buck.rules.keys.UncachedRuleKeyBuilder; import com.facebook.buck.rules.keys.hasher.StringRuleKeyHasher; import com.facebook.buck.step.Step; import com.facebook.buck.testutil.DummyFileHashCache; import com.facebook.buck.testutil.FakeFileHashCache; import com.facebook.buck.testutil.FakeProjectFilesystem; import com.facebook.buck.util.cache.FileHashCache; import com.facebook.buck.util.cache.FileHashCacheMode; import com.facebook.buck.util.cache.impl.DefaultFileHashCache; import com.facebook.buck.util.cache.impl.StackedFileHashCache; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSortedSet; import com.google.common.hash.HashCode; import 
com.google.common.util.concurrent.UncheckedExecutionException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.SortedSet;
import javax.annotation.Nullable;
import org.hamcrest.Matchers;
import org.immutables.value.Value;
import org.junit.Test;

/** Tests for {@link RuleKey} construction and the reflective rule-key builder. */
public class RuleKeyTest {

  @Test
  public void testRuleKeyFromHashString() {
    // A RuleKey wraps a hash string; toString must round-trip it unchanged.
    RuleKey ruleKey = new RuleKey("19d2558a6bd3a34fb3f95412de9da27ed32fe208");
    assertEquals("19d2558a6bd3a34fb3f95412de9da27ed32fe208", ruleKey.toString());
  }

  @Test(expected = HumanReadableException.class)
  public void shouldNotAllowPathsInRuleKeysWhenSetReflectively() {
    // Raw Path values are rejected by the builder (callers must use SourcePaths).
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver());
    SourcePathResolver resolver = DefaultSourcePathResolver.from(ruleFinder);
    RuleKeyBuilder<HashCode> builder = createBuilder(resolver, ruleFinder);
    builder.setReflectively("path", Paths.get("some/path"));
  }

  /** Ensure that build rules with the same inputs but different deps have unique RuleKeys. */
  @Test
  public void testRuleKeyDependsOnDeps() throws Exception {
    FakeProjectFilesystem filesystem = new FakeProjectFilesystem();
    FileHashCache hashCache =
        new StackedFileHashCache(
            ImmutableList.of(
                DefaultFileHashCache.createDefaultFileHashCache(
                    filesystem, FileHashCacheMode.DEFAULT)));
    // Two resolvers so the same targets can be built once without and once with the dep.
    BuildRuleResolver ruleResolver1 = new TestBuildRuleResolver();
    BuildRuleResolver ruleResolver2 = new TestBuildRuleResolver();
    SourcePathRuleFinder ruleFinder1 = new SourcePathRuleFinder(ruleResolver1);
    DefaultRuleKeyFactory ruleKeyFactory =
        new TestDefaultRuleKeyFactory(
            hashCache, DefaultSourcePathResolver.from(ruleFinder1), ruleFinder1);
    SourcePathRuleFinder ruleFinder2 = new SourcePathRuleFinder(ruleResolver2);
    DefaultRuleKeyFactory ruleKeyFactory2 =
        new TestDefaultRuleKeyFactory(
            hashCache, DefaultSourcePathResolver.from(ruleFinder2), ruleFinder2);

    // Create a dependent build rule, //src/com/facebook/buck/cli:common.
JavaLibraryBuilder builder =
        JavaLibraryBuilder.createBuilder(
            BuildTargetFactory.newInstance("//src/com/facebook/buck/cli:common"));
    BuildRule commonJavaLibrary = builder.build(ruleResolver1);
    builder.build(ruleResolver2);

    // Create a java_library() rule with no deps.
    Path mainSrc = Paths.get("src/com/facebook/buck/cli/Main.java");
    filesystem.mkdirs(mainSrc.getParent());
    filesystem.writeContentsToPath("hello", mainSrc);
    JavaLibraryBuilder javaLibraryBuilder =
        JavaLibraryBuilder.createBuilder(
                BuildTargetFactory.newInstance("//src/com/facebook/buck/cli:cli"))
            .addSrc(mainSrc);
    BuildRule libraryNoCommon = javaLibraryBuilder.build(ruleResolver1, filesystem);

    // Create the same java_library() rule, but with a dep on //src/com/facebook/buck/cli:common.
    javaLibraryBuilder.addDep(commonJavaLibrary.getBuildTarget());
    BuildRule libraryWithCommon = javaLibraryBuilder.build(ruleResolver2, filesystem);

    // Assert that the RuleKeys are distinct.
    RuleKey r1 = ruleKeyFactory.build(libraryNoCommon);
    RuleKey r2 = ruleKeyFactory2.build(libraryWithCommon);
    assertThat(
        "Rule keys should be distinct because the deps of the rules are different.",
        r1,
        not(equalTo(r2)));
  }

  @Test
  public void ensureSimpleValuesCorrectRuleKeyChangesMade() {
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver());
    SourcePathResolver resolver = DefaultSourcePathResolver.from(ruleFinder);
    // Identical reflective inputs must yield identical keys.
    RuleKey reflective =
        createBuilder(resolver, ruleFinder)
            .setReflectively("long", 42L)
            .setReflectively("boolean", true)
            .setReflectively("path", FakeSourcePath.of("location/of/the/rebel/plans"))
            .build(RuleKey::new);
    RuleKey manual =
        createBuilder(resolver, ruleFinder)
            .setReflectively("long", 42L)
            .setReflectively("boolean", true)
            .setReflectively("path", FakeSourcePath.of("location/of/the/rebel/plans"))
            .build(RuleKey::new);
    assertEquals(manual, reflective);
  }

  @Test
  public void ensureTwoListsOfSameRuleKeyAppendablesHaveSameRuleKey() {
    ImmutableList<TestRuleKeyAppendable> ruleKeyAppendableList
= ImmutableList.of(new TestRuleKeyAppendable("foo"), new TestRuleKeyAppendable("bar"));
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver());
    SourcePathResolver resolver = DefaultSourcePathResolver.from(ruleFinder);
    RuleKey ruleKeyPairA =
        createBuilder(resolver, ruleFinder)
            .setReflectively("ruleKeyAppendableList", ruleKeyAppendableList)
            .build(RuleKey::new);
    RuleKey ruleKeyPairB =
        createBuilder(resolver, ruleFinder)
            .setReflectively("ruleKeyAppendableList", ruleKeyAppendableList)
            .build(RuleKey::new);
    assertEquals(ruleKeyPairA, ruleKeyPairB);
  }

  @Test
  public void ensureTwoListsOfDifferentRuleKeyAppendablesHaveDifferentRuleKeys() {
    // Same elements in a different order: list ordering must be significant in the key.
    ImmutableList<TestRuleKeyAppendable> ruleKeyAppendableListA =
        ImmutableList.of(new TestRuleKeyAppendable("foo"), new TestRuleKeyAppendable("bar"));
    ImmutableList<TestRuleKeyAppendable> ruleKeyAppendableListB =
        ImmutableList.of(new TestRuleKeyAppendable("bar"), new TestRuleKeyAppendable("foo"));
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver());
    SourcePathResolver resolver = DefaultSourcePathResolver.from(ruleFinder);
    RuleKey ruleKeyPairA =
        createBuilder(resolver, ruleFinder)
            .setReflectively("ruleKeyAppendableList", ruleKeyAppendableListA)
            .build(RuleKey::new);
    RuleKey ruleKeyPairB =
        createBuilder(resolver, ruleFinder)
            .setReflectively("ruleKeyAppendableList", ruleKeyAppendableListB)
            .build(RuleKey::new);
    assertNotEquals(ruleKeyPairA, ruleKeyPairB);
  }

  @Test
  public void ensureTwoMapsOfSameRuleKeyAppendablesHaveSameRuleKey() {
    ImmutableMap<String, TestRuleKeyAppendable> ruleKeyAppendableMap =
        ImmutableMap.of(
            "foo", new TestRuleKeyAppendable("foo"),
            "bar", new TestRuleKeyAppendable("bar"));
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver());
    SourcePathResolver resolver = DefaultSourcePathResolver.from(ruleFinder);
    RuleKey ruleKeyPairA =
        createBuilder(resolver, ruleFinder)
            .setReflectively("ruleKeyAppendableMap",
ruleKeyAppendableMap)
            .build(RuleKey::new);
    RuleKey ruleKeyPairB =
        createBuilder(resolver, ruleFinder)
            .setReflectively("ruleKeyAppendableMap", ruleKeyAppendableMap)
            .build(RuleKey::new);
    assertEquals(ruleKeyPairA, ruleKeyPairB);
  }

  @Test
  public void ensureTwoMapsOfDifferentRuleKeyAppendablesHaveDifferentRuleKeys() {
    // Same entries under swapped keys: the key/value association must be significant.
    ImmutableMap<String, TestRuleKeyAppendable> ruleKeyAppendableMapA =
        ImmutableMap.of(
            "foo", new TestRuleKeyAppendable("foo"),
            "bar", new TestRuleKeyAppendable("bar"));
    ImmutableMap<String, TestRuleKeyAppendable> ruleKeyAppendableMapB =
        ImmutableMap.of(
            "bar", new TestRuleKeyAppendable("bar"),
            "foo", new TestRuleKeyAppendable("foo"));
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver());
    SourcePathResolver resolver = DefaultSourcePathResolver.from(ruleFinder);
    RuleKey ruleKeyPairA =
        createBuilder(resolver, ruleFinder)
            .setReflectively("ruleKeyAppendableMap", ruleKeyAppendableMapA)
            .build(RuleKey::new);
    RuleKey ruleKeyPairB =
        createBuilder(resolver, ruleFinder)
            .setReflectively("ruleKeyAppendableMap", ruleKeyAppendableMapB)
            .build(RuleKey::new);
    assertNotEquals(ruleKeyPairA, ruleKeyPairB);
  }

  @Test
  public void ensureListsAreHandledProperly() {
    ImmutableList<SourceRoot> sourceroots = ImmutableList.of(new SourceRoot("cake"));
    ImmutableList<String> strings = ImmutableList.of("one", "two");
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver());
    SourcePathResolver resolver = DefaultSourcePathResolver.from(ruleFinder);
    RuleKey reflective =
        createBuilder(resolver, ruleFinder)
            .setReflectively("sourceroot", sourceroots)
            .setReflectively("strings", strings)
            .build(RuleKey::new);
    RuleKey manual =
        createBuilder(resolver, ruleFinder)
            .setReflectively("sourceroot", sourceroots)
            .setReflectively("strings", strings)
            .build(RuleKey::new);
    assertEquals(manual, reflective);
  }

  @Test
  public void differentSeedsMakeDifferentKeys() {
    SourcePathRuleFinder ruleFinder =
        new SourcePathRuleFinder(new
TestBuildRuleResolver());
    SourcePathResolver resolver = DefaultSourcePathResolver.from(ruleFinder);
    BuildTarget buildTarget = BuildTargetFactory.newInstance("//some:example");
    BuildRule buildRule = new FakeBuildRule(buildTarget);

    // Two factories with the default seed agree; a factory seeded with 1 must differ.
    RuleKey empty1 =
        new TestDefaultRuleKeyFactory(new DummyFileHashCache(), resolver, ruleFinder)
            .build(buildRule);
    RuleKey empty2 =
        new TestDefaultRuleKeyFactory(new DummyFileHashCache(), resolver, ruleFinder)
            .build(buildRule);
    RuleKey empty3 =
        new TestDefaultRuleKeyFactory(1, new DummyFileHashCache(), resolver, ruleFinder)
            .build(buildRule);

    assertThat(empty1, is(equalTo(empty2)));
    assertThat(empty1, is(not(equalTo(empty3))));
  }

  @Test
  public void testRuleKeyEqualsAndHashCodeMethods() {
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver());
    SourcePathResolver resolver = DefaultSourcePathResolver.from(ruleFinder);
    RuleKey keyPair1 =
        createBuilder(resolver, ruleFinder).setReflectively("something", "foo").build(RuleKey::new);
    RuleKey keyPair2 =
        createBuilder(resolver, ruleFinder).setReflectively("something", "foo").build(RuleKey::new);
    RuleKey keyPair3 =
        createBuilder(resolver, ruleFinder).setReflectively("something", "bar").build(RuleKey::new);
    assertEquals(keyPair1, keyPair2);
    assertEquals(keyPair1.hashCode(), keyPair2.hashCode());
    assertNotEquals(keyPair1, keyPair3);
    assertNotEquals(keyPair1.hashCode(), keyPair3.hashCode());
    assertNotEquals(keyPair2, keyPair3);
    assertNotEquals(keyPair2.hashCode(), keyPair3.hashCode());
  }

  @Test
  public void setInputPathSourcePath() {
    ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver());
    SourcePathResolver resolver = DefaultSourcePathResolver.from(ruleFinder);

    // Changing the name of a named source path should change the hash...
assertNotEquals(
        buildResult(
            createBuilder(resolver, ruleFinder)
                .setReflectively(
                    "key", PathSourcePath.of(projectFilesystem, Paths.get("something")))),
        buildResult(
            createBuilder(resolver, ruleFinder)
                .setReflectively(
                    "key", PathSourcePath.of(projectFilesystem, Paths.get("something", "else")))));

    // ... as should changing the key
    assertNotEquals(
        buildResult(
            createBuilder(resolver, ruleFinder)
                .setReflectively(
                    "key", PathSourcePath.of(projectFilesystem, Paths.get("something")))),
        buildResult(
            createBuilder(resolver, ruleFinder)
                .setReflectively(
                    "different-key", PathSourcePath.of(projectFilesystem, Paths.get("something")))));
  }

  @Test
  public void setNonHashingSourcePathsWithDifferentRelativePaths() {
    ProjectFilesystem projectFilesystem = new FakeProjectFilesystem();
    PathSourcePath sourcePathOne = FakeSourcePath.of(projectFilesystem, "something");
    PathSourcePath sourcePathTwo = FakeSourcePath.of(projectFilesystem, "something2");

    // Changing the relative path should change the rule key even though contents are not hashed.
    SourcePathRuleFinder ruleFinder1 = new SourcePathRuleFinder(new TestBuildRuleResolver());
    SourcePathRuleFinder ruleFinder2 = new SourcePathRuleFinder(new TestBuildRuleResolver());
    assertNotEquals(
        buildResult(
            createBuilder(DefaultSourcePathResolver.from(ruleFinder1), ruleFinder1)
                .setReflectively("key", new NonHashableSourcePathContainer(sourcePathOne))),
        buildResult(
            createBuilder(DefaultSourcePathResolver.from(ruleFinder2), ruleFinder2)
                .setReflectively("key", new NonHashableSourcePathContainer(sourcePathTwo))));
  }

  @Test
  public void setInputBuildTargetSourcePath() {
    BuildRuleResolver resolver = new TestBuildRuleResolver();
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(resolver);
    SourcePathResolver pathResolver = DefaultSourcePathResolver.from(ruleFinder);
    FakeBuildRule fake1 = new FakeBuildRule("//:fake1");
    FakeBuildRule fake2 = new FakeBuildRule("//:fake2");
    resolver.addToIndex(fake1);
    resolver.addToIndex(fake2);

    // Verify that two BuildTargetSourcePaths with the
// same rule and path are equal.
    assertEquals(
        buildResult(
            createBuilder(pathResolver, ruleFinder)
                .setReflectively(
                    "key",
                    ExplicitBuildTargetSourcePath.of(
                        fake1.getBuildTarget(), Paths.get("location")))),
        buildResult(
            createBuilder(pathResolver, ruleFinder)
                .setReflectively(
                    "key",
                    ExplicitBuildTargetSourcePath.of(
                        fake1.getBuildTarget(), Paths.get("location")))));

    // Verify that just changing the path of the build rule changes the rule key.
    assertNotEquals(
        buildResult(
            createBuilder(pathResolver, ruleFinder)
                .setReflectively(
                    "key",
                    ExplicitBuildTargetSourcePath.of(
                        fake1.getBuildTarget(), Paths.get("location")))),
        buildResult(
            createBuilder(pathResolver, ruleFinder)
                .setReflectively(
                    "key",
                    ExplicitBuildTargetSourcePath.of(
                        fake1.getBuildTarget(), Paths.get("different")))));

    // Verify that just changing the build rule rule key changes the calculated rule key.
    assertNotEquals(
        buildResult(
            createBuilder(pathResolver, ruleFinder)
                .setReflectively(
                    "key",
                    ExplicitBuildTargetSourcePath.of(
                        fake1.getBuildTarget(), Paths.get("location")))),
        buildResult(
            createBuilder(pathResolver, ruleFinder)
                .setReflectively(
                    "key",
                    ExplicitBuildTargetSourcePath.of(
                        fake2.getBuildTarget(), Paths.get("location")))));

    // Verify that just changing the key changes the calculated rule key.
assertNotEquals(
        buildResult(
            createBuilder(pathResolver, ruleFinder)
                .setReflectively(
                    "key",
                    ExplicitBuildTargetSourcePath.of(
                        fake1.getBuildTarget(), Paths.get("location")))),
        buildResult(
            createBuilder(pathResolver, ruleFinder)
                .setReflectively(
                    "different-key",
                    ExplicitBuildTargetSourcePath.of(
                        fake1.getBuildTarget(), Paths.get("location")))));
  }

  @Test
  public void setInputArchiveMemberSourcePath() {
    BuildRuleResolver resolver = new TestBuildRuleResolver();
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(resolver);
    SourcePathResolver pathResolver = DefaultSourcePathResolver.from(ruleFinder);

    FakeBuildRule fakeBuildRule = new FakeBuildRule("//:fake");
    resolver.addToIndex(fakeBuildRule);

    ExplicitBuildTargetSourcePath archive1 =
        ExplicitBuildTargetSourcePath.of(fakeBuildRule.getBuildTarget(), Paths.get("location"));
    PathSourcePath archive2 = FakeSourcePath.of("otherLocation");

    // Verify that two ArchiveMemberSourcePaths with the same archive and path
    assertEquals(
        buildResult(
            createBuilder(pathResolver, ruleFinder)
                .setReflectively(
                    "key", ArchiveMemberSourcePath.of(archive1, Paths.get("location")))),
        buildResult(
            createBuilder(pathResolver, ruleFinder)
                .setReflectively(
                    "key", ArchiveMemberSourcePath.of(archive1, Paths.get("location")))));

    // Verify that just changing the archive changes the rule key
    assertNotEquals(
        buildResult(
            createBuilder(pathResolver, ruleFinder)
                .setReflectively(
                    "key", ArchiveMemberSourcePath.of(archive1, Paths.get("location")))),
        buildResult(
            createBuilder(pathResolver, ruleFinder)
                .setReflectively(
                    "key", ArchiveMemberSourcePath.of(archive2, Paths.get("location")))));

    // Verify that just changing the member path changes the rule key
    assertNotEquals(
        buildResult(
            createBuilder(pathResolver, ruleFinder)
                .setReflectively(
                    "key", ArchiveMemberSourcePath.of(archive1, Paths.get("location")))),
        buildResult(
            createBuilder(pathResolver, ruleFinder)
                .setReflectively(
                    "key", ArchiveMemberSourcePath.of(archive1,
Paths.get("different"))))); } @Test public void canAddMapsToRuleKeys() { ImmutableMap<String, ?> map = ImmutableMap.of("path", FakeSourcePath.of("some/path"), "boolean", true); SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver()); SourcePathResolver resolver = DefaultSourcePathResolver.from(ruleFinder); RuleKey key = createBuilder(resolver, ruleFinder).setReflectively("map", map).build(RuleKey::new); assertNotNull(key); } @Test public void keysOfMapsAddedToRuleKeysDoNotNeedToBeStrings() { ImmutableMap<?, ?> map = ImmutableMap.of( FakeSourcePath.of("some/path"), "woohoo!", 42L, "life, the universe and everything"); SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver()); SourcePathResolver resolver = DefaultSourcePathResolver.from(ruleFinder); RuleKey key = createBuilder(resolver, ruleFinder).setReflectively("map", map).build(RuleKey::new); assertNotNull(key); } @Test public void canAddRuleKeyAppendable() { SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver()); SourcePathResolver resolver = DefaultSourcePathResolver.from(ruleFinder); RuleKey key = createBuilder(resolver, ruleFinder) .setReflectively("rule_key_appendable", new TestRuleKeyAppendable("foo")) .build(RuleKey::new); assertNotNull(key); } @Test public void canAddListOfRuleKeyAppendable() { ImmutableList<TestRuleKeyAppendable> list = ImmutableList.of(new TestRuleKeyAppendable("foo"), new TestRuleKeyAppendable("bar")); SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver()); SourcePathResolver resolver = DefaultSourcePathResolver.from(ruleFinder); RuleKey key = createBuilder(resolver, ruleFinder).setReflectively("list", list).build(RuleKey::new); assertNotNull(key); } @Test public void canAddMapOfRuleKeyAppendable() { ImmutableMap<String, TestRuleKeyAppendable> map = ImmutableMap.of( "foo", new TestRuleKeyAppendable("foo"), "bar", new TestRuleKeyAppendable("bar")); 
SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver()); SourcePathResolver resolver = DefaultSourcePathResolver.from(ruleFinder); RuleKey key = createBuilder(resolver, ruleFinder).setReflectively("map", map).build(RuleKey::new); assertNotNull(key); } @Test public void changingRuleKeyFieldChangesKeyWhenClassImplementsAppendToRuleKey() { BuildTarget target = BuildTargetFactory.newInstance("//cheese:peas"); ProjectFilesystem projectFilesystem = new FakeProjectFilesystem(); BuildRuleParams params = TestBuildRuleParams.create(); SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver()); SourcePathResolver pathResolver = DefaultSourcePathResolver.from(ruleFinder); FileHashCache hashCache = new StackedFileHashCache( ImmutableList.of( DefaultFileHashCache.createDefaultFileHashCache( new FakeProjectFilesystem(), FileHashCacheMode.DEFAULT))); BuildRule buildRule1 = new TestRuleKeyAppendableBuildRule(target, projectFilesystem, params, "foo", "bar"); BuildRule buildRule2 = new TestRuleKeyAppendableBuildRule(target, projectFilesystem, params, "foo", "xyzzy"); RuleKey ruleKey1 = new TestDefaultRuleKeyFactory(hashCache, pathResolver, ruleFinder).build(buildRule1); RuleKey ruleKey2 = new TestDefaultRuleKeyFactory(hashCache, pathResolver, ruleFinder).build(buildRule2); assertNotEquals(ruleKey1, ruleKey2); } @Test public void ruleKeyIncludesClass() { class AddsToRuleKey1 implements AddsToRuleKey {} class AddsToRuleKey2 implements AddsToRuleKey {} class SimpleBuildRule extends AbstractBuildRule { @AddToRuleKey final AddsToRuleKey value; protected SimpleBuildRule( BuildTarget buildTarget, ProjectFilesystem projectFilesystem, AddsToRuleKey value) { super(buildTarget, projectFilesystem); this.value = value; } @Override public SortedSet<BuildRule> getBuildDeps() { return ImmutableSortedSet.of(); } @Override public ImmutableList<? 
extends Step> getBuildSteps( BuildContext context, BuildableContext buildableContext) { return ImmutableList.of(); } @Nullable @Override public SourcePath getSourcePathToOutput() { return null; } } BuildTarget target = BuildTargetFactory.newInstance("//cheese:peas"); ProjectFilesystem projectFilesystem = new FakeProjectFilesystem(); SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver()); SourcePathResolver pathResolver = DefaultSourcePathResolver.from(ruleFinder); FileHashCache hashCache = new StackedFileHashCache( ImmutableList.of( DefaultFileHashCache.createDefaultFileHashCache( new FakeProjectFilesystem(), FileHashCacheMode.DEFAULT))); RuleKey ruleKey1 = new TestDefaultRuleKeyFactory(0, hashCache, pathResolver, ruleFinder) .build(new SimpleBuildRule(target, projectFilesystem, new AddsToRuleKey1())); RuleKey ruleKey2 = new TestDefaultRuleKeyFactory(0, hashCache, pathResolver, ruleFinder) .build(new SimpleBuildRule(target, projectFilesystem, new AddsToRuleKey2())); assertNotEquals(ruleKey1, ruleKey2); } @Test public void changingRuleKeyFieldOfDepChangesKeyWhenClassImplementsAppendToRuleKey() { BuildTarget target = BuildTargetFactory.newInstance("//cheese:peas"); ProjectFilesystem projectFilesystem = new FakeProjectFilesystem(); BuildRuleParams params = TestBuildRuleParams.create(); SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver()); SourcePathResolver pathResolver = DefaultSourcePathResolver.from(ruleFinder); FileHashCache hashCache = new StackedFileHashCache( ImmutableList.of( DefaultFileHashCache.createDefaultFileHashCache( new FakeProjectFilesystem(), FileHashCacheMode.DEFAULT))); BuildRule buildRule1 = new TestRuleKeyAppendableBuildRule(target, projectFilesystem, params, "foo", "bar"); BuildRule buildRule2 = new TestRuleKeyAppendableBuildRule(target, projectFilesystem, params, "foo", "xyzzy"); BuildTarget parentTarget = BuildTargetFactory.newInstance("//cheese:milk"); BuildRuleParams 
parentParams1 = TestBuildRuleParams.create().withDeclaredDeps(ImmutableSortedSet.of(buildRule1)); BuildRule parentRule1 = new NoopBuildRuleWithDeclaredAndExtraDeps(parentTarget, projectFilesystem, parentParams1); BuildRuleParams parentParams2 = TestBuildRuleParams.create().withDeclaredDeps(ImmutableSortedSet.of(buildRule2)); BuildRule parentRule2 = new NoopBuildRuleWithDeclaredAndExtraDeps(parentTarget, projectFilesystem, parentParams2); RuleKey ruleKey1 = new TestDefaultRuleKeyFactory(hashCache, pathResolver, ruleFinder).build(parentRule1); RuleKey ruleKey2 = new TestDefaultRuleKeyFactory(hashCache, pathResolver, ruleFinder).build(parentRule2); assertNotEquals(ruleKey1, ruleKey2); } @Test public void subclassWithNoopSetter() { class NoopSetterRuleKeyBuilder extends UncachedRuleKeyBuilder { public NoopSetterRuleKeyBuilder( SourcePathRuleFinder ruleFinder, SourcePathResolver pathResolver, FileHashCache hashCache, RuleKeyFactory<RuleKey> defaultRuleKeyFactory) { super(ruleFinder, pathResolver, hashCache, defaultRuleKeyFactory); } @Override protected NoopSetterRuleKeyBuilder setSourcePath(SourcePath sourcePath) { return this; } } SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver()); SourcePathResolver pathResolver = DefaultSourcePathResolver.from(ruleFinder); FileHashCache hashCache = new FakeFileHashCache(ImmutableMap.of()); RuleKeyFactory<RuleKey> ruleKeyFactory = new TestDefaultRuleKeyFactory(hashCache, pathResolver, ruleFinder); RuleKey nullRuleKey = new NoopSetterRuleKeyBuilder(ruleFinder, pathResolver, hashCache, ruleKeyFactory) .build(RuleKey::new); RuleKey noopRuleKey = new NoopSetterRuleKeyBuilder(ruleFinder, pathResolver, hashCache, ruleKeyFactory) .setReflectively("key", FakeSourcePath.of("value")) .build(RuleKey::new); assertThat(noopRuleKey, is(equalTo(nullRuleKey))); } @Test public void declaredDepsAndExtraDepsGenerateDifferentRuleKeys() { SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new 
TestBuildRuleResolver()); SourcePathResolver sourcePathResolver = DefaultSourcePathResolver.from(ruleFinder); FileHashCache hashCache = new FakeFileHashCache(ImmutableMap.of()); DefaultRuleKeyFactory ruleKeyFactory = new TestDefaultRuleKeyFactory(hashCache, sourcePathResolver, ruleFinder); BuildTarget target = BuildTargetFactory.newInstance("//a:target"); BuildTarget depTarget = BuildTargetFactory.newInstance("//some:dep"); ProjectFilesystem projectFilesystem = new FakeProjectFilesystem(); BuildRuleParams depParams = TestBuildRuleParams.create(); NoopBuildRuleWithDeclaredAndExtraDeps dep = new NoopBuildRuleWithDeclaredAndExtraDeps(depTarget, projectFilesystem, depParams); BuildRuleParams paramsWithDeclaredDep = TestBuildRuleParams.create().withDeclaredDeps(ImmutableSortedSet.of(dep)); NoopBuildRuleWithDeclaredAndExtraDeps ruleWithDeclaredDep = new NoopBuildRuleWithDeclaredAndExtraDeps(target, projectFilesystem, paramsWithDeclaredDep); BuildRuleParams paramsWithExtraDep = TestBuildRuleParams.create().withExtraDeps(ImmutableSortedSet.of(dep)); NoopBuildRuleWithDeclaredAndExtraDeps ruleWithExtraDep = new NoopBuildRuleWithDeclaredAndExtraDeps(target, projectFilesystem, paramsWithExtraDep); BuildRuleParams paramsWithBothDeps = TestBuildRuleParams.create() .withDeclaredDeps(ImmutableSortedSet.of(dep)) .withExtraDeps(ImmutableSortedSet.of(dep)); NoopBuildRuleWithDeclaredAndExtraDeps ruleWithBothDeps = new NoopBuildRuleWithDeclaredAndExtraDeps(target, projectFilesystem, paramsWithBothDeps); assertNotEquals( ruleKeyFactory.build(ruleWithDeclaredDep), ruleKeyFactory.build(ruleWithExtraDep)); assertNotEquals( ruleKeyFactory.build(ruleWithDeclaredDep), ruleKeyFactory.build(ruleWithBothDeps)); assertNotEquals(ruleKeyFactory.build(ruleWithExtraDep), ruleKeyFactory.build(ruleWithBothDeps)); } @Test public void immutablesCanAddValueMethodsFromInterfaceImmutablesToRuleKeys() { SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver()); 
SourcePathResolver resolver = DefaultSourcePathResolver.from(ruleFinder); RuleKey first = createBuilder(resolver, ruleFinder) .setReflectively("value", TestRuleKeyInterfaceImmutable.of("added-1", "ignored-1")) .build(RuleKey::new); RuleKey second = createBuilder(resolver, ruleFinder) .setReflectively("value", TestRuleKeyInterfaceImmutable.of("added-1", "ignored-2")) .build(RuleKey::new); RuleKey third = createBuilder(resolver, ruleFinder) .setReflectively("value", TestRuleKeyInterfaceImmutable.of("added-2", "ignored-2")) .build(RuleKey::new); assertEquals(first, second); assertNotEquals(first, third); } @Test public void lambdaAddsPseudoClassName() { SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver()); SourcePathResolver resolver = DefaultSourcePathResolver.from(ruleFinder); Result<RuleKey, String> result = createFactory(resolver, ruleFinder) .buildForDiagnostics((RuleKeyAppendable) (sink) -> {}, new StringRuleKeyHasher()); assertThat( result.diagKey, Matchers.containsString( "string(\"com.facebook.buck.rules.RuleKeyTest$?????\"):key(.class)")); } @Test public void anonymousClassAddsPseudoClassName() { SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver()); SourcePathResolver resolver = DefaultSourcePathResolver.from(ruleFinder); Result<RuleKey, String> result = createFactory(resolver, ruleFinder) .buildForDiagnostics(new AddsToRuleKey() {}, new StringRuleKeyHasher()); assertThat( result.diagKey, Matchers.containsString( "string(\"com.facebook.buck.rules.RuleKeyTest$?????\"):key(.class)")); } @Value.Immutable @BuckStyleTuple interface AbstractTestRuleKeyInterfaceImmutable extends AddsToRuleKey { @AddToRuleKey String getRuleKeyValue(); String getNonRuleKeyValue(); } @Test public void immutablesCanAddNonDefaultImmutableValues() { SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver()); SourcePathResolver resolver = DefaultSourcePathResolver.from(ruleFinder); RuleKey 
first = createBuilder(resolver, ruleFinder) .setReflectively("value", TestRuleKeyImmutableWithDefaults.builder().build()) .build(RuleKey::new); RuleKey second = createBuilder(resolver, ruleFinder) .setReflectively( "value", TestRuleKeyImmutableWithDefaults.builder().setRuleKeyValue("other").build()) .build(RuleKey::new); assertNotEquals(first, second); } @Value.Immutable @BuckStyleImmutable abstract static class AbstractTestRuleKeyImmutableWithDefaults implements AddsToRuleKey { @AddToRuleKey @Value.Default String getRuleKeyValue() { return "default"; } } @Test public void immutablesCanAddValueMethodsFromExtendedInterfaceImmutablesToRuleKeys() { SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver()); SourcePathResolver resolver = DefaultSourcePathResolver.from(ruleFinder); RuleKey first = createBuilder(resolver, ruleFinder) .setReflectively("value", TestRuleKeyAbstractImmutable.of("added-1", "ignored-1")) .build(RuleKey::new); RuleKey second = createBuilder(resolver, ruleFinder) .setReflectively("value", TestRuleKeyAbstractImmutable.of("added-1", "ignored-2")) .build(RuleKey::new); RuleKey third = createBuilder(resolver, ruleFinder) .setReflectively("value", TestRuleKeyAbstractImmutable.of("added-2", "ignored-2")) .build(RuleKey::new); assertEquals(first, second); assertNotEquals(first, third); } @Value.Immutable @BuckStylePackageVisibleTuple abstract static class AbstractTestPackageVisibleTuple implements AddsToRuleKey { @AddToRuleKey abstract int getValue(); } @Value.Immutable @BuckStylePackageVisibleImmutable abstract static class AbstractTestPackageVisibleImmutable implements AddsToRuleKey { @AddToRuleKey abstract int getValue(); } @Test public void packageVisibleImmutablesCanUseAddToRuleKey() { SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver()); SourcePathResolver resolver = DefaultSourcePathResolver.from(ruleFinder); createBuilder(resolver, ruleFinder) .setReflectively("value", 
TestPackageVisibleTuple.of(0)) .build(RuleKey::new); createBuilder(resolver, ruleFinder) .setReflectively("value", TestPackageVisibleImmutable.builder().setValue(0).build()) .build(RuleKey::new); } @Value.Immutable @BuckStyleTuple abstract static class AbstractTestRuleKeyAbstractImmutable implements AddsToRuleKey { @AddToRuleKey abstract String getRuleKeyValue(); abstract String getNonRuleKeyValue(); } @Test(expected = UncheckedExecutionException.class) public void badUseOfAddValueMethodsToRuleKey() { java.util.logging.Logger.getGlobal().addHandler(new ConsoleHandler()); SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver()); SourcePathResolver resolver = DefaultSourcePathResolver.from(ruleFinder); createBuilder(resolver, ruleFinder) .setReflectively("value", (BadUseOfAddValueMethodsToRuleKey) () -> "") .build(RuleKey::new); } interface BadUseOfAddValueMethodsToRuleKey extends AddsToRuleKey { @AddToRuleKey String whatever(); } interface EmptyInterface {} interface ExtendsBadUseAndOther extends EmptyInterface, BadUseOfAddValueMethodsToRuleKey {} abstract class EmptyClass {} abstract class ExtendsFurtherBadUseAndOther extends EmptyClass implements EmptyInterface, ExtendsBadUseAndOther {} @Test(expected = UncheckedExecutionException.class) public void badUseOfAddValueMethodsToRuleKeyInHierarchy() { SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver()); SourcePathResolver resolver = DefaultSourcePathResolver.from(ruleFinder); createBuilder(resolver, ruleFinder) .setReflectively("value", new ClassWithBadThingInHierarchy()) .build(RuleKey::new); } class ClassWithBadThingInHierarchy extends ExtendsFurtherBadUseAndOther { @Override public String whatever() { return null; } } @Test(expected = UncheckedExecutionException.class) public void badUseOfAddValueMethodsToRuleKeyInSomeSuperInterface() { SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(new TestBuildRuleResolver()); SourcePathResolver 
resolver = DefaultSourcePathResolver.from(ruleFinder); createBuilder(resolver, ruleFinder) .setReflectively( "value", new DerivedFromImplementsBadUseOfAddValueMethodsToRuleKey() { @Override public String whatever() { return null; } }) .build(RuleKey::new); } abstract class ImplementsBadUseOfAddValueMethodsToRuleKey implements BadUseOfAddValueMethodsToRuleKey {} abstract class DerivedFromImplementsBadUseOfAddValueMethodsToRuleKey extends ImplementsBadUseOfAddValueMethodsToRuleKey {} private static class TestRuleKeyAppendable implements AddsToRuleKey { @AddToRuleKey private final String value; @AddToRuleKey private final String foo = "foo"; @AddToRuleKey private final String bar = "bar"; public TestRuleKeyAppendable(String value) { this.value = value; } } private static class TestRuleKeyAppendableBuildRule extends NoopBuildRuleWithDeclaredAndExtraDeps { private final String foo; @SuppressWarnings("PMD.UnusedPrivateField") @AddToRuleKey private final String bar; public TestRuleKeyAppendableBuildRule( BuildTarget buildTarget, ProjectFilesystem projectFilesystem, BuildRuleParams buildRuleParams, String foo, String bar) { super(buildTarget, projectFilesystem, buildRuleParams); this.foo = foo; this.bar = bar; } @Override public void appendToRuleKey(RuleKeyObjectSink sink) { sink.setReflectively("foo", foo); } } private DefaultRuleKeyFactory.Builder<HashCode> createBuilder( SourcePathResolver resolver, SourcePathRuleFinder ruleFinder) { TestDefaultRuleKeyFactory factory = createFactory(resolver, ruleFinder); BuildTarget buildTarget = BuildTargetFactory.newInstance("//some:example"); BuildRule buildRule = new FakeBuildRule(buildTarget); return factory.newBuilderForTesting(buildRule); } private TestDefaultRuleKeyFactory createFactory( SourcePathResolver resolver, SourcePathRuleFinder ruleFinder) { FileHashCache fileHashCache = new FileHashCache() { @Override public void invalidate(Path path) {} @Override public void invalidateAll() {} @Override public HashCode get(Path path) 
{ return HashCode.fromString("deadbeef"); } @Override public HashCode get(ArchiveMemberPath archiveMemberPath) { return HashCode.fromString("deadbeef"); } @Override public long getSize(Path path) { return 0; } @Override public void set(Path path, HashCode hashCode) {} }; return new TestDefaultRuleKeyFactory(fileHashCache, resolver, ruleFinder); } private RuleKeyResult<RuleKey> buildResult(AbstractRuleKeyBuilder<HashCode> builder) { return ((DefaultRuleKeyFactory.Builder<HashCode>) builder).buildResult(RuleKey::new); } }
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.azure.core.util;

import com.azure.core.http.HttpHeaders;
import com.azure.core.http.policy.HttpLogOptions;
import com.azure.core.util.logging.ClientLogger;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;

import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Stream;

import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;

/** Unit tests for {@code CoreUtils}. */
public class CoreUtilsTests {
    // Sample payload used by the BOM-aware decoding tests.
    private static final byte[] BYTES = "Hello world!".getBytes(StandardCharsets.UTF_8);

    // Byte-order marks for the encodings bomAwareToString is expected to recognize.
    // NOTE(review): UTF_16LE_BOM is a byte prefix of UTF_32LE_BOM, so the detector must check
    // the 4-byte UTF-32LE mark before the 2-byte UTF-16LE one — presumably CoreUtils does;
    // confirm against its implementation.
    private static final byte[] UTF_8_BOM = {(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
    private static final byte[] UTF_16BE_BOM = {(byte) 0xFE, (byte) 0xFF};
    private static final byte[] UTF_16LE_BOM = {(byte) 0xFF, (byte) 0xFE};
    private static final byte[] UTF_32BE_BOM = {(byte) 0x00, (byte) 0x00, (byte) 0xFE, (byte) 0xFF};
    private static final byte[] UTF_32LE_BOM = {(byte) 0xFF, (byte) 0xFE, (byte) 0x00, (byte) 0x00};

    private static final String TIMEOUT_PROPERTY_NAME = "TIMEOUT_PROPERTY_NAME";

    @Test
    public void findFirstOfTypeEmptyArgs() {
        assertNull(CoreUtils.findFirstOfType(null, Integer.class));
    }

    @Test
    public void findFirstOfTypeWithOneOfType() {
        int expected = 1;
        Object[] args = { "string", expected };
        Integer actual = CoreUtils.findFirstOfType(args, Integer.class);
        Assertions.assertEquals(expected, actual);
    }

    @Test
    public void findFirstOfTypeWithMultipleOfType() {
        // The first matching element (1, not 10) must be returned.
        int expected = 1;
        Object[] args = { "string", expected, 10 };
        Integer actual = CoreUtils.findFirstOfType(args, Integer.class);
        Assertions.assertEquals(expected, actual);
    }

    @Test
    public void findFirstOfTypeWithNoneOfType() {
        Object[] args = { "string", "anotherString" };
        assertNull(CoreUtils.findFirstOfType(args, Integer.class));
    }

    @Test
    public void testProperties() {
        assertNotNull(CoreUtils.getProperties("azure-core.properties").get("version"));
        assertNotNull(CoreUtils.getProperties("azure-core.properties").get("name"));
        // Version must be a semver-style string, optionally with a "-beta.N" suffix.
        assertTrue(CoreUtils.getProperties("azure-core.properties").get("version")
            .matches("\\d+\\.\\d+\\.\\d+(-beta\\.\\d+)?"));
    }

    @Test
    public void testMissingProperties() {
        // A missing properties file yields an empty map, never null.
        assertNotNull(CoreUtils.getProperties("foo.properties"));
        assertTrue(CoreUtils.getProperties("foo.properties").isEmpty());
        assertNull(CoreUtils.getProperties("azure-core.properties").get("foo"));
    }

    @ParameterizedTest
    @MethodSource("cloneIntArraySupplier")
    public void cloneIntArray(int[] intArray, int[] expected) {
        assertArrayEquals(expected, CoreUtils.clone(intArray));
    }

    private static Stream<Arguments> cloneIntArraySupplier() {
        return Stream.of(
            Arguments.of(null, null),
            Arguments.of(new int[0], new int[0]),
            Arguments.of(new int[] { 1, 2, 3}, new int[] { 1, 2, 3})
        );
    }

    @ParameterizedTest
    @MethodSource("cloneGenericArraySupplier")
    public <T> void cloneGenericArray(T[] genericArray, T[] expected) {
        assertArrayEquals(expected, CoreUtils.clone(genericArray));
    }

    private static Stream<Arguments> cloneGenericArraySupplier() {
        return Stream.of(
            Arguments.of(null, null),
            Arguments.of(new String[0], new String[0]),
            Arguments.of(new String[] { "1", "2", "3"}, new String[] { "1", "2", "3" })
        );
    }

    @ParameterizedTest
    @MethodSource("isNullOrEmptyCollectionSupplier")
    public void isNullOrEmptyCollection(Collection<?> collection, boolean expected) {
        assertEquals(expected, CoreUtils.isNullOrEmpty(collection));
    }

    private static Stream<Arguments> isNullOrEmptyCollectionSupplier() {
        return Stream.of(
            Arguments.of(null, true),
            Arguments.of(new ArrayList<>(), true),
            Arguments.of(Collections.singletonList(1), false)
        );
    }

    @ParameterizedTest
    @MethodSource("arrayToStringSupplier")
    public <T> void arrayToString(T[] array, Function<T, String> mapper, String expected) {
        assertEquals(expected, CoreUtils.arrayToString(array, mapper));
    }

    private static Stream<Arguments> arrayToStringSupplier() {
        Function<?, String> toStringFunction = String::valueOf;

        return Stream.of(
            // Null and empty arrays map to null, not an empty string.
            Arguments.of(null, null, null),
            Arguments.of(new String[0], toStringFunction, null),
            Arguments.of(new String[] { "" }, toStringFunction, ""),
            Arguments.of(new String[] { "Hello world!" }, toStringFunction, "Hello world!"),
            Arguments.of(new String[] { "1", "2", "3" }, toStringFunction, "1,2,3")
        );
    }

    @ParameterizedTest
    @MethodSource("bomAwareToStringSupplier")
    public void bomAwareToString(byte[] bytes, String contentType, String expected) {
        assertEquals(expected, CoreUtils.bomAwareToString(bytes, contentType));
    }

    private static Stream<Arguments> bomAwareToStringSupplier() {
        return Stream.of(
            Arguments.arguments(null, null, null),
            // No BOM, no charset hint: UTF-8 is the default.
            Arguments.arguments(BYTES, null, new String(BYTES, StandardCharsets.UTF_8)),
            // No BOM: the Content-Type charset parameter wins.
            Arguments.arguments(BYTES, "charset=UTF-16BE", new String(BYTES, StandardCharsets.UTF_16BE)),
            // An unparseable charset falls back to UTF-8.
            Arguments.arguments(BYTES, "charset=invalid", new String(BYTES, StandardCharsets.UTF_8)),
            // A BOM, when present, selects the decoder.
            Arguments.arguments(addBom(UTF_8_BOM), null, new String(BYTES, StandardCharsets.UTF_8)),
            Arguments.arguments(addBom(UTF_16BE_BOM), null, new String(BYTES, StandardCharsets.UTF_16BE)),
            Arguments.arguments(addBom(UTF_16LE_BOM), null, new String(BYTES, StandardCharsets.UTF_16LE)),
            Arguments.arguments(addBom(UTF_32BE_BOM), null, new String(BYTES, Charset.forName("UTF-32BE"))),
            Arguments.arguments(addBom(UTF_32LE_BOM), null, new String(BYTES, Charset.forName("UTF-32LE"))),
            Arguments.arguments(addBom(UTF_8_BOM), "charset=UTF-8", new String(BYTES, StandardCharsets.UTF_8)),
            // The BOM takes precedence over a conflicting charset hint.
            Arguments.arguments(addBom(UTF_8_BOM), "charset=UTF-16BE", new String(BYTES, StandardCharsets.UTF_8))
        );
    }

    // Prepends the given BOM bytes to the shared BYTES payload.
    private static byte[] addBom(byte[] arr1) {
        byte[] mergedArray = new byte[arr1.length + BYTES.length];

        System.arraycopy(arr1, 0, mergedArray, 0, arr1.length);
        System.arraycopy(BYTES, 0, mergedArray, arr1.length, BYTES.length);

        return mergedArray;
    }

    @ParameterizedTest
    @MethodSource("getApplicationIdSupplier")
    public void getApplicationId(ClientOptions clientOptions, HttpLogOptions logOptions, String expected) {
        assertEquals(expected, CoreUtils.getApplicationId(clientOptions, logOptions));
    }

    // ClientOptions takes precedence over the (deprecated) HttpLogOptions application ID.
    @SuppressWarnings("deprecation")
    private static Stream<Arguments> getApplicationIdSupplier() {
        String clientOptionApplicationId = "clientOptions";
        String logOptionsApplicationId = "logOptions";

        ClientOptions clientOptionsWithApplicationId =
            new ClientOptions().setApplicationId(clientOptionApplicationId);
        ClientOptions clientOptionsWithoutApplicationId = new ClientOptions();

        HttpLogOptions logOptionsWithApplicationId =
            new HttpLogOptions().setApplicationId(logOptionsApplicationId);
        HttpLogOptions logOptionsWithoutApplicationId = new HttpLogOptions();

        return Stream.of(
            Arguments.of(clientOptionsWithApplicationId, logOptionsWithApplicationId, clientOptionApplicationId),
            Arguments.of(clientOptionsWithApplicationId, logOptionsWithoutApplicationId, clientOptionApplicationId),
            Arguments.of(clientOptionsWithApplicationId, null, clientOptionApplicationId),
            Arguments.of(clientOptionsWithoutApplicationId, logOptionsWithApplicationId, logOptionsApplicationId),
            Arguments.of(clientOptionsWithoutApplicationId, logOptionsWithoutApplicationId, null),
            Arguments.of(clientOptionsWithoutApplicationId, null, null),
            Arguments.of(null, logOptionsWithApplicationId, logOptionsApplicationId),
            Arguments.of(null, logOptionsWithoutApplicationId, null),
            Arguments.of(null, null, null)
        );
    }

    @ParameterizedTest
    @MethodSource("createHttpHeadersFromClientOptionsSupplier")
    public void createHttpHeadersFromClientOptions(ClientOptions clientOptions, HttpHeaders expected) {
        HttpHeaders actual = CoreUtils.createHttpHeadersFromClientOptions(clientOptions);
        if (expected == null) {
            assertNull(actual);
        } else {
            // Compare as maps: header ordering is not part of the contract.
            assertEquals(expected.toMap(), actual.toMap());
        }
    }

    private static Stream<Arguments> createHttpHeadersFromClientOptionsSupplier() {
        List<Header> multipleHeadersList = new ArrayList<>();
        multipleHeadersList.add(new Header("a", "header"));
        multipleHeadersList.add(new Header("another", "headerValue"));

        Map<String, String> multipleHeadersMap = new HashMap<>();
        multipleHeadersMap.put("a", "header");
        multipleHeadersMap.put("another", "headerValue");

        return Stream.of(
            // ClientOptions is null, null is returned.
            Arguments.of(null, null),

            // ClientOptions doesn't contain Header values, null is returned.
            Arguments.of(new ClientOptions(), null),

            // ClientOptions contains a single header value, a single header HttpHeaders is returned.
            Arguments.of(new ClientOptions().setHeaders(Collections.singletonList(new Header("a", "header"))),
                new HttpHeaders(Collections.singletonMap("a", "header"))),

            // ClientOptions contains multiple header values, a multi-header HttpHeaders is returned.
            Arguments.of(new ClientOptions().setHeaders(multipleHeadersList),
                new HttpHeaders(multipleHeadersMap))
        );
    }

    @ParameterizedTest
    @MethodSource("getDefaultTimeoutFromEnvironmentSupplier")
    public void getDefaultTimeoutFromEnvironmentTests(Configuration configuration, Duration defaultTimeout,
        ClientLogger logger, Duration expectedTimeout) {
        assertEquals(expectedTimeout, CoreUtils.getDefaultTimeoutFromEnvironment(configuration,
            TIMEOUT_PROPERTY_NAME, defaultTimeout, logger));
    }

    private static Stream<Arguments> getDefaultTimeoutFromEnvironmentSupplier() {
        ClientLogger logger = new ClientLogger(CoreUtilsTests.class);

        return Stream.of(
            // Configuration doesn't have the timeout property configured.
            Arguments.of(Configuration.NONE, Duration.ofMillis(10000), logger, Duration.ofMillis(10000)),

            // Configuration has an empty string timeout property configured.
            Arguments.of(new Configuration().put(TIMEOUT_PROPERTY_NAME, ""), Duration.ofMillis(10000), logger,
                Duration.ofMillis(10000)),

            // Configuration has a value that isn't a valid number.
            Arguments.of(new Configuration().put(TIMEOUT_PROPERTY_NAME, "ten"), Duration.ofMillis(10000), logger,
                Duration.ofMillis(10000)),

            // Configuration has a negative value.
            Arguments.of(new Configuration().put(TIMEOUT_PROPERTY_NAME, "-10"), Duration.ofMillis(10000), logger,
                Duration.ZERO),

            // Configuration has a zero value.
            Arguments.of(new Configuration().put(TIMEOUT_PROPERTY_NAME, "0"), Duration.ofMillis(10000), logger,
                Duration.ZERO),

            // Configuration has a positive value.
            Arguments.of(new Configuration().put(TIMEOUT_PROPERTY_NAME, "42"), Duration.ofMillis(10000), logger,
                Duration.ofMillis(42))
        );
    }

    @ParameterizedTest
    @MethodSource("invalidContextMergeSupplier")
    public void invalidContextMerge(Context into, Context from) {
        // Both arguments are required; a null on either side must throw.
        assertThrows(NullPointerException.class, () -> CoreUtils.mergeContexts(into, from));
    }

    private static Stream<Arguments> invalidContextMergeSupplier() {
        return Stream.of(
            Arguments.of(null, Context.NONE),
            Arguments.of(Context.NONE, null)
        );
    }

    @Test
    public void mergingContextNoneReturnsIntoContext() {
        Context into = new Context("key", "value");

        Context merged = CoreUtils.mergeContexts(into, Context.NONE);
        assertEquals(into, merged);
    }

    @Test
    public void mergingReturnsTheExpectedResult() {
        // Expected order after the merge: the full "into" chain followed by the full "from" chain.
        List<Context> expectedMergedContextChain = new ArrayList<>();

        Context into = new Context("key1", "value1");
        expectedMergedContextChain.add(into);

        into = into.addData("key2", "value2");
        expectedMergedContextChain.add(into);

        into = into.addData("key3", "value3");
        expectedMergedContextChain.add(into);

        Context from = new Context("key4", "value4");
        expectedMergedContextChain.add(from);

        from = from.addData("key5", "value5");
        expectedMergedContextChain.add(from);

        from = from.addData("key6", "value6");
        expectedMergedContextChain.add(from);

        Context merged = CoreUtils.mergeContexts(into, from);
        Context[] mergedContextChain = merged.getContextChain();

        assertEquals(expectedMergedContextChain.size(), mergedContextChain.length);
        for (int i = 0; i < expectedMergedContextChain.size(); i++) {
            Context expected = expectedMergedContextChain.get(i);
            Context actual = mergedContextChain[i];

            assertEquals(expected.getKey(), actual.getKey());
            assertEquals(expected.getValue(), actual.getValue());
        }
    }
}
/**
 * $URL$
 *
 * $LastChangedBy$ - $LastChangedDate$
 */
package com.gpac.Osmo4;

import java.io.File;

import android.content.Context;
import android.content.pm.PackageManager.NameNotFoundException;
import android.os.Environment;
import android.util.Log;

/**
 * This class handles all GPAC configuration directories
 *
 * @author Pierre Souchay (VizionR SAS) (last changed by $LastChangedBy$)
 * @version $Revision$
 *
 */
/**
 * NOTE FOR DEVELOPERS
 * Whenever you add or change a path here, you MUST verify also the corresponding path in src/utils/os_config_init.c
 */
public class GpacConfig {

    // Log tag derived from the class name.
    private final static String LOG_GPAC_CONFIG = GpacConfig.class.getSimpleName();

    /**
     * Default Constructor
     *
     * Resolves every GPAC directory (app data, cache, native libs, GUI, shaders, logs) from the
     * Android package info, and picks /sdcard/osmo as the config directory if a GPAC.cfg already
     * exists there.
     *
     * @param context the Android context used to locate the application's data and cache dirs
     */
    public GpacConfig(Context context) {
        String dataDir;
        try {
            if (context == null || context.getPackageManager() == null) {
                // Fall back to a hard-coded path when no usable context is available.
                dataDir = Environment.getDataDirectory() + "/data/com.gpac.Osmo4/"; //$NON-NLS-1$
                Log.e(LOG_GPAC_CONFIG, "Cannot get context or PackageManager, using default directory=" + dataDir); //$NON-NLS-1$
            } else
                dataDir = context.getPackageManager().getApplicationInfo(context.getPackageName(), 0).dataDir;
        } catch (NameNotFoundException e) {
            Log.e(LOG_GPAC_CONFIG, "This is bad, we cannot find ourself : " + context.getPackageName(), e); //$NON-NLS-1$
            throw new RuntimeException("Cannot find package " + context.getPackageName(), e); //$NON-NLS-1$
        }
        gpacAppDirectory = dataDir + '/';
        Log.v(LOG_GPAC_CONFIG, "Using directory " + gpacAppDirectory + " for osmo"); //$NON-NLS-1$ //$NON-NLS-2$
        // NOTE(review): the null-context branch above only covers dataDir — if context really is
        // null, this call throws NullPointerException. Confirm callers never pass null, or guard
        // this access the same way.
        gpacCacheDirectory = context.getCacheDir().getAbsolutePath();
        Log.v(LOG_GPAC_CONFIG, "Using directory " + gpacCacheDirectory + " for cache"); //$NON-NLS-1$ //$NON-NLS-2$
        //
        //Log.v(LOG_GPAC_CONFIG, "Using directory " + gpacModulesDirectory + " for modules"); //$NON-NLS-1$ //$NON-NLS-2$
        gpacLibsDirectory = dataDir + "/lib/"; //$NON-NLS-1$
        Log.v(LOG_GPAC_CONFIG, "Using directory " + gpacLibsDirectory + " for libraries"); //$NON-NLS-1$ //$NON-NLS-2$
        gpacGuiDirectory = dataDir + "/gui/";
        Log.v(LOG_GPAC_CONFIG, "Using directory " + gpacGuiDirectory + " for GUI"); //$NON-NLS-1$ //$NON-NLS-2$
        gpacShaderDirectory = dataDir + "/shaders/";
        Log.v(LOG_GPAC_CONFIG, "Using directory " + gpacShaderDirectory + " for shader files"); //$NON-NLS-1$ //$NON-NLS-2$
        // Logs live on external storage so they survive app reinstalls and are user-accessible.
        File osmo = new File(Environment.getExternalStorageDirectory(), "osmo"); //$NON-NLS-1$
        gpacLogDirectory = osmo.getAbsolutePath() + "/log/";
        Log.v(LOG_GPAC_CONFIG, "Using directory " + gpacLogDirectory + " for log files"); //$NON-NLS-1$ //$NON-NLS-2$

        //check if GPAC.cfg exists in /sdcard/osmo
        File gpac_cfg = new File(osmo.getAbsolutePath(), "GPAC.cfg");
        if (gpac_cfg.exists())
            gpacConfigDirectory = osmo.getAbsolutePath() + "/";
        else
            gpacConfigDirectory = null;
    }

    /**
     * Ensures all directories are created
     *
     * NOTE(review): only app, cache, shader and log directories are created here; the GUI and
     * config directories are not — confirm they are provisioned elsewhere (e.g. at install time).
     *
     * @return The {@link GpacConfig} instance itself
     */
    public GpacConfig ensureAllDirectoriesExist() {
        for (String s : new String[] { gpacAppDirectory,
                                       gpacCacheDirectory,
                                       gpacShaderDirectory,
                                       gpacLogDirectory }) {
            createDirIfNotExist(s);
        }
        return this;
    }

    /**
     * Default directory for GPAC configuration directory, ends with /
     *
     * @return the gpacAppDirectory
     */
    public String getGpacAppDirectory() {
        return gpacAppDirectory;
    }

    /**
     * Default directory for GPAC configuration directory, ends with /
     *
     * @return the gpacConfigDirectory, or null when no GPAC.cfg was found in /sdcard/osmo
     */
    public String getGpacConfigDirectory() {
        return gpacConfigDirectory;
    }

    /**
     * Directory of Android containing all fonts
     *
     * @return the gpacFontDirectory
     */
    public String getGpacFontDirectory() {
        return gpacFontDirectory;
    }

    /**
     * Default directory for GPAC modules directory, ends with /
     *
     * @return the gpacModulesDirectory
     */
    public String getGpacModulesDirectory() {
        // Modules are shipped alongside the native libraries, so reuse that directory.
        // return gpacModulesDirectory;
        return gpacLibsDirectory;
    }

    /**
     * @return the gpacLibsDirectory
     */
    public String getGpacLibsDirectory() {
        return gpacLibsDirectory;
    }

    /**
     * Default directory for cached files
     *
     * @return the gpacCacheDirectory
     */
    public String getGpacCacheDirectory() {
        return gpacCacheDirectory;
    }
/** * Default directory for GUI files * * @return the gpacGuiDirectory */ public String getGpacGuiDirectory() { return gpacGuiDirectory; } /** * Default directory for shader files * * @return the gpacShaderDirectory */ public String getGpacShaderDirectory() { return gpacShaderDirectory; } /** * Default directory for log files * * @return the gpacLogDirectory */ public String getGpacLogDirectory() { return gpacLogDirectory; } private final String gpacAppDirectory; private final String gpacConfigDirectory; private final String gpacFontDirectory = "/system/fonts/"; //$NON-NLS-1$ // private final String gpacModulesDirectory; private final String gpacLibsDirectory; private final String gpacCacheDirectory; private final String gpacGuiDirectory; private final String gpacShaderDirectory; private final String gpacLogDirectory; /** * Creates a given directory if it does not exist * * @param path */ private static boolean createDirIfNotExist(String path) { File f = new File(path); if (!f.exists()) { if (!f.mkdirs()) { Log.e(LOG_GPAC_CONFIG, "Failed to create directory " + path); //$NON-NLS-1$ return false; } else { Log.i(LOG_GPAC_CONFIG, "Created directory " + path); //$NON-NLS-1$ } } return true; } /** * Get the GPAC.cfg file * * @return the file */ public File getGpacConfigFile() { return new File(getGpacConfigDirectory(), "GPAC.cfg"); //$NON-NLS-1$ } /** * Get the GPAC.cfg file * * @return the file */ public File getGpacLastRevFile() { return new File(getGpacConfigDirectory(), "lastRev.txt"); //$NON-NLS-1$ } /** * Get the configuration as text * * @return a String with newlines representing all the configuration */ public String getConfigAsText() { StringBuilder sb = new StringBuilder(); sb.append("GpacAppDirectory=").append(getGpacAppDirectory()).append('\n'); //$NON-NLS-1$ sb.append("GpacModulesDirectory=").append(getGpacModulesDirectory()).append('\n'); //$NON-NLS-1$ sb.append("GpacFontDirectory=").append(getGpacFontDirectory()).append('\n'); //$NON-NLS-1$ 
sb.append("GpacCacheDirectory=").append(getGpacCacheDirectory()).append('\n'); //$NON-NLS-1$ sb.append("GpacGuiDirectory=").append(getGpacGuiDirectory()).append('\n'); //$NON-NLS-1$ sb.append("GpacShaderDirectory=").append(getGpacShaderDirectory()).append('\n'); //$NON-NLS-1$ return sb.toString(); } }
package i5.las2peer.services.ocd.adapters.graphInput; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.StringReader; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Date; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.w3c.dom.DOMException; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Text; import org.w3c.dom.Attr; import org.w3c.dom.NodeList; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import i5.las2peer.services.ocd.adapters.AdapterException; import i5.las2peer.services.ocd.graphs.CustomGraph; //import i5.las2peer.services.ocd.utils.DocIndexer; import y.base.Edge; import y.base.Node; import y.view.EdgeLabel; import y.view.EdgeRealizer; import y.view.LineType; import java.io.Reader; import java.io.FileReader; //TODO: Currently only for the youtube graph, make more general //TODO: Be able to have more Attributes for nodes(at least string id's) and maybe edges(at least type) in general public class XGMMLGraphInputAdapter extends AbstractGraphInputAdapter { public XGMMLGraphInputAdapter() { } ///////////////// //// Variables//// ///////////////// /** * Variables for to check for different edge types */ private String type1 = ""; private String type2 = ""; private String type3 = ""; /** * Variable to look for edge type indicators in values, if empty string, the * type indicators are understood as keys */ private String key = ""; @Override public void setParameter(Map<String, String> param) throws IllegalArgumentException, ParseException { // if (param.containsKey("key")) { // key = param.get("key"); // } // if (param.containsKey("type1")) { // type1 = param.get("type1"); // } // if (param.containsKey("type2")) { // type2 = param.get("type2"); // } // if 
(param.containsKey("type3")) { // type3 = param.get("type3"); // } } // Ignore for now as LineTypes are not stored in persistence for some reason public void setLineType(Element edgeElement, Edge edge, CustomGraph graph) { if (type1 != "" || type2 != "" || type3 != "") { EdgeRealizer eRealizer = graph.getRealizer(edge); NodeList atts = edgeElement.getChildNodes(); if (atts.getLength() != 0) { if (key.contentEquals("")) { for (int u = 0; u < atts.getLength(); u++) { if(atts.item(u).getNodeType() == 1) { Element e = (Element) atts.item(u); System.out.println(e.getAttribute(type2)); System.out.println(e.getAttribute("name")); if (type1 != "" && e.hasAttribute(type1)) { eRealizer.setLineType(LineType.LINE_1); break; } else if (type2 != "" && e.hasAttribute(type2)) { eRealizer.setLineType(LineType.DASHED_1); System.out.println(eRealizer.getLineType().equals(LineType.DASHED_1)); break; } else if (type3 != "" && e.hasAttribute(type3)) { eRealizer.setLineType(LineType.DOTTED_1); break; } } } } else { for (int u = 0; u < atts.getLength(); u++) { if(atts.item(u).getNodeType() == 1) { Element e = (Element) atts.item(u); if (type1 != "" && e.getAttribute(key).contentEquals(type1)) { eRealizer.setLineType(LineType.LINE_1); break; } else if (type2 != "" && e.getAttribute(key).contentEquals(type2)) { eRealizer.setLineType(LineType.DASHED_1); break; } else if (type3 != "" && e.getAttribute(key).contentEquals(type3)) { eRealizer.setLineType(LineType.DOTTED_1); break; } } } } } } } @Override public CustomGraph readGraph() throws AdapterException { CustomGraph graph = new CustomGraph(); Map<String, Node> nodeIds = new HashMap<String, Node>(); // Map<String, String> nodeContents = new HashMap<String, String>(); try { // File file = new File(filePath); DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); DocumentBuilder docBuilder; docBuilder = factory.newDocumentBuilder(); //this.reader = new FileReader(filePath); //TODO: Only for tests, find a better way to run 
those and to not have to comment/uncomment this everytime BufferedReader br = new BufferedReader(this.reader); String line = null; StringBuilder sb = new StringBuilder(); while ((line = br.readLine()) != null) { sb.append(line); } InputSource is = new InputSource(new StringReader(sb.toString())); // System.out.println(sb.toString()); Document doc = docBuilder.parse(is); Element docElement = doc.getDocumentElement(); boolean undirected = false; if (Integer.parseInt(docElement.getAttribute("directed")) == 0) { undirected = true; } NodeList nodeList = docElement.getElementsByTagName("node"); int nodeListLength = nodeList.getLength(); //System.out.println("GRAPH LEN: " + nodeListLength); for (int i = 0; i < nodeListLength; i++) { Element e = (Element) nodeList.item(i); // Date d = df.parse(e.getAttribute("CreationDate")); Node node; // String customNodeContent = textProc.preprocText(e.getAttribute("Body")); String customNodeId = e.getAttribute("id"); String customNodeName = ""; // String customNodeParent = e.getAttribute("ParentId"); // TODO: Get rid of customNodeName, apparently not able to add more attributes NodeList attributes = e.getElementsByTagName("att"); for (int a = 0; a < attributes.getLength(); a++) { if (((Element) attributes.item(a)).getAttribute("name") == "snippet") { NodeList snippetAttributes = e.getElementsByTagName("att"); for (int b = 0; b < snippetAttributes.getLength(); b++) { if (((Element) snippetAttributes.item(a)).getAttribute("name") == "title") { customNodeName = ((Element) snippetAttributes.item(a)).getAttribute("name"); } else break; } break; } } if (customNodeName == "") { customNodeName = customNodeId; } // node does not yet exist if (!nodeIds.containsKey(customNodeId)) { node = graph.createNode(); // create new node and add attributes graph.setNodeName(node, customNodeId); nodeIds.put(customNodeId, node); // nodeContents.put(customNodeName, customNodeContent); } // TODO: Maybe do an else case } // A bit confusing due to the class 
NodeList edgeList = docElement.getElementsByTagName("edge"); int edgeListLength = edgeList.getLength(); Map<String, Edge> edgeMap = new HashMap<String, Edge>(); // create edges for each entry in the temporary edge list for (int i = 0; i < edgeListLength; i++) { Element e = (Element) edgeList.item(i); if (nodeIds.containsKey(e.getAttribute("source")) && nodeIds.containsKey(e.getAttribute("target"))) { if (!edgeMap.containsKey(e.getAttribute("label"))) { Edge edge = graph.createEdge(nodeIds.get(e.getAttribute("source")), nodeIds.get(e.getAttribute("target"))); //setLineType(e, edge, graph); if (undirected) { Edge reverseEdge = graph.createEdge(nodeIds.get(e.getAttribute("target")), nodeIds.get(e.getAttribute("source"))); //graph.getRealizer(reverseEdge).setLineType(graph.getRealizer(edge).getLineType()); } edgeMap.put(e.getAttribute("source") + e.getAttribute("target"), edge); } } else { continue; } } } catch (ParserConfigurationException e) { e.printStackTrace(); } catch (SAXException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } catch (DOMException e) { // TODO Auto-generated catch block e.printStackTrace(); } return graph; } }
/*******************************************************************************
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *******************************************************************************/
package org.ofbiz.entity.config.model;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.ofbiz.base.lang.ThreadSafe;
import org.ofbiz.base.util.Debug;
import org.ofbiz.base.util.UtilXml;
import org.ofbiz.entity.GenericEntityConfException;
import org.w3c.dom.Element;

/**
 * An object that models the <code>&lt;datasource&gt;</code> element.
 *
 * SCIPIO: 2.1.0: Added system property override support: <code>-Dscipio.entity.datasource.[name].[the-attr]=[value]</code>
 *
 * @see <code>entity-config.xsd</code>
 */
@ThreadSafe
public final class Datasource {

    private static final Debug.OfbizLogger module = Debug.getOfbizLogger(java.lang.invoke.MethodHandles.lookup().lookupClass());

    // Prefix for per-datasource, per-attribute system property overrides; see getAttr().
    protected static final String SYSPROP_NAME = "scipio.entity.datasource";

    /*
    public static final int TYPE_JNDI_JDBC = 1;
    public static final int TYPE_INLINE_JDBC = 2;
    public static final int TYPE_TYREX_DATA_SOURCE = 3;
    public static final int TYPE_OTHER = 4;
    */

    // All fields are final and assigned once in the constructor, making instances immutable.
    private final String name; // type = xs:string
    private final String helperClass; // type = xs:string
    private final String fieldTypeName; // type = xs:string
    private final boolean useSchemas;
    private final String schemaName; // type = xs:string
    private final boolean checkOnStart;
    private final boolean addMissingOnStart;
    private final boolean usePkConstraintNames;
    private final boolean checkPksOnStart;
    private final int constraintNameClipLength; // type = xs:nonNegativeInteger
    private final boolean useProxyCursor;
    private final String proxyCursorName; // type = xs:string
    private final int resultFetchSize; // type = xs:integer
    private final boolean useForeignKeys;
    private final boolean useForeignKeyIndices;
    private final boolean checkFksOnStart;
    private final boolean checkFkIndicesOnStart;
    private final String fkStyle;
    private final boolean useFkInitiallyDeferred;
    private final boolean useIndices;
    private final boolean useIndicesUnique;
    private final boolean checkIndicesOnStart;
    private final boolean checkModifiedIndicesOnStart; // SCIPIO
    private final String joinStyle;
    private final boolean aliasViewColumns;
    private final boolean alwaysUseConstraintKeyword;
    private final boolean dropFkUseForeignKeyKeyword;
    private final boolean useBinaryTypeForBlob;
    private final boolean useOrderByNulls;
    private final String offsetStyle;
    private final String tableType; // type = xs:string
    private final String characterSet; // type = xs:string
    private final String collate; // type = xs:string
    private final String rowFormat;
    private final int maxWorkerPoolSize; // type = xs:integer
    private final List<SqlLoadPath> sqlLoadPathList; // <sql-load-path>
    private final List<ReadData> readDataList; // <read-data>
    private final InlineJdbc inlineJdbc; // <inline-jdbc>
    private final JndiJdbc jndiJdbc; // <jndi-jdbc>
    private final TyrexDataSource tyrexDataSource; // <tyrex-dataSource>

    /**
     * Builds a Datasource from a <code>&lt;datasource&gt;</code> configuration element.
     *
     * <p>Every attribute except <code>name</code> is read through {@link #getAttr}, so a
     * <code>-Dscipio.entity.datasource.[name].[attr]</code> system property overrides the
     * XML value. Boolean attributes use one of two default conventions: attributes parsed
     * with <code>!"false".equals(...)</code> default to true, attributes parsed with
     * <code>"true".equals(...)</code> default to false.</p>
     *
     * <p>NOTE(review): the <code>catch (Exception e)</code> blocks around the integer
     * parses discard the cause when rethrowing; preserved as-is since the visible
     * GenericEntityConfException usage here only takes a message.</p>
     *
     * @param element the <code>&lt;datasource&gt;</code> DOM element
     * @throws GenericEntityConfException if <code>name</code>, <code>helper-class</code> or
     *         <code>field-type-name</code> is empty, a numeric attribute is invalid, or more
     *         than one JDBC child element is present
     */
    Datasource(Element element) throws GenericEntityConfException {
        // SCIPIO: Modified for command-line override support
        String lineNumberText = EntityConfig.createConfigFileLineNumberText(element);
        // --- required string attributes ---
        String name = element.getAttribute("name").intern();
        if (name.isEmpty()) {
            throw new GenericEntityConfException("<datasource> element name attribute is empty" + lineNumberText);
        }
        this.name = name;
        String helperClass = getAttr(name, element, "helper-class");
        if (helperClass.isEmpty()) {
            throw new GenericEntityConfException("<datasource> element helper-class attribute is empty" + lineNumberText);
        }
        this.helperClass = helperClass;
        String fieldTypeName = getAttr(name, element, "field-type-name");
        if (fieldTypeName.isEmpty()) {
            throw new GenericEntityConfException("<datasource> element field-type-name attribute is empty" + lineNumberText);
        }
        this.fieldTypeName = fieldTypeName;
        // --- schema / startup-check flags ---
        this.useSchemas = !"false".equals(getAttr(name, element, "use-schemas"));
        this.schemaName = getAttr(name, element, "schema-name");
        this.checkOnStart = !"false".equals(getAttr(name, element, "check-on-start"));
        this.addMissingOnStart = "true".equals(getAttr(name, element, "add-missing-on-start"));
        this.usePkConstraintNames = !"false".equals(getAttr(name, element, "use-pk-constraint-names"));
        this.checkPksOnStart = !"false".equals(getAttr(name, element, "check-pks-on-start"));
        // constraint-name-clip-length defaults to 30 when absent.
        String constraintNameClipLength = getAttr(name, element, "constraint-name-clip-length");
        if (constraintNameClipLength.isEmpty()) {
            this.constraintNameClipLength = 30;
        } else {
            try {
                this.constraintNameClipLength = Integer.parseInt(constraintNameClipLength);
            } catch (Exception e) {
                throw new GenericEntityConfException("<datasource> element constraint-name-clip-length attribute is invalid" + lineNumberText);
            }
        }
        this.useProxyCursor = "true".equalsIgnoreCase(getAttr(name, element, "use-proxy-cursor"));
        // proxy-cursor-name defaults to "p_cursor" when absent.
        String proxyCursorName = getAttr(name, element, "proxy-cursor-name");
        if (proxyCursorName.isEmpty()) {
            proxyCursorName = "p_cursor";
        }
        this.proxyCursorName = proxyCursorName;
        // result-fetch-size defaults to -1 (driver default) when absent.
        String resultFetchSize = getAttr(name, element, "result-fetch-size");
        if (resultFetchSize.isEmpty()) {
            this.resultFetchSize = -1;
        } else {
            try {
                this.resultFetchSize = Integer.parseInt(resultFetchSize);
            } catch (Exception e) {
                throw new GenericEntityConfException("<datasource> element result-fetch-size attribute is invalid" + lineNumberText);
            }
        }
        // --- foreign key / index handling ---
        this.useForeignKeys = !"false".equals(getAttr(name, element, "use-foreign-keys"));
        this.useForeignKeyIndices = !"false".equals(getAttr(name, element, "use-foreign-key-indices"));
        this.checkFksOnStart = "true".equals(getAttr(name, element, "check-fks-on-start"));
        this.checkFkIndicesOnStart = "true".equals(getAttr(name, element, "check-fk-indices-on-start"));
        // fk-style defaults to "name_constraint" when absent.
        String fkStyle = getAttr(name, element, "fk-style");
        if (fkStyle.isEmpty()) {
            fkStyle = "name_constraint";
        }
        this.fkStyle = fkStyle;
        this.useFkInitiallyDeferred = "true".equals(getAttr(name, element, "use-fk-initially-deferred"));
        this.useIndices = !"false".equals(getAttr(name, element, "use-indices"));
        this.useIndicesUnique = !"false".equals(getAttr(name, element, "use-indices-unique"));
        this.checkIndicesOnStart = "true".equals(getAttr(name, element, "check-indices-on-start"));
        // SCIPIO: only meaningful when index checking is enabled at all.
        this.checkModifiedIndicesOnStart = this.checkIndicesOnStart && !"false".equals(getAttr(name, element, "check-modified-indices-on-start"));
        // --- SQL dialect options ---
        // join-style defaults to "ansi" when absent.
        String joinStyle = getAttr(name, element, "join-style");
        if (joinStyle.isEmpty()) {
            joinStyle = "ansi";
        }
        this.joinStyle = joinStyle;
        // NOTE(review): the field is named aliasViewColumns but the attribute read is
        // "alias-columns" — confirm against entity-config.xsd before changing either.
        this.aliasViewColumns = "true".equals(getAttr(name, element, "alias-columns"));
        this.alwaysUseConstraintKeyword = "true".equals(getAttr(name, element, "always-use-constraint-keyword"));
        this.dropFkUseForeignKeyKeyword = "true".equals(getAttr(name, element, "drop-fk-use-foreign-key-keyword"));
        this.useBinaryTypeForBlob = "true".equals(getAttr(name, element, "use-binary-type-for-blob"));
        this.useOrderByNulls = "true".equals(getAttr(name, element, "use-order-by-nulls"));
        // offset-style defaults to "none" when absent.
        String offsetStyle = getAttr(name, element, "offset-style");
        if (offsetStyle.isEmpty()) {
            offsetStyle = "none";
        }
        this.offsetStyle = offsetStyle;
        this.tableType = getAttr(name, element, "table-type");
        this.characterSet = getAttr(name, element, "character-set");
        this.collate = getAttr(name, element, "collate");
        this.rowFormat = getAttr(name, element, "row-format");
        // max-worker-pool-size: absent or 0 -> 1; a negative value N means |N| workers
        // per available processor.
        String maxWorkerPoolSize = getAttr(name, element, "max-worker-pool-size");
        if (maxWorkerPoolSize.isEmpty()) {
            this.maxWorkerPoolSize = 1;
        } else {
            try {
                int maxWorkerPoolSizeInt = Integer.parseInt(maxWorkerPoolSize);
                if (maxWorkerPoolSizeInt == 0) {
                    maxWorkerPoolSizeInt = 1;
                } else if (maxWorkerPoolSizeInt < 0) {
                    maxWorkerPoolSizeInt = Math.abs(maxWorkerPoolSizeInt) * Runtime.getRuntime().availableProcessors();
                }
                this.maxWorkerPoolSize = maxWorkerPoolSizeInt;
            } catch (NumberFormatException e) {
                throw new GenericEntityConfException("<datasource> element max-worker-pool-size attribute is invalid" + lineNumberText);
            }
        }
        // --- child elements: collected into unmodifiable lists (empty list when absent) ---
        List<? extends Element> sqlLoadPathElementList = UtilXml.childElementList(element, "sql-load-path");
        if (sqlLoadPathElementList.isEmpty()) {
            this.sqlLoadPathList = Collections.emptyList();
        } else {
            List<SqlLoadPath> sqlLoadPathList = new ArrayList<SqlLoadPath>(sqlLoadPathElementList.size());
            for (Element sqlLoadPathElement : sqlLoadPathElementList) {
                sqlLoadPathList.add(new SqlLoadPath(sqlLoadPathElement));
            }
            this.sqlLoadPathList = Collections.unmodifiableList(sqlLoadPathList);
        }
        List<? extends Element> readDataElementList = UtilXml.childElementList(element, "read-data");
        if (readDataElementList.isEmpty()) {
            this.readDataList = Collections.emptyList();
        } else {
            List<ReadData> readDataList = new ArrayList<ReadData>(readDataElementList.size());
            for (Element readDataElement : readDataElementList) {
                readDataList.add(new ReadData(readDataElement));
            }
            this.readDataList = Collections.unmodifiableList(readDataList);
        }
        // --- JDBC child elements: at most one of the three may be present ---
        int jdbcElementCount = 0;
        Element inlineJdbcElement = UtilXml.firstChildElement(element, "inline-jdbc");
        if (inlineJdbcElement == null) {
            this.inlineJdbc = null;
        } else {
            this.inlineJdbc = new InlineJdbc(inlineJdbcElement);
            jdbcElementCount++;
        }
        Element jndiJdbcElement = UtilXml.firstChildElement(element, "jndi-jdbc");
        if (jndiJdbcElement == null) {
            this.jndiJdbc = null;
        } else {
            this.jndiJdbc = new JndiJdbc(jndiJdbcElement);
            jdbcElementCount++;
        }
        Element tyrexElement = UtilXml.firstChildElement(element, "tyrex-dataSource");
        if (tyrexElement == null) {
            this.tyrexDataSource = null;
        } else {
            this.tyrexDataSource = new TyrexDataSource(tyrexElement);
            jdbcElementCount++;
        }
        if (jdbcElementCount > 1) {
            throw new GenericEntityConfException("<datasource> element is invalid: Only one of <inline-jdbc>, <jndi-jdbc>, <tyrex-dataSource> is allowed" + lineNumberText);
        }
    }

    /** Returns the value of the <code>name</code> attribute. */
    public String getName() {
        return this.name;
    }

    /** Returns the value of the <code>helper-class</code> attribute. */
    public String getHelperClass() {
        return this.helperClass;
    }

    /** Returns the value of the <code>field-type-name</code> attribute. */
    public String getFieldTypeName() {
        return this.fieldTypeName;
    }

    /** Returns the value of the <code>use-schemas</code> attribute. */
    public boolean getUseSchemas() {
        return this.useSchemas;
    }

    /** Returns the value of the <code>schema-name</code> attribute. */
    public String getSchemaName() {
        return this.schemaName;
    }

    /** Returns the value of the <code>check-on-start</code> attribute. */
    public boolean getCheckOnStart() {
        return this.checkOnStart;
    }

    /** Returns the value of the <code>add-missing-on-start</code> attribute. */
    public boolean getAddMissingOnStart() {
        return this.addMissingOnStart;
    }

    /** Returns the value of the <code>use-pk-constraint-names</code> attribute. */
    public boolean getUsePkConstraintNames() {
        return this.usePkConstraintNames;
    }

    /** Returns the value of the <code>check-pks-on-start</code> attribute. */
    public boolean getCheckPksOnStart() {
        return this.checkPksOnStart;
    }

    /** Returns the value of the <code>constraint-name-clip-length</code> attribute. */
    public int getConstraintNameClipLength() {
        return this.constraintNameClipLength;
    }

    /** Returns the value of the <code>use-proxy-cursor</code> attribute. */
    public boolean getUseProxyCursor() {
        return this.useProxyCursor;
    }

    /** Returns the value of the <code>proxy-cursor-name</code> attribute. */
    public String getProxyCursorName() {
        return this.proxyCursorName;
    }

    /** Returns the value of the <code>result-fetch-size</code> attribute. */
    public int getResultFetchSize() {
        return this.resultFetchSize;
    }

    /** Returns the value of the <code>use-foreign-keys</code> attribute. */
    public boolean getUseForeignKeys() {
        return this.useForeignKeys;
    }

    /** Returns the value of the <code>use-foreign-key-indices</code> attribute. */
    public boolean getUseForeignKeyIndices() {
        return this.useForeignKeyIndices;
    }

    /** Returns the value of the <code>check-fks-on-start</code> attribute. */
    public boolean getCheckFksOnStart() {
        return this.checkFksOnStart;
    }

    /** Returns the value of the <code>check-fk-indices-on-start</code> attribute. */
    public boolean getCheckFkIndicesOnStart() {
        return this.checkFkIndicesOnStart;
    }

    /** Returns the value of the <code>fk-style</code> attribute. */
    public String getFkStyle() {
        return this.fkStyle;
    }

    /** Returns the value of the <code>use-fk-initially-deferred</code> attribute. */
    public boolean getUseFkInitiallyDeferred() {
        return this.useFkInitiallyDeferred;
    }

    /** Returns the value of the <code>use-indices</code> attribute. */
    public boolean getUseIndices() {
        return this.useIndices;
    }

    /** Returns the value of the <code>use-indices-unique</code> attribute. */
    public boolean getUseIndicesUnique() {
        return this.useIndicesUnique;
    }

    /** Returns the value of the <code>check-indices-on-start</code> attribute. */
    public boolean getCheckIndicesOnStart() {
        return this.checkIndicesOnStart;
    }

    /**
     * Returns the value of the <code>check-modified-indices-on-start</code> attribute.
     * <p>SCIPIO: 2.1.0: Added.</p>
     */
    public boolean getCheckModifiedIndicesOnStart() {
        return this.checkModifiedIndicesOnStart;
    }

    /** Returns the value of the <code>join-style</code> attribute. */
    public String getJoinStyle() {
        return this.joinStyle;
    }

    /** Returns the value of the <code>alias-columns</code> attribute (see field note). */
    public boolean getAliasViewColumns() {
        return this.aliasViewColumns;
    }

    /** Returns the value of the <code>always-use-constraint-keyword</code> attribute. */
    public boolean getAlwaysUseConstraintKeyword() {
        return this.alwaysUseConstraintKeyword;
    }

    /** Returns the value of the <code>drop-fk-use-foreign-key-keyword</code> attribute. */
    public boolean getDropFkUseForeignKeyKeyword() {
        return this.dropFkUseForeignKeyKeyword;
    }

    /** Returns the value of the <code>use-binary-type-for-blob</code> attribute. */
    public boolean getUseBinaryTypeForBlob() {
        return this.useBinaryTypeForBlob;
    }

    /** Returns the value of the <code>use-order-by-nulls</code> attribute. */
    public boolean getUseOrderByNulls() {
        return this.useOrderByNulls;
    }

    /** Returns the value of the <code>offset-style</code> attribute. */
    public String getOffsetStyle() {
        return this.offsetStyle;
    }

    /** Returns the value of the <code>table-type</code> attribute. */
    public String getTableType() {
        return this.tableType;
    }

    /** Returns the value of the <code>character-set</code> attribute. */
    public String getCharacterSet() {
        return this.characterSet;
    }

    /** Returns the value of the <code>collate</code> attribute. */
    public String getCollate() {
        return this.collate;
    }

    /** Returns the value of the <code>row-format</code> attribute. */
    public String getRowFormat() {
        return this.rowFormat;
    }

    /** Returns the value of the <code>max-worker-pool-size</code> attribute. */
    public int getMaxWorkerPoolSize() {
        return this.maxWorkerPoolSize;
    }

    /** Returns the <code>&lt;sql-load-path&gt;</code> child elements. */
    public List<SqlLoadPath> getSqlLoadPathList() {
        return this.sqlLoadPathList;
    }

    /** Returns the <code>&lt;read-data&gt;</code> child elements. */
    public List<ReadData> getReadDataList() {
        return this.readDataList;
    }

    /** Returns the <code>&lt;inline-jdbc&gt;</code> child element. */
    public InlineJdbc getInlineJdbc() {
        return this.inlineJdbc;
    }

    /** Returns the <code>&lt;jndi-jdbc&gt;</code> child element. */
    public JndiJdbc getJndiJdbc() {
        return this.jndiJdbc;
    }

    /** Returns the <code>&lt;tyrex-dataSource&gt;</code> child element. */
    public TyrexDataSource getTyrexDataSource() {
        return this.tyrexDataSource;
    }

    /**
     * Reads an attribute, preferring a <code>scipio.entity.datasource.[name].[attrName]</code>
     * system property override when one is set; otherwise returns the XML attribute value.
     *
     * @param dataSourceName the datasource name used to build the property key
     * @param element the element to read the attribute from
     * @param attrName the attribute name
     * @return the overriding system property value, or the (interned) XML attribute value
     */
    protected static String getAttr(String dataSourceName, Element element, String attrName) { // SCIPIO
        String propName = SYSPROP_NAME + "." + dataSourceName + "." + attrName;
        String value = System.getProperty(propName);
        if (value != null) {
            Debug.logInfo("Applied attribute from system property: [" + propName + "=" + value + "]", module);
            return value;
        }
        return element.getAttribute(attrName).intern();
    }
}
/*******************************************************************************
 * Copyright FUJITSU LIMITED 2017
 *******************************************************************************/

package org.oscm.billingservice.service;

import static org.mockito.Mockito.mock;

import java.math.BigDecimal;
import java.util.Arrays;
import java.util.Calendar;
import java.util.TimeZone;
import java.util.concurrent.Callable;

import org.junit.Assert;
import org.junit.Test;
import org.oscm.accountservice.dao.UserLicenseDao;
import org.oscm.billingservice.business.calculation.revenue.RevenueCalculatorBean;
import org.oscm.billingservice.business.calculation.share.SharesCalculatorBean;
import org.oscm.billingservice.dao.BillingDataRetrievalServiceBean;
import org.oscm.billingservice.dao.SharesDataRetrievalServiceBean;
import org.oscm.communicationservice.bean.CommunicationServiceBean;
import org.oscm.dataservice.bean.DataServiceBean;
import org.oscm.dataservice.local.DataService;
import org.oscm.domobjects.BillingResult;
import org.oscm.domobjects.Organization;
import org.oscm.domobjects.PlatformUser;
import org.oscm.domobjects.SupportedCurrency;
import org.oscm.domobjects.enums.OrganizationReferenceType;
import org.oscm.i18nservice.bean.LocalizerServiceBean;
import org.oscm.internal.intf.BillingService;
import org.oscm.internal.types.enumtypes.OrganizationRoleType;
import org.oscm.internal.types.exception.NonUniqueBusinessKeyException;
import org.oscm.test.EJBTestBase;
import org.oscm.test.data.Organizations;
import org.oscm.test.data.SupportedCountries;
import org.oscm.test.data.SupportedCurrencies;
import org.oscm.test.ejb.TestContainer;
import org.oscm.test.stubs.ConfigurationServiceStub;
import org.oscm.test.stubs.TriggerQueueServiceStub;

/**
 * Integration tests for {@link BillingService#getCustomerBillingData}.
 *
 * <p>Fixture: one customer with two billing periods (Jan and Feb 2011) billed
 * by {@code supplier}, plus one Feb-2011 billing result charged by
 * {@code supplier2}. The tests verify that the start/end limits and the
 * charging organization correctly filter the exported XML.</p>
 */
// Temporarily ignored because of RQ: Flexible billing cut-off day
public class BillingServiceGetCustomerBillingDataIT extends EJBTestBase {

    protected DataService dm;
    protected Organization supplier;
    protected PlatformUser supplierUser;
    protected Organization supplier2;
    protected PlatformUser supplier2User;
    protected Organization customer;
    protected BillingResult br1;
    protected BillingResult br2;
    protected BillingResult brForSupplier2;
    private BillingService bs;

    // Period boundaries of the two billing results created in setup().
    protected long start1;
    protected long end1;
    protected long start2;
    protected long end2;

    private static final BigDecimal GROSS_REVENUE = BigDecimal.valueOf(743342);
    private static final BigDecimal NET_REVENUE = BigDecimal.valueOf(423746);

    @Override
    protected void setup(TestContainer container) throws Exception {
        container.login("1");
        container.addBean(new ConfigurationServiceStub());
        container.addBean(new DataServiceBean());
        container.addBean(new LocalizerServiceBean());
        container.addBean(new BillingDataRetrievalServiceBean());
        container.addBean(mock(SharesDataRetrievalServiceBean.class));
        container.addBean(new TriggerQueueServiceStub());
        container.addBean(new RevenueCalculatorBean());
        container.addBean(mock(SharesCalculatorBean.class));
        // NOTE(review): ConfigurationServiceStub is registered a second time
        // here; the later registration presumably wins. Kept as-is — confirm
        // whether the duplicate is intentional before removing it.
        container.addBean(new ConfigurationServiceStub());
        container.addBean(new CommunicationServiceBean());
        container.addBean(new UserLicenseDao());
        container.addBean(new BillingServiceBean());

        dm = container.get(DataService.class);
        bs = container.get(BillingService.class);

        // Transaction 1: base data — roles, payment types, currencies,
        // countries, the two suppliers, and one customer of supplier 1 that is
        // also referenced by supplier 2.
        runTX(new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                createOrganizationRoles(dm);
                createPaymentTypes(dm);
                createSupportedCurrencies(dm);
                SupportedCountries.createSomeSupportedCountries(dm);
                dm.flush();
                supplier = Organizations.createOrganization(dm,
                        OrganizationRoleType.SUPPLIER);
                supplierUser = Organizations.createUserForOrg(dm, supplier,
                        true, "admin");
                customer = Organizations.createCustomer(dm, supplier);
                supplier2 = Organizations.createOrganization(dm,
                        OrganizationRoleType.SUPPLIER);
                supplier2User = Organizations.createUserForOrg(dm, supplier2,
                        true, "admin2");
                Organizations.createOrganizationReference(supplier2, customer,
                        OrganizationReferenceType.SUPPLIER_TO_CUSTOMER, dm);
                return null;
            }
        });

        // Transaction 2: billing results for two consecutive monthly periods
        // charged by supplier 1, plus one result charged by supplier 2.
        runTX(new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                start1 = getDate(2011, 0, 1);
                end1 = getDate(2011, 1, 1);
                start2 = getDate(2011, 1, 1);
                end2 = getDate(2011, 2, 1);
                SupportedCurrency currency_EUR = SupportedCurrencies
                        .findOrCreate(dm, "EUR");
                br1 = createBillingData(dm, customer.getKey(), start1, end1,
                        getBillingXml(start1, end1, null), supplier.getKey(),
                        currency_EUR);
                br2 = createBillingData(dm, customer.getKey(), start2, end2,
                        getBillingXml(start2, end2, null), supplier.getKey(),
                        currency_EUR);
                brForSupplier2 = createBillingData(dm, customer.getKey(),
                        start2, end2, getBillingXml(start2, end2, supplier2),
                        supplier2.getKey(), currency_EUR);
                return null;
            }
        });
    }

    /** No limits: both periods of the calling supplier are exported. */
    @Test
    public void testBug7414_WithoutLimits() throws Exception {
        container.login(supplierUser.getKey(), ROLE_SERVICE_MANAGER);
        byte[] data = bs.getCustomerBillingData(null, null,
                Arrays.asList(new String[] { customer.getOrganizationId() }));
        String result = new String(data, "UTF-8");
        Assert.assertTrue(result,
                result.contains(getBillingXml(start1, end1, null)));
        Assert.assertTrue(result,
                result.contains(getBillingXml(start2, end2, null)));
        Assert.assertFalse(result,
                result.contains(getBillingXml(start2, end2, supplier2)));
    }

    /** Upper limit equal to the end of period two still includes period two. */
    @Test
    public void testBug7414_EndEqualsPeriodTwoEnd() throws Exception {
        container.login(supplierUser.getKey(), ROLE_SERVICE_MANAGER);
        byte[] data = bs.getCustomerBillingData(null, Long.valueOf(end2),
                Arrays.asList(new String[] { customer.getOrganizationId() }));
        String result = new String(data, "UTF-8");
        Assert.assertTrue(result,
                result.contains(getBillingXml(start1, end1, null)));
        Assert.assertTrue(result,
                result.contains(getBillingXml(start2, end2, null)));
    }

    /** Lower limit equal to the start of period one still includes it. */
    @Test
    public void testBug7414_StartEqualsPeriodOneStart() throws Exception {
        container.login(supplierUser.getKey(), ROLE_SERVICE_MANAGER);
        byte[] data = bs.getCustomerBillingData(Long.valueOf(start1), null,
                Arrays.asList(new String[] { customer.getOrganizationId() }));
        String result = new String(data, "UTF-8");
        Assert.assertTrue(result,
                result.contains(getBillingXml(start1, end1, null)));
        Assert.assertTrue(result,
                result.contains(getBillingXml(start2, end2, null)));
    }

    /** Lower limit at start of period two excludes period one. */
    @Test
    public void testBug7414_StartEqualsPeriodTwoStart() throws Exception {
        container.login(supplierUser.getKey(), ROLE_SERVICE_MANAGER);
        byte[] data = bs.getCustomerBillingData(Long.valueOf(start2), null,
                Arrays.asList(new String[] { customer.getOrganizationId() }));
        String result = new String(data, "UTF-8");
        Assert.assertFalse(result,
                result.contains(getBillingXml(start1, end1, null)));
        Assert.assertTrue(result,
                result.contains(getBillingXml(start2, end2, null)));
    }

    /** Upper limit at end of period one excludes period two. */
    @Test
    public void testBug7414_EndEqualsPeriodOneEnd() throws Exception {
        container.login(supplierUser.getKey(), ROLE_SERVICE_MANAGER);
        byte[] data = bs.getCustomerBillingData(null, Long.valueOf(end1),
                Arrays.asList(new String[] { customer.getOrganizationId() }));
        String result = new String(data, "UTF-8");
        Assert.assertTrue(result,
                result.contains(getBillingXml(start1, end1, null)));
        Assert.assertFalse(result,
                result.contains(getBillingXml(start2, end2, null)));
    }

    /** Limits exactly spanning period one export only period one. */
    @Test
    public void testBug7414_LimitsEqualPeriodOne() throws Exception {
        container.login(supplierUser.getKey(), ROLE_SERVICE_MANAGER);
        byte[] data = bs.getCustomerBillingData(Long.valueOf(start1),
                Long.valueOf(end1),
                Arrays.asList(new String[] { customer.getOrganizationId() }));
        String result = new String(data, "UTF-8");
        Assert.assertTrue(result,
                result.contains(getBillingXml(start1, end1, null)));
        Assert.assertFalse(result,
                result.contains(getBillingXml(start2, end2, null)));
    }

    /** Limits exactly spanning period two export only period two. */
    @Test
    public void testBug7414_LimitsEqualPeriodTwo() throws Exception {
        container.login(supplierUser.getKey(), ROLE_SERVICE_MANAGER);
        byte[] data = bs.getCustomerBillingData(Long.valueOf(start2),
                Long.valueOf(end2),
                Arrays.asList(new String[] { customer.getOrganizationId() }));
        String result = new String(data, "UTF-8");
        Assert.assertFalse(result,
                result.contains(getBillingXml(start1, end1, null)));
        Assert.assertTrue(result,
                result.contains(getBillingXml(start2, end2, null)));
    }

    /** A supplier only sees billing results it charged itself. */
    @Test
    public void testBug9088_LimitsChargingOrg() throws Exception {
        container.login(supplier2User.getKey(), ROLE_SERVICE_MANAGER);
        byte[] data = bs.getCustomerBillingData(null, null, null);
        String result = new String(data, "UTF-8");
        Assert.assertFalse(result,
                result.contains(getBillingXml(start1, end1, null)));
        Assert.assertFalse(result,
                result.contains(getBillingXml(start2, end2, null)));
        Assert.assertTrue(result,
                result.contains(getBillingXml(start2, end2, supplier2)));
    }

    /** Charging-org filter also applies when a customer id is given. */
    @Test
    public void testBug9088_LimitsChargingOrgForSpecificCustomer()
            throws Exception {
        container.login(supplier2User.getKey(), ROLE_SERVICE_MANAGER);
        byte[] data = bs.getCustomerBillingData(null, null,
                Arrays.asList(new String[] { customer.getOrganizationId() }));
        String result = new String(data, "UTF-8");
        Assert.assertFalse(result,
                result.contains(getBillingXml(start1, end1, null)));
        Assert.assertFalse(result,
                result.contains(getBillingXml(start2, end2, null)));
        Assert.assertTrue(result,
                result.contains(getBillingXml(start2, end2, supplier2)));
    }

    /**
     * Persists one {@link BillingResult} fixture for the given customer,
     * period, XML payload, charging organization and currency.
     */
    private static BillingResult createBillingData(DataService mgr,
            long orgKey, long periodStartTime, long periodEndTime, String xml,
            long charchingOrgKey, SupportedCurrency currency_EUR)
            throws NonUniqueBusinessKeyException {
        BillingResult br = new BillingResult();
        br.setCreationTime(System.currentTimeMillis());
        br.setOrganizationTKey(orgKey);
        br.setPeriodStartTime(periodStartTime);
        br.setPeriodEndTime(periodEndTime);
        br.setResultXML(xml);
        br.setChargingOrgKey(charchingOrgKey);
        br.setCurrency(currency_EUR);
        br.setGrossAmount(GROSS_REVENUE);
        br.setNetAmount(NET_REVENUE);
        mgr.persist(br);
        mgr.flush();
        return br;
    }

    /** Returns midnight GMT of the given date as epoch millis. */
    private static long getDate(int year, int month, int date) {
        Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
        cal.set(year, month, date, 0, 0, 0);
        return cal.getTimeInMillis();
    }

    /**
     * Builds the billing-result XML fixture for one period. If a supplier is
     * given, a subscription entry keyed by the supplier's key is embedded so
     * results of different charging organizations are distinguishable.
     *
     * <p>BUGFIX: previously the {@code start} value was written into the
     * {@code endDate} attribute and {@code end} into {@code startDate}. Since
     * this helper is private and used both to create and to assert the XML,
     * swapping the values back is self-consistent and safe.</p>
     */
    private static String getBillingXml(long start, long end,
            Organization supplier) {
        StringBuilder buffer = new StringBuilder();
        buffer.append("<BillingDetails>");
        buffer.append("<Period endDate=\"");
        buffer.append(end);
        buffer.append("\" startDate=\"");
        buffer.append(start);
        buffer.append("\" />");
        if (supplier != null) {
            buffer.append("<Subscriptions>");
            buffer.append("<Subscription id=\"");
            buffer.append(Long.toString(supplier.getKey()));
            buffer.append("\" />");
            buffer.append("</Subscriptions>");
        }
        buffer.append("</BillingDetails>");
        return buffer.toString();
    }
}
/*
 * JBoss, Home of Professional Open Source.
 * Copyright 2012, Red Hat, Inc., and individual contributors
 * as indicated by the @author tags. See the copyright.txt file in the
 * distribution for a full listing of individual contributors.
 *
 * This is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as
 * published by the Free Software Foundation; either version 2.1 of
 * the License, or (at your option) any later version.
 *
 * This software is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this software; if not, write to the Free
 * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
 * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
 */
package org.keycloak.testsuite.oauth;

import org.junit.Assert;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.keycloak.OAuth2Constants;
import org.keycloak.common.constants.KerberosConstants;
import org.keycloak.events.Details;
import org.keycloak.events.Event;
import org.keycloak.events.EventType;
import org.keycloak.models.ClientModel;
import org.keycloak.models.ProtocolMapperModel;
import org.keycloak.models.RealmModel;
import org.keycloak.models.RoleModel;
import org.keycloak.models.UserModel;
import org.keycloak.protocol.oidc.OIDCLoginProtocol;
import org.keycloak.protocol.oidc.mappers.UserSessionNoteMapper;
import org.keycloak.representations.AccessToken;
import org.keycloak.services.managers.RealmManager;
import org.keycloak.testsuite.AssertEvents;
import org.keycloak.testsuite.OAuthClient;
import org.keycloak.testsuite.pages.AccountApplicationsPage;
import org.keycloak.testsuite.pages.AppPage;
import org.keycloak.testsuite.pages.LoginPage;
import org.keycloak.testsuite.pages.OAuthGrantPage;
import org.keycloak.testsuite.rule.KeycloakRule;
import org.keycloak.testsuite.rule.WebResource;
import org.keycloak.testsuite.rule.WebRule;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;

import java.util.Map;

import static org.junit.Assert.assertEquals;

/**
 * Browser-driven tests of the OAuth consent ("grant") screen for the
 * {@code third-party} client: accepting, cancelling, persisted consent,
 * incremental consent for newly added roles/mappers, and scope-param-required
 * roles.
 *
 * @author <a href="mailto:vrockai@redhat.com">Viliam Rockai</a>
 */
public class OAuthGrantTest {

    @ClassRule
    public static KeycloakRule keycloakRule = new KeycloakRule();

    @Rule
    public AssertEvents events = new AssertEvents(keycloakRule);

    @Rule
    public WebRule webRule = new WebRule(this);

    @WebResource
    protected WebDriver driver;

    @WebResource
    protected OAuthClient oauth;

    @WebResource
    protected LoginPage loginPage;

    @WebResource
    protected OAuthGrantPage grantPage;

    @WebResource
    protected AccountApplicationsPage accountAppsPage;

    @WebResource
    protected AppPage appPage;

    // FIX: these are constants and must not be reassignable; made final.
    private static final String ROLE_USER = "Have User privileges";
    private static final String ROLE_CUSTOMER = "Have Customer User privileges";

    /** Accepting the grant yields a code, a valid token, and a revocable grant. */
    @Test
    public void oauthGrantAcceptTest() {
        oauth.clientId("third-party");
        oauth.doLoginGrant("test-user@localhost", "password");

        grantPage.assertCurrent();
        Assert.assertTrue(driver.getPageSource().contains(ROLE_USER));
        Assert.assertTrue(driver.getPageSource().contains(ROLE_CUSTOMER));

        grantPage.accept();

        Assert.assertTrue(oauth.getCurrentQuery().containsKey(OAuth2Constants.CODE));

        Event loginEvent = events.expectLogin()
                .client("third-party")
                .detail(Details.CONSENT, Details.CONSENT_VALUE_CONSENT_GRANTED)
                .assertEvent();
        String codeId = loginEvent.getDetails().get(Details.CODE_ID);
        String sessionId = loginEvent.getSessionId();

        OAuthClient.AccessTokenResponse accessToken = oauth.doAccessTokenRequest(oauth.getCurrentQuery().get(OAuth2Constants.CODE), "password");

        String tokenString = accessToken.getAccessToken();
        Assert.assertNotNull(tokenString);
        AccessToken token = oauth.verifyToken(tokenString);
        assertEquals(sessionId, token.getSessionState());

        // Only the consented realm role and client role end up in the token.
        AccessToken.Access realmAccess = token.getRealmAccess();
        assertEquals(1, realmAccess.getRoles().size());
        Assert.assertTrue(realmAccess.isUserInRole("user"));

        Map<String, AccessToken.Access> resourceAccess = token.getResourceAccess();
        assertEquals(1, resourceAccess.size());
        assertEquals(1, resourceAccess.get("test-app").getRoles().size());
        Assert.assertTrue(resourceAccess.get("test-app").isUserInRole("customer-user"));

        events.expectCodeToToken(codeId, loginEvent.getSessionId()).client("third-party").assertEvent();

        // Grant is visible and revocable in account management.
        accountAppsPage.open();
        assertEquals(1, driver.findElements(By.id("revoke-third-party")).size());
        accountAppsPage.revokeGrant("third-party");

        events.expect(EventType.REVOKE_GRANT)
                .client("account").detail(Details.REVOKED_CLIENT, "third-party").assertEvent();

        assertEquals(0, driver.findElements(By.id("revoke-third-party")).size());
    }

    /** Cancelling the grant produces an access_denied error. */
    @Test
    public void oauthGrantCancelTest() {
        oauth.clientId("third-party");
        oauth.doLoginGrant("test-user@localhost", "password");

        grantPage.assertCurrent();
        Assert.assertTrue(driver.getPageSource().contains(ROLE_USER));
        Assert.assertTrue(driver.getPageSource().contains(ROLE_CUSTOMER));

        grantPage.cancel();

        Assert.assertTrue(oauth.getCurrentQuery().containsKey(OAuth2Constants.ERROR));
        assertEquals("access_denied", oauth.getCurrentQuery().get(OAuth2Constants.ERROR));

        events.expectLogin()
                .client("third-party")
                .error("rejected_by_user")
                .removeDetail(Details.CONSENT)
                .assertEvent();
    }

    /** Once granted, the consent screen is skipped until the grant is revoked. */
    @Test
    public void oauthGrantNotShownWhenAlreadyGranted() {
        // Grant permissions on grant screen
        oauth.clientId("third-party");
        oauth.doLoginGrant("test-user@localhost", "password");

        grantPage.assertCurrent();
        grantPage.accept();

        events.expectLogin()
                .client("third-party")
                .detail(Details.CONSENT, Details.CONSENT_VALUE_CONSENT_GRANTED)
                .assertEvent();

        // Assert permissions granted on Account mgmt. applications page
        accountAppsPage.open();
        AccountApplicationsPage.AppEntry thirdPartyEntry = accountAppsPage.getApplications().get("third-party");
        Assert.assertTrue(thirdPartyEntry.getRolesGranted().contains(ROLE_USER));
        Assert.assertTrue(thirdPartyEntry.getRolesGranted().contains("Have Customer User privileges in test-app"));
        Assert.assertTrue(thirdPartyEntry.getProtocolMappersGranted().contains("Full name"));
        Assert.assertTrue(thirdPartyEntry.getProtocolMappersGranted().contains("Email"));

        // Open login form and assert grantPage not shown
        oauth.openLoginForm();
        appPage.assertCurrent();
        events.expectLogin()
                .detail(Details.AUTH_METHOD, OIDCLoginProtocol.LOGIN_PROTOCOL)
                .detail(Details.CONSENT, Details.CONSENT_VALUE_PERSISTED_CONSENT)
                .removeDetail(Details.USERNAME)
                .client("third-party").assertEvent();

        // Revoke grant in account mgmt.
        accountAppsPage.open();
        accountAppsPage.revokeGrant("third-party");

        events.expect(EventType.REVOKE_GRANT)
                .client("account").detail(Details.REVOKED_CLIENT, "third-party").assertEvent();

        // Open login form again and assert grant Page is shown
        oauth.openLoginForm();
        grantPage.assertCurrent();
        Assert.assertTrue(driver.getPageSource().contains(ROLE_USER));
        Assert.assertTrue(driver.getPageSource().contains(ROLE_CUSTOMER));
    }

    /** Newly added roles/mappers require incremental consent only. */
    @Test
    public void oauthGrantAddAnotherRoleAndMapper() {
        // Grant permissions on grant screen
        oauth.clientId("third-party");
        oauth.doLoginGrant("test-user@localhost", "password");

        // Add new protocolMapper and role before showing grant page
        keycloakRule.update(new KeycloakRule.KeycloakSetup() {
            @Override
            public void config(RealmManager manager, RealmModel adminstrationRealm, RealmModel appRealm) {
                ProtocolMapperModel protocolMapper = UserSessionNoteMapper.createClaimMapper(KerberosConstants.GSS_DELEGATION_CREDENTIAL_DISPLAY_NAME, KerberosConstants.GSS_DELEGATION_CREDENTIAL,
                        KerberosConstants.GSS_DELEGATION_CREDENTIAL, "String",
                        true, KerberosConstants.GSS_DELEGATION_CREDENTIAL_DISPLAY_NAME,
                        true, false);

                ClientModel thirdPartyApp = appRealm.getClientByClientId("third-party");
                thirdPartyApp.addProtocolMapper(protocolMapper);

                RoleModel newRole = appRealm.addRole("new-role");
                thirdPartyApp.addScopeMapping(newRole);
                UserModel testUser = manager.getSession().users().getUserByUsername("test-user@localhost", appRealm);
                testUser.grantRole(newRole);
            }
        });

        // Confirm grant page
        grantPage.assertCurrent();
        grantPage.accept();

        events.expectLogin()
                .client("third-party")
                .detail(Details.CONSENT, Details.CONSENT_VALUE_CONSENT_GRANTED)
                .assertEvent();

        // Assert new role and protocol mapper not in account mgmt.
        accountAppsPage.open();
        AccountApplicationsPage.AppEntry appEntry = accountAppsPage.getApplications().get("third-party");
        Assert.assertFalse(appEntry.getRolesGranted().contains("new-role"));
        Assert.assertFalse(appEntry.getProtocolMappersGranted().contains(KerberosConstants.GSS_DELEGATION_CREDENTIAL_DISPLAY_NAME));

        // Show grant page another time. Just new role and protocol mapper are on the page
        oauth.openLoginForm();
        grantPage.assertCurrent();
        Assert.assertFalse(driver.getPageSource().contains(ROLE_USER));
        Assert.assertFalse(driver.getPageSource().contains("Full name"));
        Assert.assertTrue(driver.getPageSource().contains("new-role"));
        Assert.assertTrue(driver.getPageSource().contains(KerberosConstants.GSS_DELEGATION_CREDENTIAL_DISPLAY_NAME));

        grantPage.accept();

        events.expectLogin()
                .client("third-party")
                .detail(Details.CONSENT, Details.CONSENT_VALUE_CONSENT_GRANTED)
                .assertEvent();

        // Go to account mgmt. Everything is granted now
        accountAppsPage.open();
        appEntry = accountAppsPage.getApplications().get("third-party");
        Assert.assertTrue(appEntry.getRolesGranted().contains("new-role"));
        Assert.assertTrue(appEntry.getProtocolMappersGranted().contains(KerberosConstants.GSS_DELEGATION_CREDENTIAL_DISPLAY_NAME));

        // Revoke
        accountAppsPage.revokeGrant("third-party");
        events.expect(EventType.REVOKE_GRANT)
                .client("account").detail(Details.REVOKED_CLIENT, "third-party").assertEvent();

        // Cleanup
        keycloakRule.update(new KeycloakRule.KeycloakSetup() {
            @Override
            public void config(RealmManager manager, RealmModel adminstrationRealm, RealmModel appRealm) {
                ClientModel thirdPartyApp = appRealm.getClientByClientId("third-party");
                ProtocolMapperModel gssMapper = thirdPartyApp.getProtocolMapperByName(OIDCLoginProtocol.LOGIN_PROTOCOL, KerberosConstants.GSS_DELEGATION_CREDENTIAL_DISPLAY_NAME);
                thirdPartyApp.removeProtocolMapper(gssMapper);

                RoleModel newRole = appRealm.getRole("new-role");
                appRealm.removeRole(newRole);
            }
        });
    }

    /** Scope-param-required roles appear on the grant screen only when requested. */
    @Test
    public void oauthGrantScopeParamRequired() throws Exception {
        keycloakRule.update(new KeycloakRule.KeycloakSetup() {
            @Override
            public void config(RealmManager manager, RealmModel adminstrationRealm, RealmModel appRealm) {
                ClientModel thirdParty = appRealm.getClientByClientId("third-party");
                RoleModel barAppRole = thirdParty.addRole("bar-role");
                barAppRole.setScopeParamRequired(true);

                RoleModel fooRole = appRealm.addRole("foo-role");
                fooRole.setScopeParamRequired(true);
                thirdParty.addScopeMapping(fooRole);

                UserModel testUser = manager.getSession().users().getUserByUsername("test-user@localhost", appRealm);
                testUser.grantRole(fooRole);
                testUser.grantRole(barAppRole);
            }
        });

        // Assert roles not on grant screen when not requested
        oauth.clientId("third-party");
        oauth.doLoginGrant("test-user@localhost", "password");
        grantPage.assertCurrent();
        Assert.assertFalse(driver.getPageSource().contains("foo-role"));
        Assert.assertFalse(driver.getPageSource().contains("bar-role"));
        grantPage.cancel();

        events.expectLogin()
                .client("third-party")
                .error("rejected_by_user")
                .removeDetail(Details.CONSENT)
                .assertEvent();

        // Request both roles via the scope parameter; now they must be shown.
        oauth.scope("foo-role third-party/bar-role");
        oauth.doLoginGrant("test-user@localhost", "password");
        grantPage.assertCurrent();
        Assert.assertTrue(driver.getPageSource().contains("foo-role"));
        Assert.assertTrue(driver.getPageSource().contains("bar-role"));

        grantPage.accept();

        events.expectLogin()
                .client("third-party")
                .detail(Details.CONSENT, Details.CONSENT_VALUE_CONSENT_GRANTED)
                .assertEvent();

        // Revoke
        accountAppsPage.open();
        accountAppsPage.revokeGrant("third-party");
        events.expect(EventType.REVOKE_GRANT)
                .client("account").detail(Details.REVOKED_CLIENT, "third-party").assertEvent();

        // cleanup
        keycloakRule.update(new KeycloakRule.KeycloakSetup() {
            @Override
            public void config(RealmManager manager, RealmModel adminstrationRealm, RealmModel appRealm) {
                appRealm.removeRole(appRealm.getRole("foo-role"));
                ClientModel thirdparty = appRealm.getClientByClientId("third-party");
                thirdparty.removeRole(thirdparty.getRole("bar-role"));
            }
        });
    }
}
/*
 * Copyright 2010 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.core.command.impl;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import org.drools.core.command.NewKnowledgeBuilderConfigurationCommand;
import org.drools.core.command.runtime.AdvanceSessionTimeCommand;
import org.drools.core.command.runtime.BatchExecutionCommandImpl;
import org.drools.core.command.runtime.DisposeCommand;
import org.drools.core.command.runtime.GetGlobalCommand;
import org.drools.core.command.runtime.GetSessionTimeCommand;
import org.drools.core.command.runtime.KBuilderSetPropertyCommand;
import org.drools.core.command.runtime.SetGlobalCommand;
import org.drools.core.command.runtime.process.AbortWorkItemCommand;
import org.drools.core.command.runtime.process.CompleteWorkItemCommand;
import org.drools.core.command.runtime.process.RegisterWorkItemHandlerCommand;
import org.drools.core.command.runtime.process.SignalEventCommand;
import org.drools.core.command.runtime.process.StartProcessCommand;
import org.drools.core.command.runtime.rule.AgendaGroupSetFocusCommand;
import org.drools.core.command.runtime.rule.ClearActivationGroupCommand;
import org.drools.core.command.runtime.rule.ClearAgendaCommand;
import org.drools.core.command.runtime.rule.ClearAgendaGroupCommand;
import org.drools.core.command.runtime.rule.ClearRuleFlowGroupCommand;
import org.drools.core.command.runtime.rule.DeleteCommand;
import org.drools.core.command.runtime.rule.DeleteObjectCommand;
import org.drools.core.command.runtime.rule.EnableAuditLogCommand;
import org.drools.core.command.runtime.rule.FireAllRulesCommand;
import org.drools.core.command.runtime.rule.FromExternalFactHandleCommand;
import org.drools.core.command.runtime.rule.GetFactHandleCommand;
import org.drools.core.command.runtime.rule.GetFactHandleInEntryPointCommand;
import org.drools.core.command.runtime.rule.GetFactHandlesCommand;
import org.drools.core.command.runtime.rule.GetObjectCommand;
import org.drools.core.command.runtime.rule.GetObjectsCommand;
import org.drools.core.command.runtime.rule.InsertElementsCommand;
import org.drools.core.command.runtime.rule.InsertObjectCommand;
import org.drools.core.command.runtime.rule.ModifyCommand;
import org.drools.core.command.runtime.rule.ModifyCommand.SetterImpl;
import org.drools.core.command.runtime.rule.QueryCommand;
import org.kie.api.command.BatchExecutionCommand;
import org.kie.api.command.Command;
import org.kie.api.command.Setter;
import org.kie.api.runtime.ObjectFilter;
import org.kie.api.runtime.process.WorkItemHandler;
import org.kie.api.runtime.rule.FactHandle;
import org.kie.internal.command.ExtendedKieCommands;

/**
 * Factory for the concrete {@code Command} implementations used by the
 * fluent/batch command API. Every method is a thin, stateless constructor
 * wrapper; the class itself holds no state and is therefore thread-safe.
 *
 * <p>NOTE: several wrapped constructors take arguments in a different order
 * than the factory method's parameters (e.g. {@code
 * RegisterWorkItemHandlerCommand(workItemName, handler)} and {@code
 * AdvanceSessionTimeCommand(outIdentifier, amount, unit)}) — the delegation
 * below is intentionally kept verbatim.</p>
 */
public class CommandFactoryServiceImpl implements ExtendedKieCommands {

    // --- globals ---------------------------------------------------------

    /** Command that reads the given session global. */
    public Command newGetGlobal(String identifier) {
        return new GetGlobalCommand(identifier);
    }

    /** Command that reads the global and stores it under {@code outIdentifier}. */
    public Command newGetGlobal(String identifier, String outIdentifier) {
        GetGlobalCommand cmd = new GetGlobalCommand(identifier);
        cmd.setOutIdentifier(outIdentifier);
        return cmd;
    }

    /** Command that disposes the session. */
    public Command newDispose() {
        return new DisposeCommand();
    }

    // --- insertion -------------------------------------------------------

    /** Command inserting every element of the iterable as a fact. */
    public Command newInsertElements(Iterable objects) {
        return new InsertElementsCommand( i2c(objects) );
    }

    /** As above, also exposing the inserted objects under {@code outIdentifier}. */
    public Command newInsertElements(Iterable objects, String outIdentifier) {
        InsertElementsCommand cmd = new InsertElementsCommand( i2c(objects) );
        cmd.setOutIdentifier(outIdentifier);
        return cmd;
    }

    /** Full variant: target entry point and return-object behaviour configurable. */
    public Command newInsertElements(Iterable objects, String outIdentifier, boolean returnObject, String entryPoint) {
        InsertElementsCommand cmd = new InsertElementsCommand( i2c(objects) );
        cmd.setEntryPoint( entryPoint );
        cmd.setOutIdentifier( outIdentifier );
        cmd.setReturnObject( returnObject );
        return cmd;
    }

    // Adapts any Iterable to a Collection, avoiding a copy when possible.
    private Collection i2c(Iterable i) {
        if (i instanceof Collection) {
            return (Collection) i;
        }
        Collection c = new ArrayList();
        for (Object o : i) {
            c.add(o);
        }
        return c;
    }

    /** Command inserting a single fact. */
    public Command newInsert(Object object) {
        return new InsertObjectCommand(object);
    }

    /** Command inserting a single fact, exposed under {@code outIdentifier}. */
    public Command newInsert(Object object, String outIdentifier) {
        InsertObjectCommand cmd = new InsertObjectCommand(object);
        cmd.setOutIdentifier( outIdentifier );
        return cmd;
    }

    /** Full single-fact insert variant with entry point and return flag. */
    public Command newInsert(Object object, String outIdentifier, boolean returnObject, String entryPoint) {
        InsertObjectCommand cmd = new InsertObjectCommand(object);
        cmd.setOutIdentifier(outIdentifier);
        cmd.setEntryPoint( entryPoint );
        cmd.setReturnObject( returnObject );
        return cmd;
    }

    // --- deletion / modification -----------------------------------------

    /** Command retracting the fact behind the given handle. */
    public Command newDelete(FactHandle factHandle) {
        return new DeleteCommand( factHandle );
    }

    /** Command retracting a fact located by object identity in an entry point. */
    public Command newDeleteObject(Object object,String entryPoint) {
        return new DeleteObjectCommand( object, entryPoint );
    }

    /** A single accessor/value pair used by {@link #newModify}. */
    public Setter newSetter(String accessor, String value) {
        return new SetterImpl(accessor, value);
    }

    /** Command applying the given setters to the fact behind the handle. */
    public Command newModify(FactHandle factHandle, List<Setter> setters) {
        return new ModifyCommand(factHandle, setters);
    }

    // --- object retrieval -------------------------------------------------

    public Command newGetObject(FactHandle factHandle) {
        return new GetObjectCommand(factHandle);
    }

    public Command newGetObject(FactHandle factHandle, String outIdentifier) {
        return new GetObjectCommand(factHandle, outIdentifier);
    }

    /** Retrieves all objects (delegates with a null filter). */
    public Command newGetObjects() {
        return newGetObjects((ObjectFilter) null);
    }

    public Command newGetObjects(String outIdentifier) {
        return newGetObjects(null, outIdentifier);
    }

    public Command newGetObjects(ObjectFilter filter) {
        return new GetObjectsCommand(filter);
    }

    public Command newGetObjects(ObjectFilter filter, String outIdentifier) {
        return new GetObjectsCommand(filter, outIdentifier);
    }

    // --- globals (write) --------------------------------------------------

    public Command newSetGlobal(String identifier, Object object) {
        return new SetGlobalCommand(identifier, object);
    }

    /** If {@code out} is true the global is exported under its own identifier. */
    public Command newSetGlobal(String identifier, Object object, boolean out) {
        if (out) {
            return newSetGlobal(identifier, object, identifier);
        } else {
            return newSetGlobal(identifier, object);
        }
    }

    public Command newSetGlobal(String identifier, Object object, String outIdentifier) {
        SetGlobalCommand cmd = new SetGlobalCommand(identifier, object);
        cmd.setOutIdentifier(outIdentifier);
        return cmd;
    }

    // --- rule firing ------------------------------------------------------

    public Command newFireAllRules() {
        return new FireAllRulesCommand();
    }

    /** Fires at most {@code max} rules. */
    public Command newFireAllRules(int max) {
        return new FireAllRulesCommand(max);
    }

    /** Exports the fired-rule count under {@code outidentifier}. */
    public Command newFireAllRules(String outidentifier) {
        return new FireAllRulesCommand(outidentifier);
    }

    @Override
    public Command newGetFactHandle( Object object ) {
        return new GetFactHandleCommand( object );
    }

    @Override
    public Command newGetFactHandleInEntryPoint( Object object, String entryPoint ) {
        return new GetFactHandleInEntryPointCommand( object, entryPoint );
    }

    // --- process / work items ---------------------------------------------

    public Command newStartProcess(String processId) {
        StartProcessCommand startProcess = new StartProcessCommand();
        startProcess.setProcessId(processId);
        return startProcess;
    }

    public Command newStartProcess(String processId, Map<String, Object> parameters) {
        StartProcessCommand startProcess = new StartProcessCommand();
        startProcess.setProcessId(processId);
        startProcess.setParameters(parameters);
        return startProcess;
    }

    public Command newSignalEvent(String type, Object event) {
        return new SignalEventCommand( type, event );
    }

    public Command newSignalEvent(long processInstanceId, String type, Object event) {
        return new SignalEventCommand( processInstanceId, type, event );
    }

    public Command newCompleteWorkItem(long workItemId, Map<String, Object> results) {
        return new CompleteWorkItemCommand(workItemId, results);
    }

    public Command newAbortWorkItem(long workItemId) {
        return new AbortWorkItemCommand( workItemId);
    }

    // Note: the command constructor takes (workItemName, handler) — the
    // reverse of this method's parameter order.
    public Command newRegisterWorkItemHandlerCommand(WorkItemHandler handler, String workItemName) {
        return new RegisterWorkItemHandlerCommand( workItemName, handler );
    }

    // --- queries / batches ------------------------------------------------

    public Command newQuery(String identifier, String name) {
        return new QueryCommand(identifier, name, null);
    }

    public Command newQuery(String identifier, String name, Object[] arguments) {
        return new QueryCommand(identifier, name, arguments);
    }

    public BatchExecutionCommand newBatchExecution(List<? extends Command> commands) {
        return newBatchExecution( commands, null );
    }

    public BatchExecutionCommand newBatchExecution(List<? extends Command> commands, String lookup) {
        return new BatchExecutionCommandImpl( commands, lookup );
    }

    /** @deprecated use {@link #newKnowledgeBuilderSetPropertyCommand}. */
    @Deprecated
    public Command newKBuilderSetPropertyCommand(String id, String name, String value) {
        return new KBuilderSetPropertyCommand(id, name, value);
    }

    public Command newKnowledgeBuilderSetPropertyCommand(String id, String name, String value) {
        return new KBuilderSetPropertyCommand(id, name, value);
    }

    public Command newNewKnowledgeBuilderConfigurationCommand(String localId){
        return new NewKnowledgeBuilderConfigurationCommand(localId);
    }

    // --- fact handles -----------------------------------------------------

    /** Rebuilds a (connected) handle from its external string form. */
    public Command<FactHandle> fromExternalFactHandleCommand(String factHandleExternalForm) {
        return fromExternalFactHandleCommand(factHandleExternalForm, false);
    }

    public Command<FactHandle> fromExternalFactHandleCommand(String factHandleExternalForm, boolean disconnected) {
        return new FromExternalFactHandleCommand(factHandleExternalForm, disconnected);
    }

    public Command newAgendaGroupSetFocus(String name) {
        return new AgendaGroupSetFocusCommand(name);
    }

    @Override
    public Command newGetFactHandles() {
        return new GetFactHandlesCommand();
    }

    @Override
    public Command newGetFactHandles(String outIdentifier) {
        GetFactHandlesCommand factHandlesCommand = new GetFactHandlesCommand();
        factHandlesCommand.setOutIdentifier(outIdentifier);
        return factHandlesCommand;
    }

    @Override
    public Command newGetFactHandles(ObjectFilter filter) {
        return new GetFactHandlesCommand(filter);
    }

    @Override
    public Command newGetFactHandles(ObjectFilter filter, String outIdentifier) {
        GetFactHandlesCommand factHandlesCommand = new GetFactHandlesCommand(filter);
        factHandlesCommand.setOutIdentifier(outIdentifier);
        return factHandlesCommand;
    }

    // --- agenda -----------------------------------------------------------

    public Command newClearActivationGroup(String name) {
        return new ClearActivationGroupCommand(name);
    }

    public Command newClearAgenda() {
        return new ClearAgendaCommand();
    }

    public Command newClearAgendaGroup(String name) {
        return new ClearAgendaGroupCommand(name);
    }

    public Command newClearRuleFlowGroup(String name) {
        return new ClearRuleFlowGroupCommand(name);
    }

    // --- audit / session clock --------------------------------------------

    @Override
    public Command newEnableAuditLog( String directory, String filename ) {
        return new EnableAuditLogCommand( directory, filename );
    }

    /** Audit log in the default directory (null). */
    @Override
    public Command newEnableAuditLog( String filename ) {
        return new EnableAuditLogCommand( null, filename );
    }

    @Override
    public Command<Long> newGetSessionTime() {
        return new GetSessionTimeCommand();
    }

    @Override
    public Command<Long> newGetSessionTime(String outIdentifier) {
        return new GetSessionTimeCommand(outIdentifier);
    }

    @Override
    public Command<Long> newAdvanceSessionTime(long amount, TimeUnit unit) {
        return new AdvanceSessionTimeCommand( amount, unit );
    }

    // Note: constructor order here is (outIdentifier, amount, unit).
    @Override
    public Command<Long> newAdvanceSessionTime(long amount, TimeUnit unit, String outIdentifier) {
        return new AdvanceSessionTimeCommand( outIdentifier, amount, unit );
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.master.cleaner; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.IOException; import java.util.Collection; import java.util.List; import java.util.Set; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.regex.Pattern; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Waiter; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.SnapshotType; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.master.HMaster; import 
org.apache.hadoop.hbase.master.snapshot.DisabledTableSnapshotHandler; import org.apache.hadoop.hbase.master.snapshot.SnapshotHFileCleaner; import org.apache.hadoop.hbase.master.snapshot.SnapshotManager; import org.apache.hadoop.hbase.regionserver.CompactedHFilesDischarger; import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils; import org.apache.hadoop.hbase.snapshot.SnapshotReferenceUtil; import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils; import org.apache.hadoop.hbase.snapshot.UnknownSnapshotException; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.common.collect.Lists; import org.apache.hbase.thirdparty.com.google.common.util.concurrent.Uninterruptibles; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos .IsSnapshotCleanupEnabledRequest; import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos
    .IsSnapshotCleanupEnabledResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos
    .SetSnapshotCleanupRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;

/**
 * Test the master-related aspects of a snapshot: the isSnapshotDone/getCompletedSnapshots/
 * deleteSnapshot RPC contract, TTL-based auto cleanup switching, and snapshot-aware hfile
 * archiving on a 2-regionserver mini cluster.
 */
@Category({MasterTests.class, MediumTests.class})
public class TestSnapshotFromMaster {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestSnapshotFromMaster.class);

  private static final Logger LOG = LoggerFactory.getLogger(TestSnapshotFromMaster.class);
  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
  // number of regionservers in the mini cluster
  private static final int NUM_RS = 2;
  // set once in setupCluster() from the running mini cluster
  private static Path rootDir;
  private static FileSystem fs;
  private static HMaster master;
  // for hfile archiving test.
  private static Path archiveDir;
  private static final byte[] TEST_FAM = Bytes.toBytes("fam");
  private static final TableName TABLE_NAME = TableName.valueOf("test");
  // refresh the cache every 1/2 second
  private static final long cacheRefreshPeriod = 500;
  // also used to size the load in testSnapshotHFileArchiving (blockingStoreFiles / 2 flushes)
  private static final int blockingStoreFiles = 12;

  /**
   * Setup the config for the cluster
   */
  @BeforeClass
  public static void setupCluster() throws Exception {
    setupConf(UTIL.getConfiguration());
    UTIL.startMiniCluster(NUM_RS);
    fs = UTIL.getDFSCluster().getFileSystem();
    master = UTIL.getMiniHBaseCluster().getMaster();
    rootDir = master.getMasterFileSystem().getRootDir();
    archiveDir = new Path(rootDir, HConstants.HFILE_ARCHIVE_DIRECTORY);
  }

  // Tunes the cluster so snapshots, compactions and cleaner chores run fast and
  // deterministically enough for these tests.
  private static void setupConf(Configuration conf) {
    // disable the ui
    conf.setInt("hbase.regionsever.info.port", -1);
    // change the flush size to a small amount, regulating number of store files
    conf.setInt("hbase.hregion.memstore.flush.size", 25000);
    // so make sure we get a compaction when doing a load, but keep around some
    // files in the store
    conf.setInt("hbase.hstore.compaction.min", 2);
    conf.setInt("hbase.hstore.compactionThreshold", 5);
    // block writes if we get to 12 store files
    conf.setInt("hbase.hstore.blockingStoreFiles", blockingStoreFiles);
    // Ensure no extra cleaners on by default (e.g. TimeToLiveHFileCleaner)
    conf.set(HFileCleaner.MASTER_HFILE_CLEANER_PLUGINS, "");
    conf.set(HConstants.HBASE_MASTER_LOGCLEANER_PLUGINS, "");
    // Enable snapshot
    conf.setBoolean(SnapshotManager.HBASE_SNAPSHOT_ENABLED, true);
    conf.setLong(SnapshotManager.HBASE_SNAPSHOT_SENTINELS_CLEANUP_TIMEOUT_MILLIS, 3 * 1000L);
    conf.setLong(SnapshotHFileCleaner.HFILE_CACHE_REFRESH_PERIOD_CONF_KEY, cacheRefreshPeriod);
    conf.set(HConstants.HBASE_REGION_SPLIT_POLICY_KEY,
      ConstantSizeRegionSplitPolicy.class.getName());
    conf.setInt("hbase.hfile.compactions.cleaner.interval", 20 * 1000);
    conf.setInt("hbase.master.cleaner.snapshot.interval", 500);
  }

  // Fresh table and no leftover snapshot handler before every test.
  @Before
  public void setup() throws Exception {
    UTIL.createTable(TABLE_NAME, TEST_FAM);
    master.getSnapshotManager().setSnapshotHandlerForTesting(TABLE_NAME, null);
  }

  // Drop the table and any snapshots/archives the test created.
  @After
  public void tearDown() throws Exception {
    UTIL.deleteTable(TABLE_NAME);
    SnapshotTestingUtils.deleteAllSnapshots(UTIL.getAdmin());
    SnapshotTestingUtils.deleteArchiveDirectory(UTIL);
  }

  @AfterClass
  public static void cleanupTest() throws Exception {
    try {
      UTIL.shutdownMiniCluster();
    } catch (Exception e) {
      // NOOP; best-effort shutdown, failures here must not fail the suite
    }
  }

  /**
   * Test that the contract from the master for checking on a snapshot are valid.
   * <p>
   * <ol>
   * <li>If a snapshot fails with an error, we expect to get the source error.</li>
   * <li>If there is no snapshot name supplied, we should get an error.</li>
   * <li>If asking about a snapshot that hasn't occurred, you should get an error.</li>
   * </ol>
   */
  @Test
  public void testIsDoneContract() throws Exception {
    IsSnapshotDoneRequest.Builder builder = IsSnapshotDoneRequest.newBuilder();
    String snapshotName = "asyncExpectedFailureTest";
    // check that we get an exception when looking up snapshot where one hasn't happened
    SnapshotTestingUtils.expectSnapshotDoneException(master, builder.build(),
      UnknownSnapshotException.class);
    // and that we get the same issue, even if we specify a name
    SnapshotDescription desc = SnapshotDescription.newBuilder()
      .setName(snapshotName).setTable(TABLE_NAME.getNameAsString()).build();
    builder.setSnapshot(desc);
    SnapshotTestingUtils.expectSnapshotDoneException(master, builder.build(),
      UnknownSnapshotException.class);
    // set a mock handler to simulate a snapshot
    DisabledTableSnapshotHandler mockHandler = Mockito.mock(DisabledTableSnapshotHandler.class);
    Mockito.when(mockHandler.getException()).thenReturn(null);
    Mockito.when(mockHandler.getSnapshot()).thenReturn(desc);
    Mockito.when(mockHandler.isFinished()).thenReturn(Boolean.TRUE);
    Mockito.when(mockHandler.getCompletionTimestamp())
      .thenReturn(EnvironmentEdgeManager.currentTime());
    master.getSnapshotManager()
        .setSnapshotHandlerForTesting(TABLE_NAME, mockHandler);
    // if we do a lookup without a snapshot name, we should fail - you should always know your name
    builder = IsSnapshotDoneRequest.newBuilder();
    SnapshotTestingUtils.expectSnapshotDoneException(master, builder.build(),
      UnknownSnapshotException.class);
    // then do the lookup for the snapshot that it is done
    builder.setSnapshot(desc);
    IsSnapshotDoneResponse response =
      master.getMasterRpcServices().isSnapshotDone(null, builder.build());
    assertTrue("Snapshot didn't complete when it should have.", response.getDone());
    // now try the case where we are looking for a snapshot we didn't take
    builder.setSnapshot(SnapshotDescription.newBuilder().setName("Not A Snapshot").build());
    SnapshotTestingUtils.expectSnapshotDoneException(master, builder.build(),
      UnknownSnapshotException.class);
    // then create a snapshot to the fs and make sure that we can find it when checking done
    snapshotName = "completed";
    desc = createSnapshot(snapshotName);
    builder.setSnapshot(desc);
    response = master.getMasterRpcServices().isSnapshotDone(null, builder.build());
    assertTrue("Completed, on-disk snapshot not found", response.getDone());
  }

  // getCompletedSnapshots must return exactly the snapshots present on the fs, in order.
  @Test
  public void testGetCompletedSnapshots() throws Exception {
    // first check when there are no snapshots
    GetCompletedSnapshotsRequest request = GetCompletedSnapshotsRequest.newBuilder().build();
    GetCompletedSnapshotsResponse response =
      master.getMasterRpcServices().getCompletedSnapshots(null, request);
    assertEquals("Found unexpected number of snapshots", 0, response.getSnapshotsCount());
    // write one snapshot to the fs
    String snapshotName = "completed";
    SnapshotDescription snapshot = createSnapshot(snapshotName);
    // check that we get one snapshot
    response = master.getMasterRpcServices().getCompletedSnapshots(null, request);
    assertEquals("Found unexpected number of snapshots", 1, response.getSnapshotsCount());
    List<SnapshotDescription> snapshots = response.getSnapshotsList();
    List<SnapshotDescription> expected = Lists.newArrayList(snapshot);
    assertEquals("Returned snapshots don't match created snapshots", expected, snapshots);
    // write a second snapshot
    snapshotName = "completed_two";
    snapshot = createSnapshot(snapshotName);
    expected.add(snapshot);
    // check that we get one snapshot
    response = master.getMasterRpcServices().getCompletedSnapshots(null, request);
    assertEquals("Found unexpected number of snapshots", 2, response.getSnapshotsCount());
    snapshots = response.getSnapshotsList();
    assertEquals("Returned snapshots don't match created snapshots", expected, snapshots);
  }

  // deleteSnapshot must reject an unknown snapshot and accept a known one.
  @Test
  public void testDeleteSnapshot() throws Exception {
    String snapshotName = "completed";
    SnapshotDescription snapshot = SnapshotDescription.newBuilder().setName(snapshotName).build();
    DeleteSnapshotRequest request = DeleteSnapshotRequest.newBuilder().setSnapshot(snapshot)
        .build();
    try {
      master.getMasterRpcServices().deleteSnapshot(null, request);
      fail("Master didn't throw exception when attempting to delete snapshot that doesn't exist");
    } catch (org.apache.hbase.thirdparty.com.google.protobuf.ServiceException e) {
      // Expected
    }
    // write one snapshot to the fs
    createSnapshot(snapshotName);
    // then delete the existing snapshot,which shouldn't cause an exception to be thrown
    master.getMasterRpcServices().deleteSnapshot(null, request);
  }

  // With cleanup enabled, a snapshot whose TTL has expired is removed automatically.
  @Test
  public void testGetCompletedSnapshotsWithCleanup() throws Exception {
    // Enable auto snapshot cleanup for the cluster
    SetSnapshotCleanupRequest setSnapshotCleanupRequest =
        SetSnapshotCleanupRequest.newBuilder().setEnabled(true).build();
    master.getMasterRpcServices().switchSnapshotCleanup(null, setSnapshotCleanupRequest);
    // first check when there are no snapshots
    GetCompletedSnapshotsRequest request = GetCompletedSnapshotsRequest.newBuilder().build();
    GetCompletedSnapshotsResponse response =
      master.getMasterRpcServices().getCompletedSnapshots(null, request);
    assertEquals("Found unexpected number of snapshots", 0, response.getSnapshotsCount());
    // NOTE: This is going to be flakey. Its timing based. For now made it more coarse
    // so more likely to pass though we have to hang around longer.
    // write one snapshot to the fs
    createSnapshotWithTtl("snapshot_01", 5L);
    createSnapshotWithTtl("snapshot_02", 100L);
    // check that we get one snapshot
    response = master.getMasterRpcServices().getCompletedSnapshots(null, request);
    assertEquals("Found unexpected number of snapshots", 2, response.getSnapshotsCount());
    // Check that 1 snapshot is auto cleaned after 5 sec of TTL expiration. Wait 10 seconds
    // just in case.
    Uninterruptibles.sleepUninterruptibly(10, TimeUnit.SECONDS);
    response = master.getMasterRpcServices().getCompletedSnapshots(null, request);
    assertEquals("Found unexpected number of snapshots", 1, response.getSnapshotsCount());
  }

  // With cleanup disabled, even TTL-expired snapshots must survive.
  @Test
  public void testGetCompletedSnapshotsWithoutCleanup() throws Exception {
    // Disable auto snapshot cleanup for the cluster
    SetSnapshotCleanupRequest setSnapshotCleanupRequest =
        SetSnapshotCleanupRequest.newBuilder().setEnabled(false).build();
    master.getMasterRpcServices().switchSnapshotCleanup(null, setSnapshotCleanupRequest);
    // first check when there are no snapshots
    GetCompletedSnapshotsRequest request = GetCompletedSnapshotsRequest.newBuilder().build();
    GetCompletedSnapshotsResponse response =
      master.getMasterRpcServices().getCompletedSnapshots(null, request);
    assertEquals("Found unexpected number of snapshots", 0, response.getSnapshotsCount());
    // write one snapshot to the fs
    createSnapshotWithTtl("snapshot_02", 1L);
    createSnapshotWithTtl("snapshot_03", 1L);
    // check that we get one snapshot
    response = master.getMasterRpcServices().getCompletedSnapshots(null, request);
    assertEquals("Found unexpected number of snapshots", 2, response.getSnapshotsCount());
    // check that no snapshot is auto cleaned even after 1 sec of TTL expiration
    Uninterruptibles.sleepUninterruptibly(2, TimeUnit.SECONDS);
    response = master.getMasterRpcServices().getCompletedSnapshots(null, request);
    assertEquals("Found unexpected number of snapshots", 2, response.getSnapshotsCount());
  }

  // The switchSnapshotCleanup / isSnapshotCleanupEnabled RPC pair must round-trip both states.
  @Test
  public void testSnapshotCleanupStatus() throws Exception {
    // Enable auto snapshot cleanup for the cluster
    SetSnapshotCleanupRequest setSnapshotCleanupRequest =
        SetSnapshotCleanupRequest.newBuilder().setEnabled(true).build();
    master.getMasterRpcServices().switchSnapshotCleanup(null, setSnapshotCleanupRequest);
    // Check if auto snapshot cleanup is enabled
    IsSnapshotCleanupEnabledRequest isSnapshotCleanupEnabledRequest =
        IsSnapshotCleanupEnabledRequest.newBuilder().build();
    IsSnapshotCleanupEnabledResponse isSnapshotCleanupEnabledResponse =
        master.getMasterRpcServices().isSnapshotCleanupEnabled(null,
            isSnapshotCleanupEnabledRequest);
    Assert.assertTrue(isSnapshotCleanupEnabledResponse.getEnabled());
    // Disable auto snapshot cleanup for the cluster
    setSnapshotCleanupRequest = SetSnapshotCleanupRequest.newBuilder()
        .setEnabled(false).build();
    master.getMasterRpcServices().switchSnapshotCleanup(null, setSnapshotCleanupRequest);
    // Check if auto snapshot cleanup is disabled
    isSnapshotCleanupEnabledRequest = IsSnapshotCleanupEnabledRequest
        .newBuilder().build();
    isSnapshotCleanupEnabledResponse =
        master.getMasterRpcServices().isSnapshotCleanupEnabled(null,
            isSnapshotCleanupEnabledRequest);
    Assert.assertFalse(isSnapshotCleanupEnabledResponse.getEnabled());
  }

  /**
   * Test that the snapshot hfile archive cleaner works correctly. HFiles that are in snapshots
   * should be retained, while those that are not in a snapshot should be deleted.
   * @throws Exception on failure
   */
  @Test
  public void testSnapshotHFileArchiving() throws Exception {
    Admin admin = UTIL.getAdmin();
    // make sure we don't fail on listing snapshots
    SnapshotTestingUtils.assertNoSnapshots(admin);
    // recreate test table with disabled compactions; otherwise compaction may happen before
    // snapshot, the call after snapshot will be a no-op and checks will fail
    UTIL.deleteTable(TABLE_NAME);
    TableDescriptor td = TableDescriptorBuilder.newBuilder(TABLE_NAME)
        .setColumnFamily(ColumnFamilyDescriptorBuilder.of(TEST_FAM))
        .setCompactionEnabled(false)
        .build();
    UTIL.getAdmin().createTable(td);
    // load the table
    for (int i = 0; i < blockingStoreFiles / 2; i ++) {
      UTIL.loadTable(UTIL.getConnection().getTable(TABLE_NAME), TEST_FAM);
      UTIL.flush(TABLE_NAME);
    }
    // disable the table so we can take a snapshot
    admin.disableTable(TABLE_NAME);
    // take a snapshot of the table
    String snapshotName = "snapshot";
    String snapshotNameBytes = snapshotName;
    admin.snapshot(snapshotName, TABLE_NAME);
    LOG.info("After snapshot File-System state");
    FSUtils.logFileSystemState(fs, rootDir, LOG);
    // ensure we only have one snapshot
    SnapshotTestingUtils.assertOneSnapshotThatMatches(admin, snapshotNameBytes, TABLE_NAME);
    td = TableDescriptorBuilder.newBuilder(td)
        .setCompactionEnabled(true)
        .build();
    // enable compactions now
    admin.modifyTable(td);
    // renable the table so we can compact the regions
    admin.enableTable(TABLE_NAME);
    // compact the files so we get some archived files for the table we just snapshotted
    List<HRegion> regions = UTIL.getHBaseCluster().getRegions(TABLE_NAME);
    for (HRegion region : regions) {
      region.waitForFlushesAndCompactions(); // enable can trigger a compaction, wait for it.
      region.compactStores(); // min is 2 so will compact and archive
    }
    // find a regionserver actually hosting the table; its discharger moves the
    // compacted-away files into the archive
    List<RegionServerThread> regionServerThreads = UTIL.getMiniHBaseCluster()
        .getRegionServerThreads();
    HRegionServer hrs = null;
    for (RegionServerThread rs : regionServerThreads) {
      if (!rs.getRegionServer().getRegions(TABLE_NAME).isEmpty()) {
        hrs = rs.getRegionServer();
        break;
      }
    }
    CompactedHFilesDischarger cleaner = new CompactedHFilesDischarger(100, null, hrs, false);
    cleaner.chore();
    LOG.info("After compaction File-System state");
    FSUtils.logFileSystemState(fs, rootDir, LOG);
    // make sure the cleaner has run
    LOG.debug("Running hfile cleaners");
    ensureHFileCleanersRun();
    LOG.info("After cleaners File-System state: " + rootDir);
    FSUtils.logFileSystemState(fs, rootDir, LOG);
    // get the snapshot files for the table
    Path snapshotTable = SnapshotDescriptionUtils.getCompletedSnapshotDir(snapshotName, rootDir);
    Set<String> snapshotHFiles = SnapshotReferenceUtil.getHFileNames(
      UTIL.getConfiguration(), fs, snapshotTable);
    // check that the files in the archive contain the ones that we need for the snapshot
    LOG.debug("Have snapshot hfiles:");
    for (String fileName : snapshotHFiles) {
      LOG.debug(fileName);
    }
    // get the archived files for the table
    Collection<String> archives = getHFiles(archiveDir, fs, TABLE_NAME);
    // get the hfiles for the table
    Collection<String> hfiles = getHFiles(rootDir, fs, TABLE_NAME);
    // and make sure that there is a proper subset
    for (String fileName : snapshotHFiles) {
      boolean exist = archives.contains(fileName) || hfiles.contains(fileName);
      assertTrue("Archived hfiles " + archives
        + " and table hfiles " + hfiles + " is missing snapshot file:" + fileName, exist);
    }
    // delete the existing snapshot
    admin.deleteSnapshot(snapshotNameBytes);
    SnapshotTestingUtils.assertNoSnapshots(admin);
    // make sure that we don't keep around the hfiles that aren't in a snapshot
    // make sure we wait long enough to refresh the snapshot hfile
    List<BaseHFileCleanerDelegate> delegates = UTIL.getMiniHBaseCluster().getMaster()
        .getHFileCleaner().cleanersChain;
    for (BaseHFileCleanerDelegate delegate: delegates) {
      if (delegate instanceof SnapshotHFileCleaner) {
        ((SnapshotHFileCleaner)delegate).getFileCacheForTesting().triggerCacheRefreshForTesting();
      }
    }
    // run the cleaner again
    LOG.debug("Running hfile cleaners");
    ensureHFileCleanersRun();
    LOG.info("After delete snapshot cleaners run File-System state");
    FSUtils.logFileSystemState(fs, rootDir, LOG);
    archives = getHFiles(archiveDir, fs, TABLE_NAME);
    assertEquals("Still have some hfiles in the archive, when their snapshot has been deleted.",
      0, archives.size());
  }

  /**
   * @return all the HFiles for a given table in the specified dir
   * @throws IOException on expected failure
   */
  private final Collection<String> getHFiles(Path dir, FileSystem fs, TableName tableName)
      throws IOException {
    Path tableDir = FSUtils.getTableDir(dir, tableName);
    return SnapshotTestingUtils.listHFileNames(fs, tableDir);
  }

  /**
   * Make sure the {@link HFileCleaner HFileCleaners} run at least once
   */
  private static void ensureHFileCleanersRun() {
    UTIL.getHBaseCluster().getMaster().getHFileCleaner().chore();
  }

  // Writes a v2 snapshot layout directly to the fs (no TTL) and returns its description.
  private SnapshotDescription createSnapshot(final String snapshotName) throws IOException {
    SnapshotTestingUtils.SnapshotMock snapshotMock =
        new SnapshotTestingUtils.SnapshotMock(UTIL.getConfiguration(), fs, rootDir);
    SnapshotTestingUtils.SnapshotMock.SnapshotBuilder builder =
        snapshotMock.createSnapshotV2(snapshotName, "test", 0);
    builder.commit();
    return builder.getSnapshotDescription();
  }

  // Writes a v2 snapshot layout with the given TTL (seconds, per the cleanup tests above).
  private SnapshotDescription createSnapshotWithTtl(final String snapshotName, final long ttl)
      throws IOException {
    SnapshotTestingUtils.SnapshotMock snapshotMock =
        new SnapshotTestingUtils.SnapshotMock(UTIL.getConfiguration(), fs, rootDir);
    SnapshotTestingUtils.SnapshotMock.SnapshotBuilder builder =
        snapshotMock.createSnapshotV2(snapshotName, "test", 0, ttl);
    builder.commit();
    return builder.getSnapshotDescription();
  }

  @Test
  public void testAsyncSnapshotWillNotBlockSnapshotHFileCleaner() throws Exception {
    // Write some data
    Table table = UTIL.getConnection().getTable(TABLE_NAME);
    for (int i = 0; i < 10; i++) {
      Put put = new Put(Bytes.toBytes(i)).addColumn(TEST_FAM, Bytes.toBytes("q"),
        Bytes.toBytes(i));
      table.put(put);
    }
    String snapshotName = "testAsyncSnapshotWillNotBlockSnapshotHFileCleaner01";
    // NOTE(review): the returned future is deliberately not awaited here — the point is
    // to poll cluster state while the snapshot is (or was) in flight; confirm intent.
    Future<Void> future =
      UTIL.getAdmin().snapshotAsync(new org.apache.hadoop.hbase.client.SnapshotDescription(
        snapshotName, TABLE_NAME, SnapshotType.FLUSH));
    Waiter.waitFor(UTIL.getConfiguration(), 10 * 1000L, 200L,
      () -> UTIL.getAdmin().listSnapshots(Pattern.compile(snapshotName)).size() == 1);
    UTIL.waitFor(30000, () -> !master.getSnapshotManager().isTakingAnySnapshot());
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.model; import java.util.concurrent.ForkJoinPool; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlTransient; import org.apache.camel.spi.Metadata; /** * Resilience4j Circuit Breaker EIP configuration */ @Metadata(label = "eip,routing,circuitbreaker") @XmlRootElement(name = "resilience4jConfiguration") @XmlAccessorType(XmlAccessType.FIELD) public class Resilience4jConfigurationDefinition extends Resilience4jConfigurationCommon { @XmlTransient private CircuitBreakerDefinition parent; public Resilience4jConfigurationDefinition() { } public Resilience4jConfigurationDefinition(CircuitBreakerDefinition parent) { this.parent = parent; } // Fluent API // ------------------------------------------------------------------------- /** * Refers to an existing io.github.resilience4j.circuitbreaker.CircuitBreaker instance * to lookup and use from the registry. When using this, then any other circuit breaker options * are not in use. 
*/ public Resilience4jConfigurationDefinition circuitBreakerRef(String circuitBreakerRef) { setCircuitBreakerRef(circuitBreakerRef); return this; } /** * Refers to an existing io.github.resilience4j.circuitbreaker.CircuitBreakerConfig instance * to lookup and use from the registry. */ public Resilience4jConfigurationDefinition configRef(String ref) { setConfigRef(ref); return this; } /** * Configures the failure rate threshold in percentage. * If the failure rate is equal or greater than the threshold the CircuitBreaker transitions to open and starts short-circuiting calls. * <p> * The threshold must be greater than 0 and not greater than 100. Default value is 50 percentage. */ public Resilience4jConfigurationDefinition failureRateThreshold(Float failureRateThreshold) { setFailureRateThreshold(failureRateThreshold != null ? failureRateThreshold.toString() : null); return this; } /** * Configures the number of permitted calls when the CircuitBreaker is half open. * <p> * The size must be greater than 0. Default size is 10. */ public Resilience4jConfigurationDefinition permittedNumberOfCallsInHalfOpenState(Integer permittedNumberOfCallsInHalfOpenState) { setPermittedNumberOfCallsInHalfOpenState(permittedNumberOfCallsInHalfOpenState != null ? permittedNumberOfCallsInHalfOpenState.toString() : null); return this; } /** * Configures the size of the sliding window which is used to record the outcome of calls when the CircuitBreaker is closed. * {@code slidingWindowSize} configures the size of the sliding window. Sliding window can either be count-based or time-based. * * If {@code slidingWindowType} is COUNT_BASED, the last {@code slidingWindowSize} calls are recorded and aggregated. * If {@code slidingWindowType} is TIME_BASED, the calls of the last {@code slidingWindowSize} seconds are recorded and aggregated. * <p> * The {@code slidingWindowSize} must be greater than 0. * The {@code minimumNumberOfCalls} must be greater than 0. 
* If the slidingWindowType is COUNT_BASED, the {@code minimumNumberOfCalls} cannot be greater than {@code slidingWindowSize}. * If the slidingWindowType is TIME_BASED, you can pick whatever you want. * * Default slidingWindowSize is 100. */ public Resilience4jConfigurationDefinition slidingWindowSize(Integer slidingWindowSize) { setSlidingWindowSize(slidingWindowSize != null ? slidingWindowSize.toString() : null); return this; } /** * Configures the type of the sliding window which is used to record the outcome of calls when the CircuitBreaker is closed. * Sliding window can either be count-based or time-based. * * If {@code slidingWindowType} is COUNT_BASED, the last {@code slidingWindowSize} calls are recorded and aggregated. * If {@code slidingWindowType} is TIME_BASED, the calls of the last {@code slidingWindowSize} seconds are recorded and aggregated. * * Default slidingWindowType is COUNT_BASED. */ public Resilience4jConfigurationDefinition slidingWindowType(String slidingWindowType) { setSlidingWindowType(slidingWindowType); return this; } /** * Configures configures the minimum number of calls which are required (per sliding window period) before the CircuitBreaker can calculate the error rate. * For example, if {@code minimumNumberOfCalls} is 10, then at least 10 calls must be recorded, before the failure rate can be calculated. * If only 9 calls have been recorded the CircuitBreaker will not transition to open even if all 9 calls have failed. * * Default minimumNumberOfCalls is 100 */ public Resilience4jConfigurationDefinition minimumNumberOfCalls(Integer minimumNumberOfCalls) { setMinimumNumberOfCalls(minimumNumberOfCalls != null ? minimumNumberOfCalls.toString() : null); return this; } /** * Enables writable stack traces. When set to false, Exception.getStackTrace returns a zero length array. 
* This may be used to reduce log spam when the circuit breaker is open as the cause of the exceptions is already known (the circuit breaker is short-circuiting calls).
 */
public Resilience4jConfigurationDefinition writableStackTraceEnabled(Boolean writableStackTraceEnabled) {
    setWritableStackTraceEnabled(writableStackTraceEnabled != null ? writableStackTraceEnabled.toString() : null);
    return this;
}

/**
 * Configures the wait duration (in seconds) which specifies how long the CircuitBreaker should stay open,
 * before it switches to half open. Default value is 60 seconds.
 */
public Resilience4jConfigurationDefinition waitDurationInOpenState(Integer waitDurationInOpenState) {
    setWaitDurationInOpenState(waitDurationInOpenState != null ? waitDurationInOpenState.toString() : null);
    return this;
}

/**
 * Enables automatic transition from OPEN to HALF_OPEN state once the waitDurationInOpenState has passed.
 */
public Resilience4jConfigurationDefinition automaticTransitionFromOpenToHalfOpenEnabled(Boolean automaticTransitionFromOpenToHalfOpenEnabled) {
    setAutomaticTransitionFromOpenToHalfOpenEnabled(automaticTransitionFromOpenToHalfOpenEnabled != null ? automaticTransitionFromOpenToHalfOpenEnabled.toString() : null);
    return this;
}

/**
 * Configures a threshold in percentage. The CircuitBreaker considers a call as slow when the call duration
 * is greater than slowCallDurationThreshold. When the percentage of slow calls is equal or greater the
 * threshold, the CircuitBreaker transitions to open and starts short-circuiting calls.
 * <p>
 * The threshold must be greater than 0 and not greater than 100. Default value is 100 percentage which
 * means that all recorded calls must be slower than slowCallDurationThreshold.
 */
public Resilience4jConfigurationDefinition slowCallRateThreshold(Float slowCallRateThreshold) {
    setSlowCallRateThreshold(slowCallRateThreshold != null ? slowCallRateThreshold.toString() : null);
    return this;
}

/**
 * Configures the duration threshold (seconds) above which calls are considered as slow and increase the
 * slow calls percentage. Default value is 60 seconds.
 */
public Resilience4jConfigurationDefinition slowCallDurationThreshold(Integer slowCallDurationThreshold) {
    setSlowCallDurationThreshold(slowCallDurationThreshold != null ? slowCallDurationThreshold.toString() : null);
    return this;
}

/**
 * Whether bulkhead is enabled or not on the circuit breaker. Default is false.
 */
public Resilience4jConfigurationDefinition bulkheadEnabled(Boolean bulkheadEnabled) {
    setBulkheadEnabled(bulkheadEnabled != null ? bulkheadEnabled.toString() : null);
    return this;
}

/**
 * Configures the max amount of concurrent calls the bulkhead will support.
 */
public Resilience4jConfigurationDefinition bulkheadMaxConcurrentCalls(Integer bulkheadMaxConcurrentCalls) {
    // FIX: this previously called setBulkheadMaxWaitDuration(), silently overwriting the
    // wait-duration option and never storing the max-concurrent-calls value at all.
    setBulkheadMaxConcurrentCalls(bulkheadMaxConcurrentCalls != null ? bulkheadMaxConcurrentCalls.toString() : null);
    return this;
}

/**
 * Configures a maximum amount of time which the calling thread will wait to enter the bulkhead. If bulkhead
 * has space available, entry is guaranteed and immediate. If bulkhead is full, calling threads will contest
 * for space, if it becomes available. maxWaitDuration can be set to 0.
 * <p>
 * Note: for threads running on an event-loop or equivalent (rx computation pool, etc), setting
 * maxWaitDuration to 0 is highly recommended. Blocking an event-loop thread will most likely have a
 * negative effect on application throughput.
 */
public Resilience4jConfigurationDefinition bulkheadMaxWaitDuration(Integer bulkheadMaxWaitDuration) {
    setBulkheadMaxWaitDuration(bulkheadMaxWaitDuration != null ? bulkheadMaxWaitDuration.toString() : null);
    return this;
}

/**
 * Whether timeout is enabled or not on the circuit breaker. Default is false.
 */
public Resilience4jConfigurationDefinition timeoutEnabled(Boolean timeoutEnabled) {
    setTimeoutEnabled(timeoutEnabled != null ? timeoutEnabled.toString() : null);
    return this;
}

/**
 * References to a custom thread pool to use when timeout is enabled (uses {@link ForkJoinPool#commonPool()} by default)
 */
public Resilience4jConfigurationDefinition timeoutExecutorServiceRef(String executorServiceRef) {
    setTimeoutExecutorServiceRef(executorServiceRef);
    return this;
}

/**
 * Configures the thread execution timeout (millis). Default value is 1000 millis (1 second).
 */
public Resilience4jConfigurationDefinition timeoutDuration(Integer timeoutDuration) {
    setTimeoutDuration(timeoutDuration != null ? timeoutDuration.toString() : null);
    return this;
}

/**
 * Configures whether cancel is called on the running future. Defaults to true.
 */
public Resilience4jConfigurationDefinition timeoutCancelRunningFuture(Boolean timeoutCancelRunningFuture) {
    setTimeoutCancelRunningFuture(timeoutCancelRunningFuture != null ? timeoutCancelRunningFuture.toString() : null);
    return this;
}

/**
 * End of configuration.
 */
public CircuitBreakerDefinition end() {
    return parent;
}
}
/** * Copyright (c) 2013-2014. Francisco Contreras, Holland Salazar. * Copyright (c) 2015. Tobias Strebitzer, Francisco Contreras, Holland Salazar. * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are * permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, this list of * conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of * conditions and the following disclaimer in the documentation and/or other materials * provided with the distribution. * Neither the name of the Baker Framework nor the names of its contributors may be used to * endorse or promote products derived from this software without specific prior written * permission. * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT * SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
**/ package com.bakerframework.baker.activity; import android.annotation.SuppressLint; import android.content.Intent; import android.content.pm.ActivityInfo; import android.net.Uri; import android.os.Bundle; import android.support.annotation.NonNull; import android.support.v4.app.FragmentActivity; import android.support.v4.app.NavUtils; import android.support.v4.view.GestureDetectorCompat; import android.util.Log; import android.view.GestureDetector; import android.view.Menu; import android.view.MenuItem; import android.view.MotionEvent; import android.view.View; import android.view.Window; import android.view.WindowManager; import android.webkit.WebView; import android.webkit.WebViewClient; import android.widget.Button; import android.widget.Toast; import com.bakerframework.baker.BakerApplication; import com.bakerframework.baker.R; import com.bakerframework.baker.model.BookJson; import com.bakerframework.baker.settings.Configuration; import com.bakerframework.baker.view.CustomViewPager; import com.bakerframework.baker.view.CustomWebView; import com.bakerframework.baker.view.WebViewFragmentPagerAdapter; import com.viewpagerindicator.LinePageIndicator; import java.io.File; import java.io.UnsupportedEncodingException; import java.net.MalformedURLException; import java.net.URL; import java.util.Map; public class IssueActivity extends FragmentActivity { private boolean doubleTap = false; private boolean enableDoubleTap = true; private boolean enableBackNextButton = false; private GestureDetectorCompat gestureDetector; private WebViewFragmentPagerAdapter webViewFragmentPagerAdapter; private CustomViewPager viewPager; private BookJson jsonBook; public final static String MODAL_URL = "com.bakerframework.baker.MODAL_URL"; public final static String ORIENTATION = "com.bakerframework.baker.ORIENTATION"; private boolean ENABLE_TUTORIAL = false; public BookJson getJsonBook() { return this.jsonBook; } public CustomViewPager getViewPager() { return this.viewPager; } @Override 
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    // We would like to keep the screen on while reading the magazine
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    // Remove title bar
    this.requestWindowFeature(Window.FEATURE_NO_TITLE);
    // Remove notification bar
    this.getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);

    // Set Content View
    setContentView(R.layout.issue_activity);

    // Initialize Pager
    viewPager = (CustomViewPager) findViewById(R.id.pager);

    // Get issue name from the launching intent.
    Intent intent = getIntent();
    String issueName = intent.getStringExtra(Configuration.ISSUE_NAME);
    try {
        ENABLE_TUTORIAL = intent.getBooleanExtra(Configuration.ISSUE_ENABLE_TUTORIAL, false);
        // In standalone mode, signal the shelf to finish as well when this activity ends.
        if (!intent.getBooleanExtra(Configuration.ISSUE_RETURN_TO_SHELF, true)) {
            setResult(ShelfActivity.STANDALONE_MAGAZINE_ACTIVITY_FINISH);
        } else {
            setResult(0);
        }
        // Parse the raw book.json payload passed in by the launcher.
        jsonBook = new BookJson();
        jsonBook.setIssueName(issueName);
        Log.d("MLC-APP "+this.getClass().toString(), "THE RAW BOOK.JSON IS: " + intent.getStringExtra(Configuration.BOOK_JSON_KEY));
        jsonBook.fromJsonString(intent.getStringExtra(Configuration.BOOK_JSON_KEY));
        this.setOrientation(jsonBook.getOrientation());
        this.setPagerView(jsonBook);
        this.setEnableDoubleTap(intent.getBooleanExtra(Configuration.ISSUE_ENABLE_DOUBLE_TAP, true));
        this.setEnableBackNextButton(intent.getBooleanExtra(Configuration.ISSUE_ENABLE_BACK_NEXT_BUTTONS, false));
        detectFirstOrLastPage();
        gestureDetector = new GestureDetectorCompat(this, new MyGestureListener());
    } catch (Exception ex) {
        // NOTE(review): broad catch — any parse/setup failure just shows a toast and
        // leaves the activity in a partially-initialized state.
        ex.printStackTrace();
        Toast.makeText(this, "Not valid book.json found!", Toast.LENGTH_LONG).show();
    }

    // Plugin Callback
    BakerApplication.getInstance().getPluginManager().onIssueActivityCreated(this);
}

/**
 * Adjusts the back/next buttons for the current page: hides "Back" on the first
 * page, relabels "Next" to "Finish" on the last page. No-op when the buttons are disabled.
 */
private void detectFirstOrLastPage() {
    if (!isEnableBackNextButton()) {
        return;
    }
    int allItems = this.getJsonBook().getContents().size();
    int currentItem = this.viewPager.getCurrentItem();
    if (currentItem == (allItems - 1)) {
        // Last page: "Next" becomes "Finish".
        Log.d("MLC-APP "+this.getClass().getName(), "Last page detected.");
        ((Button)findViewById(R.id.buttonNext)).setText(getString(R.string.lbl_finish));
        if (allItems > 1) {
            findViewById(R.id.buttonBack).setVisibility(View.VISIBLE);
        }
    } else if (currentItem == 0) {
        // First page: no "Back" button.
        Log.d("MLC-APP "+this.getClass().getName(), "First page detected.");
        findViewById(R.id.buttonBack).setVisibility(View.GONE);
        ((Button)findViewById(R.id.buttonNext)).setText(getString(R.string.lbl_next));
    } else {
        findViewById(R.id.buttonBack).setVisibility(View.VISIBLE);
        ((Button)findViewById(R.id.buttonNext)).setText(getString(R.string.lbl_next));
    }
}

/** Advances to the next page, or finishes the activity when already on the last page. */
private void goNext() {
    int currentItem = this.viewPager.getCurrentItem();
    int nextItem = currentItem + 1;
    int allItems = this.getJsonBook().getContents().size();
    Log.d("MLC-APP "+this.getClass().getName(), "All items: " + allItems + ", current item: " + currentItem + ", next item: " + nextItem);
    if (nextItem < allItems) {
        this.viewPager.setCurrentItem(nextItem);
        this.detectFirstOrLastPage();
    } else if (nextItem == allItems) {
        // "Next" on the last page acts as "Finish".
        this.finish();
    }
}

/** Moves to the previous page; does nothing when already on the first page. */
private void goBack() {
    int currentItem = this.viewPager.getCurrentItem();
    int nextItem = currentItem - 1;
    int allItems = this.getJsonBook().getContents().size();
    Log.d("MLC-APP "+this.getClass().getName(), "All items: " + allItems + ", current item: " + currentItem + ", next item: " + nextItem);
    if (nextItem >= 0) {
        this.viewPager.setCurrentItem(nextItem);
        this.detectFirstOrLastPage();
    }
}

/**
 * Locks the requested screen orientation from the book.json "orientation" value
 * ("portrait"/"landscape", case-insensitive); any other value leaves it sensor-driven.
 */
private void setOrientation(String _orientation) {
    _orientation = _orientation.toUpperCase();
    final String PORTRAIT = "PORTRAIT";
    final String LANDSCAPE = "LANDSCAPE";
    switch (_orientation) {
        case PORTRAIT:
            this.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
            break;
        case LANDSCAPE:
            this.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
            break;
        default:
            this.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_SENSOR);
            break;
    }
}

public boolean
isEnableDoubleTap() { return enableDoubleTap; } public void setEnableDoubleTap(boolean enableDoubleTap) { this.enableDoubleTap = enableDoubleTap; } public boolean isEnableBackNextButton() { return enableBackNextButton; } public void setEnableBackNextButton(boolean enableBackNextButton) { this.enableBackNextButton = enableBackNextButton; if (enableBackNextButton) { findViewById(R.id.buttonNext).setVisibility(View.VISIBLE); // Click on the next button findViewById(R.id.buttonNext).setOnClickListener(new View.OnClickListener() { public void onClick(View v) { goNext(); } }); // No need for a "Back" button when there's only one page. if (this.getJsonBook().getContents().size() > 1) { findViewById(R.id.buttonBack).setVisibility(View.VISIBLE); // Click on the back button findViewById(R.id.buttonBack).setOnClickListener(new View.OnClickListener() { public void onClick(View v) { goBack(); } }); } } } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.magazine, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { case android.R.id.home: // This ID represents the Home or Up button. In the case of this // activity, the Up button is shown. Use NavUtils to allow users // to navigate up one level in the application structure. For // more details, see the Navigation pattern on Android Design: // // http://developer.android.com/design/patterns/navigation.html#up-vs-back // NavUtils.navigateUpFromSameTask(this); return true; } return super.onOptionsItemSelected(item); } @SuppressLint("SetJavaScriptEnabled") private void setPagerView(final BookJson book) { // Set asset path final String path = ENABLE_TUTORIAL ? 
Configuration.getTutorialAssetPath() : Configuration.getMagazineAssetPath();
Log.d("MLC-APP "+this.getClass().toString(), "THE PATH FOR LOADING THE PAGES WILL BE: " + path);

// ViewPager and its adapters use support library fragments, so use getSupportFragmentManager.
webViewFragmentPagerAdapter = new WebViewFragmentPagerAdapter(getSupportFragmentManager(), book, path, this);
viewPager.setAdapter(webViewFragmentPagerAdapter);
viewPager.setOffscreenPageLimit(1);

//Bind the title indicator to the adapter
LinePageIndicator indicator = (LinePageIndicator)findViewById(R.id.indicator);
indicator.setViewPager(viewPager);
indicator.setOnPageChangeListener(new CustomViewPager.SimpleOnPageChangeListener() {
    @Override
    public void onPageSelected(int position) {
        super.onPageSelected(position);
        // Re-enable paging in case a page had locked it.
        viewPager.setScrollEnabled(true);
        viewPager.setSwipeEnabled(true);
        Log.d("MLC-APP "+this.getClass().getName(), "Loading page at index: " + position);
        detectFirstOrLastPage();
    }
});

// Only show indicator in tutorial mode
if (!ENABLE_TUTORIAL) {
    indicator.setVisibility(View.GONE);
}

/* indicator.setOnPageChangeListener(new ViewPager.OnPageChangeListener() { @Override public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) { WebViewFragment fragment1 = (WebViewFragment) webViewFragmentPagerAdapter.getItem(position); WebViewFragment fragment2 = (WebViewFragment) webViewFragmentPagerAdapter.getItem(position + 1); if(fragment1 != null && fragment1.getView() != null) { fragment1.getWebView().setAlpha(1 - positionOffset); } if(fragment2 != null && fragment2.getView() != null) { fragment2.getWebView().setAlpha(positionOffset); } } @Override public void onPageSelected(int position) { } @Override public void onPageScrollStateChanged(int state) { } }); */

// Set up index webview (the overlay toggled by double-tap).
CustomWebView viewIndex = (CustomWebView) findViewById(R.id.webViewIndex);
viewIndex.getSettings().setJavaScriptEnabled(true);
viewIndex.getSettings().setUseWideViewPort(true);
viewIndex.setWebViewClient(new WebViewClient() {
    @Override
    public boolean shouldOverrideUrlLoading(WebView view, String stringUrl) {
        // mailto links will be handled by the OS.
        if (stringUrl.startsWith("mailto:")) {
            Uri uri = Uri.parse(stringUrl);
            Intent intent = new Intent(Intent.ACTION_VIEW, uri);
            startActivity(intent);
        } else {
            try {
                URL url = new URL(stringUrl);

                // We try to remove the referrer string to avoid passing it to the server in case the URL is an external link.
                String referrer = "";
                if (url.getQuery() != null) {
                    Map<String, String> variables = Configuration.splitUrlQueryString(url);
                    String finalQueryString = "";
                    for (Map.Entry<String, String> entry : variables.entrySet()) {
                        if (entry.getKey().equals("referrer")) {
                            referrer = entry.getValue();
                        } else {
                            finalQueryString += entry.getKey() + "=" + entry.getValue() + "&";
                        }
                    }
                    // Drop the trailing "&" and re-attach the remaining query, if any.
                    if (!finalQueryString.isEmpty()) {
                        finalQueryString = "?" + finalQueryString.substring(0, finalQueryString.length() - 1);
                    }
                    stringUrl = stringUrl.replace("?" + url.getQuery(), finalQueryString);
                }
                // Aaaaand that was the process of removing the referrer from the query string.

                if (!url.getProtocol().equals("file")) {
                    // Non-file URL: route by the "referrer" query parameter.
                    Log.d("MLC-APP "+"REFERRER>>>", "THE REFERRER IS: " + referrer);
                    if (referrer.toLowerCase().equals(IssueActivity.this.getString(R.string.url_external_referrer))) {
                        // External referrer: hand off to the system browser.
                        Uri uri = Uri.parse(stringUrl);
                        Intent intent = new Intent(Intent.ACTION_VIEW, uri);
                        startActivity(intent);
                    } else if (referrer.equals(IssueActivity.this.getString(R.string.url_baker_referrer))) {
                        IssueActivity.this.openLinkInModal(stringUrl);
                        return true;
                    } else {
                        // Open modal window by default
                        IssueActivity.this.openLinkInModal(stringUrl);
                        return true;
                    }
                } else {
                    // file:// URL: strip the scheme prefix and treat it as a page name.
                    stringUrl = stringUrl.substring(stringUrl.indexOf("/") + 2);
                    Log.d("MLC-APP "+">>>URL_DATA", "FINAL INTERNAL HTML FILENAME = " + stringUrl);
                    int index = IssueActivity.this.getJsonBook().getContents().indexOf(stringUrl);
                    if (index != -1) {
                        // Known page: jump the pager there and hide the index overlay.
                        Log.d("MLC-APP "+this.getClass().toString(), "Index to load: " + index + ", page: " + stringUrl);
                        IssueActivity.this.getViewPager().setCurrentItem(index);
                        view.setVisibility(View.GONE);
                    } else {
                        // If the file DOES NOT exist, we won't load it.
                        File htmlFile = new File(url.getPath());
                        if (htmlFile.exists()) {
                            // Open modal window by default
                            IssueActivity.this.openLinkInModal("file://" + url.getPath());
                            return true;
                        }
                    }
                }
            } catch (MalformedURLException ex) {
                Log.d("MLC-APP "+">>>URL_DATA", ex.getMessage());
            } catch (UnsupportedEncodingException ignored) {
                // Thrown by the query-string split; deliberately ignored (link just won't navigate).
            }
        }
        // Always consume the navigation; nothing loads in this web view directly.
        return true;
    }
});
viewIndex.loadUrl(path + book.getMagazineName() + File.separator + "index.html");
viewIndex.setBackgroundColor(0x00000000);
viewIndex.setLayerType(WebView.LAYER_TYPE_SOFTWARE, null);
}

@Override
public boolean dispatchTouchEvent(@NonNull MotionEvent event) {
    // Intercept the touch events.
    this.gestureDetector.onTouchEvent(event);
    if (doubleTap) {
        //No need to pass double tap to children
        doubleTap = false;
    } else {
        // We call the superclass implementation for the touch
        // events to continue along children.
return super.dispatchTouchEvent(event); } return true; } @Override public boolean onTouchEvent(MotionEvent event) { this.gestureDetector.onTouchEvent(event); if (doubleTap) { //No need to pass double tap to children doubleTap = false; } else { // We call the superclass implementation. return super.onTouchEvent(event); } return true; } public void openLinkInModal(final String url) { Intent intent = new Intent(this, ModalActivity.class); intent.putExtra(MODAL_URL, url); intent.putExtra(ORIENTATION, this.getRequestedOrientation()); startActivity(intent); } /** * Used to handle the gestures, but we will only need the onDoubleTap. The * other events will be passed to children views. * * @author Holland * */ class MyGestureListener extends GestureDetector.SimpleOnGestureListener { @Override public boolean onDoubleTap(MotionEvent event) { if (isEnableDoubleTap()) { doubleTap = true; CustomWebView viewIndex = (CustomWebView) findViewById(R.id.webViewIndex); //Disable Index Zoom viewIndex.getSettings().setSupportZoom(false); if (viewIndex.isShown()) { viewIndex.setVisibility(View.GONE); } else { viewIndex.setVisibility(View.VISIBLE); } } return true; } } }
package org.apache.jsp.admin; import javax.servlet.*; import javax.servlet.http.*; import javax.servlet.jsp.*; import org.apache.solr.core.SolrConfig; import org.apache.solr.core.SolrCore; import org.apache.solr.schema.IndexSchema; import java.io.File; import java.net.InetAddress; import java.io.StringWriter; import org.apache.solr.core.Config; import org.apache.solr.common.util.XML; import org.apache.solr.common.SolrException; import org.apache.lucene.LucenePackage; import java.net.UnknownHostException; public final class schema_jsp extends org.apache.jasper.runtime.HttpJspBase implements org.apache.jasper.runtime.JspSourceDependent { private static final JspFactory _jspxFactory = JspFactory.getDefaultFactory(); private static java.util.Vector _jspx_dependants; static { _jspx_dependants = new java.util.Vector(2); _jspx_dependants.add("/admin/header.jsp"); _jspx_dependants.add("/admin/_info.jsp"); } private org.apache.jasper.runtime.ResourceInjector _jspx_resourceInjector; public Object getDependants() { return _jspx_dependants; } public void _jspService(HttpServletRequest request, HttpServletResponse response) throws java.io.IOException, ServletException { PageContext pageContext = null; HttpSession session = null; ServletContext application = null; ServletConfig config = null; JspWriter out = null; Object page = this; JspWriter _jspx_out = null; PageContext _jspx_page_context = null; try { response.setContentType("text/html; charset=utf-8"); pageContext = _jspxFactory.getPageContext(this, request, response, null, true, 8192, true); _jspx_page_context = pageContext; application = pageContext.getServletContext(); config = pageContext.getServletConfig(); session = pageContext.getSession(); out = pageContext.getOut(); _jspx_out = out; _jspx_resourceInjector = (org.apache.jasper.runtime.ResourceInjector) application.getAttribute("com.sun.appserv.jsp.resource.injector"); out.write('\n'); out.write('\n'); out.write('\n'); out.write('\n'); out.write('\n'); 
out.write("\n"); out.write(" \n"); out.write("<script src=\"jquery-1.2.3.min.js\"></script>\n"); out.write("<script>\n"); out.write("\n"); out.write("(function($, libName) {\n"); out.write(" var solr = {\n"); out.write(" \n"); out.write(" //The default location of the luke handler relative to this page\n"); out.write(" // Can be overridden in the init(url) method\n"); out.write(" pathToLukeHandler: 'luke', \n"); out.write(" \n"); out.write(" // Base properties to hold schema information\n"); out.write(" schemaInfo: {},\n"); out.write(" schemaFields: {},\n"); out.write(" schemaDynamicFields: {},\n"); out.write(" schemaTypes: {},\n"); out.write(" schemaFlags: {},\n"); out.write(" \n"); out.write(" //The basic function to call to make the initail JSON calls\n"); out.write(" // takes one option parameter, the path to the luke handler\n"); out.write(" // if undefined, it will use the default, 'luke', which means\n"); out.write(" // this is being called from the same relative URL path\n"); out.write(" init: function(pathToLukeHandler) {\n"); out.write(" if (pathToLukeHandler != undefined) {\n"); out.write(" solr.pathToLukeHandler = pathToLukeHandler;\n"); out.write(" }\n"); out.write(" solr.loadSchema(function() {\n"); out.write(" solr.loadFromLukeHandler(function () {\n"); out.write(" solr.createMenu('menu');\n"); out.write(" solr.displaySchemaInfo();\n"); out.write(" });\n"); out.write(" });\n"); out.write("\n"); out.write(" },\n"); out.write(" \n"); out.write(" //load the Schema from the LukeRequestHandler\n"); out.write(" // this loads every field, and in each field the copy source/dests and flags\n"); out.write(" // we also load the list of field types, and the list of flags\n"); out.write(" loadSchema: function(func) {\n"); out.write("\t\t\t$.getJSON(solr.pathToLukeHandler +'?show=schema&wt=json', function(data) {\n"); out.write(" //populate all non field/type/flag data in the info block\n"); out.write(" $.each(data.index, function(i, item) {\n"); out.write(" 
solr.schemaInfo[i] = item;\n"); out.write(" });\n"); out.write(" \n"); out.write(" //LukeRequestHandler places these two attributes outside of the \"index\" node, but\n"); out.write(" // we want it here so we can more easily display it in the \"HOME\" block\n"); out.write(" solr.schemaInfo['uniqueKeyField'] = data.schema.uniqueKeyField;\n"); out.write(" solr.schemaInfo['defaultSearchField'] = data.schema.defaultSearchField;\n"); out.write(" //a one-off hack, because the directory string is so long and unbroken\n"); out.write(" // that it can break CSS layouts\n"); out.write(" solr.schemaInfo['directory'] = solr.schemaInfo['directory'].substring(0, solr.schemaInfo['directory'].indexOf('@')+1) + ' ' + solr.schemaInfo['directory'].substring(solr.schemaInfo['directory'].indexOf('@') +1);\n"); out.write(" \n"); out.write(" // populate the list of fields\n"); out.write("\t\t\t\t$.each(data.schema.fields, function(i,item){\n"); out.write("\t\t\t\t\tsolr.schemaFields[i]=item;\n"); out.write(" \t\t});\n"); out.write(" // populate the list of field types\n"); out.write("\t $.each(data.schema.types, function(type, ft) {\n"); out.write(" solr.schemaTypes[type] = ft;\n"); out.write(" });\n"); out.write(" //populate the list of dynamic fields\n"); out.write(" $.each(data.schema.dynamicFields, function(i, dynField) {\n"); out.write(" solr.schemaDynamicFields[i] = dynField;\n"); out.write(" });\n"); out.write(" //populate the list of flags, so we can convert flags to text in display\n"); out.write(" \t$.each(data.info.key, function(i, flag) {\n"); out.write(" \t\tsolr.schemaFlags[i] = flag;\n"); out.write(" \t \t});\n"); out.write(" \n"); out.write(" //LukeRequestHandler returns copyFields src/dest as the entire toString of the field\n"); out.write(" // we only need the field name, so here we loop through the fields, and replace the full\n"); out.write(" // field definitions with the name in the copySources/copyDests properties\n"); out.write(" $.each(solr.schemaFields, 
function(i, field) {\n"); out.write(" $.each(['copySources', 'copyDests'], function(i, copyProp) {\n"); out.write(" var newFields = new Array();\n"); out.write(" $.each(field[copyProp], function(i, fullName) {\n"); out.write(" newFields.push(fullName.substring(fullName.lastIndexOf(':')+1, fullName.indexOf('{')));\n"); out.write(" });\n"); out.write(" field[copyProp] = newFields;\n"); out.write(" });\n"); out.write(" \n"); out.write(" });\n"); out.write(" //An additional optional callback\n"); out.write(" // used in init to trigger the 2nd call to LukeRequestHandler only\n"); out.write(" // after the first one is finished\n"); out.write(" if ($.isFunction(func)) {\n"); out.write(" func(solr);\n"); out.write(" }\n"); out.write(" });\n"); out.write(" },\n"); out.write("\n"); out.write(" //further populates the loaded schema with information gathered\n"); out.write(" // from the no argument LukeRequestHandler\n"); out.write(" loadFromLukeHandler: function(func) {\n"); out.write(" $.getJSON(solr.pathToLukeHandler+'?wt=json', function(data) {\n"); out.write(" $.each(data.fields, function(i, item) {\n"); out.write(" var field = solr.schemaFields[i];\n"); out.write(" \n"); out.write(" //If undefined, then we have a dynamicField which does not show up\n"); out.write(" // in the LukeRequestHandler show=schema variant\n"); out.write(" if (field == undefined) {\n"); out.write(" field = item;\n"); out.write(" //Attach this field to its dynamicField\n"); out.write(" var base = field.dynamicBase;\n"); out.write(" var dynField = solr.schemaDynamicFields[base];\n"); out.write("\n"); out.write(" //Some fields in a multicore setting have no dynamic base, either\n"); out.write(" // the name of the core is a field that has no type or flags\n"); out.write(" if (dynField != undefined) {\n"); out.write(" \tvar synFields = dynField['fields'];\n"); out.write("\t if (synFields== undefined) {\n"); out.write(" \t synFields= new Array();\n"); out.write(" \t }\n"); out.write(" 
\tsynFields.push(i);\n"); out.write(" \tdynField['fields'] = synFields;\n"); out.write(" }\n"); out.write(" solr.schemaFields[i] = item;\n"); out.write(" }\n"); out.write(" //Populate other data in this field that would not have been loaded in\n"); out.write(" // the show=schema variant\n"); out.write(" $.each(item, function(k, v) {\n"); out.write(" if (k == 'topTerms' || k == 'histogram') {\n"); out.write(" solr.schemaFields[i][k] = solr.lukeArrayToHash(v);\n"); out.write(" } else {\n"); out.write(" solr.schemaFields[i][k] = v;\n"); out.write(" }\n"); out.write(" });\n"); out.write(" });\n"); out.write(" //another optional callback; used in the init case to lay out the page\n"); out.write(" // after the data is loaded\n"); out.write(" if ($.isFunction(func)) {\n"); out.write(" func();\n"); out.write(" }\n"); out.write(" });\n"); out.write(" },\n"); out.write(" //some elements in the JSON response are arrays, where odd/even elements\n"); out.write(" // are the name/value, and convert it to a standard map/associative array\n"); out.write(" // incoming: ['foo', 'bar', 'bat', 'baz']\n"); out.write(" // output: {'foo':'bar', 'bat':baz'}\n"); out.write(" lukeArrayToHash: function(termsArr) {\n"); out.write(" var hash = new Object();\n"); out.write("\t\t\t\tvar temp;\n"); out.write(" //topTerms comes in as an array, with odd indexes the field name\n"); out.write(" // and even indexes the number\n"); out.write("\t\t\t\t$.each(termsArr, function(i, item) {\n"); out.write("\t\t\t\t\tif (i%2 ==0) {\n"); out.write("\t\t\t\t\t\ttemp = item;\n"); out.write("\t\t\t\t\t} else {\n"); out.write("\t\t\t\t\t\thash[temp] = item;\n"); out.write("\t\t\t\t\t} \n"); out.write("\t\t\t\t});\n"); out.write("\t\t\t\treturn hash;\n"); out.write(" },\n"); out.write(" \n"); out.write(" //gets the top Terms via an Ajax call the LukeRequestHandler for that field\n"); out.write(" // The callback is used here to redraw the table after the ajax call returns\n"); out.write("\t\tgetTopTerms: 
function(fieldName, numTerms, func) {\n"); out.write(" if (numTerms == undefined) {\n"); out.write(" var numTerms = 10;\n"); out.write(" }\n"); out.write(" if (isNaN(numTerms) || numTerms <=0 || numTerms.indexOf('.') != -1) {\n"); out.write(" return;\n"); out.write(" }\n"); out.write("\t\t\t$.getJSON(solr.pathToLukeHandler+'?fl='+fieldName+'&wt=json&numTerms='+numTerms, function(data) { \n"); out.write("\t\t\t\tsolr.schemaFields[fieldName]['topTerms'] = solr.lukeArrayToHash(data.fields[fieldName].topTerms);\n"); out.write(" if ($.isFunction(func)) {\n"); out.write(" func(solr.schemaFields[fieldName]['topTerms'], fieldName);\n"); out.write(" }\n"); out.write("\t\t\t});\n"); out.write("\t\t},\n"); out.write(" \n"); out.write(" // Displays the SchemaInfo in the main content panel\n"); out.write(" // dispayed on data load, and also when 'Home' is clicked\n"); out.write(" displaySchemaInfo: function() {\n"); out.write(" $('#mainInfo').html('');\n"); out.write(" $('#topTerms').html('');\n"); out.write(" $('#histogram').html(''); \n"); out.write(" $('#mainInfo').append(solr.createSimpleText('Schema Information'));\n"); out.write(" //Make sure the uniqueKeyField and defaultSearchFields come first\n"); out.write(" $.each({'Unique Key':'uniqueKeyField', 'Default Search Field':'defaultSearchField'}, function(text, prop) {\n"); out.write(" if (solr.schemaInfo[prop] != undefined) {\n"); out.write(" $('#mainInfo').append(solr.createNameValueText(text, function(p) {\n"); out.write(" p.appendChild(solr.createLink(solr.schemaInfo[prop], solr.schemaInfo[prop]));\n"); out.write(" return p;\n"); out.write(" }));\n"); out.write(" } \n"); out.write(" });\n"); out.write(" $.each(solr.schemaInfo, function(i, item) {\n"); out.write(" if (i == 'uniqueKeyField' || i == 'defaultSearchField') {\n"); out.write(" //noop; we took care of this above\n"); out.write(" } else {\n"); out.write(" $('#mainInfo').append(solr.createNameValueText(i, item));\n"); out.write(" }\n"); out.write(" });\n"); 
out.write(" //Close all menus when we display schema home\n"); out.write(" solr.toggleMenus(undefined, ['fields', 'types', 'dynFields']);\n"); out.write(" },\n"); out.write(" \n"); out.write(" // display a dynamic field in the main content panel\n"); out.write(" displayDynamicField: function(dynamicPattern) {\n"); out.write(" var df = solr.schemaDynamicFields[dynamicPattern];\n"); out.write(" $('#mainInfo').html('');\n"); out.write(" $('#topTerms').html('');\n"); out.write(" $('#histogram').html('');\n"); out.write(" $('#mainInfo').append(solr.createSimpleText('Dynamic Field: ' + dynamicPattern));\n"); out.write(" $('#mainInfo').append(solr.createNameValueText('Fields', function(p) {\n"); out.write(" if (df.fields != undefined) {\n"); out.write(" $.each(df.fields, function(i, item) {\n"); out.write(" p.appendChild(solr.createLink(item, item));\n"); out.write(" });\n"); out.write(" } else {\n"); out.write(" p.appendChild(document.createTextNode(' None currently in index'));\n"); out.write(" }\n"); out.write(" return p;\n"); out.write(" }));\n"); out.write(" var ft = solr.schemaTypes[df.type];\n"); out.write(" $('#mainInfo').append(solr.createNameValueText('Field Type', function(p) {\n"); out.write(" p.appendChild(solr.createLink(df.type, df.type, solr.displayFieldType));\n"); out.write(" return p;\n"); out.write(" }));\n"); out.write(" if (df.flags != undefined) {\n"); out.write(" $('#mainInfo').append(solr.createNameValueText('Properties', solr.createTextFromFlags(df.flags, df.type)));\n"); out.write(" }\n"); out.write(" solr.displayAnalyzer(ft.indexAnalyzer, 'Index Analyzer', true);\n"); out.write(" solr.displayAnalyzer(ft.queryAnalyzer, 'Query Analyzer', true);\n"); out.write("\n"); out.write(" solr.toggleMenus('dynFields', ['fields', 'types'], dynamicPattern);\n"); out.write(" },\n"); out.write(" \n"); out.write(" // display a field type in the main area\n"); out.write(" displayFieldType: function(typeName) {\n"); out.write(" var ft = 
solr.schemaTypes[typeName];\n"); out.write(" $('#mainInfo').html('');\n"); out.write(" $('#topTerms').html('');\n"); out.write(" $('#histogram').html('');\n"); out.write("\t\t\t$('#mainInfo').append(solr.createSimpleText('Field Type: ' + typeName));\n"); out.write(" $('#mainInfo').append(solr.createNameValueText('Fields', function(p) {\n"); out.write(" if (ft.fields != undefined) {\n"); out.write(" $.each(ft.fields, function(i, item) {\n"); out.write(" if (solr.schemaFields[item] != undefined) {\n"); out.write(" p.appendChild(solr.createLink(item, item));\n"); out.write(" } else {\n"); out.write(" p.appendChild(solr.createLink(item, item, solr.displayDynamicField));\n"); out.write(" }\n"); out.write(" p.appendChild(document.createTextNode(' '));\n"); out.write(" });\n"); out.write(" } else {\n"); out.write(" p.appendChild(document.createTextNode('No fields in index'));\n"); out.write(" }\n"); out.write(" return p;\n"); out.write(" }));\n"); out.write(" $('#mainInfo').append(solr.createNameValueText('Tokenized', ft.tokenized));\n"); out.write(" $('#mainInfo').append(solr.createNameValueText('Class Name', ft.className));\n"); out.write("\n"); out.write(" solr.displayAnalyzer(ft.indexAnalyzer, 'Index Analyzer');\n"); out.write(" solr.displayAnalyzer(ft.queryAnalyzer, 'Query Analyzer');\n"); out.write(" solr.toggleMenus('types', ['fields', 'dynFields'], typeName);\n"); out.write(" },\n"); out.write(" \n"); out.write(" //Displays information about an Analyzer in the main content area\n"); out.write(" displayAnalyzer: function(analyzer, type, shouldCollapse) {\n"); out.write(" var tid = type.replace(' ', '');\n"); out.write(" var collapse = shouldCollapse && (analyzer.tokenizer != undefined || analyzer.filters != undefined);\n"); out.write(" $('#mainInfo').append(solr.createNameValueText(type, function(p) {\n"); out.write(" p.appendChild(document.createTextNode(analyzer.className + ' '));\n"); out.write(" if (collapse) {\n"); out.write(" 
p.appendChild(solr.createLink(type, 'Details', function() {\n"); out.write(" $('#'+tid).toggle(\"slow\");\n"); out.write(" }));\n"); out.write(" }\n"); out.write(" return p;\n"); out.write(" }));\n"); out.write(" var adiv = document.createElement('div');\n"); out.write(" adiv.id=tid;\n"); out.write(" adiv.className='analyzer';\n"); out.write(" if (collapse) {\n"); out.write(" adiv.style.display='none';\n"); out.write(" }\n"); out.write(" if (analyzer.tokenizer != undefined) {\n"); out.write(" adiv.appendChild(solr.createNameValueText(\"Tokenizer Class\", analyzer.tokenizer.className));\n"); out.write(" }\n"); out.write(" if (analyzer.filters != undefined) {\n"); out.write(" adiv.appendChild(solr.createNameValueText('Filters', ''));\n"); out.write(" var f = document.createElement('ol');\n"); out.write(" $.each(analyzer.filters, function(i, item) {\n"); out.write(" var fil = document.createElement('li');\n"); out.write(" var filterText = item.className;\n"); out.write(" if (item.args != undefined) {\n"); out.write(" filterText += ' args:{'\n"); out.write(" $.each(item.args, function(fi, fitem) {\n"); out.write(" filterText += fi + ': ' + fitem + ' ';\n"); out.write(" });\n"); out.write(" filterText +='}';\n"); out.write(" fil.innerHTML = filterText;\n"); out.write(" f.appendChild(fil);\n"); out.write(" }\n"); out.write(" });\n"); out.write(" adiv.appendChild(f);\n"); out.write(" }\n"); out.write(" $('#mainInfo').append(adiv);\n"); out.write(" },\n"); out.write(" \n"); out.write(" // display information about a Field in the main content area\n"); out.write(" // and its TopTerms and Histogram in related divs\n"); out.write("\t\tdisplayField: function(fieldName) {\n"); out.write(" var field = solr.schemaFields[fieldName];\n"); out.write(" var isDynamic = field.dynamicBase != undefined ? 
true : false;\n"); out.write(" var ft;\n"); out.write(" var ftName;\n"); out.write(" $('#mainInfo').html(''); \n"); out.write(" $('#topTerms').html('');\n"); out.write(" $('#histogram').html('');\n"); out.write(" $('#mainInfo').append(solr.createSimpleText('Field: ' + fieldName));\n"); out.write(" \n"); out.write(" //For regular fields, we take their properties; for dynamicFields,\n"); out.write(" // we take them from their dynamicField definitions\n"); out.write(" if (isDynamic) {\n"); out.write(" ftName = solr.schemaDynamicFields[field.dynamicBase].type\n"); out.write(" $('#mainInfo').append(solr.createNameValueText('Dynamically Created From Pattern', function(p) {\n"); out.write(" p.appendChild(solr.createLink(field.dynamicBase, field.dynamicBase, solr.displayDynamicField));\n"); out.write(" return p;\n"); out.write(" }));\n"); out.write(" } else {\n"); out.write(" ftName = field.type;\n"); out.write(" }\t\t\t\n"); out.write(" ft = solr.schemaTypes[field.type];\n"); out.write(" $('#mainInfo').append(solr.createNameValueText('Field Type', function(p) {\n"); out.write(" p.appendChild(solr.createLink(ftName, ftName, solr.displayFieldType));\n"); out.write(" return p;\n"); out.write(" }));\n"); out.write("\t\t\tif (solr.schemaFlags != '') {\n"); out.write(" $.each({'flags':'Properties', 'schema':'Schema', 'index':'Index'}, function(prop, text) {\n"); out.write(" if (field[prop] != undefined) {\n"); out.write(" $('#mainInfo').append(solr.createNameValueText(text, solr.createTextFromFlags(field[prop], ft)));\n"); out.write(" }\n"); out.write(" });\n"); out.write(" } \n"); out.write(" $.each({'copySources':'Copied From', 'copyDests':'Copied Into'}, function(prop, text) {\n"); out.write(" if (field[prop] != undefined && field[prop] != '') {\n"); out.write(" $('#mainInfo').append(solr.createNameValueText(text, function(p) {\n"); out.write(" $.each(field[prop], function(i, item) {\n"); out.write(" p.appendChild(solr.createLink(item, item));\n"); out.write(" 
p.appendChild(document.createTextNode(' '));\n"); out.write(" });\n"); out.write(" return p;\n"); out.write(" }));\n"); out.write(" }\n"); out.write(" });\n"); out.write(" if (field.positionIncrementGap != undefined) {\n"); out.write(" $('#mainInfo').append(solr.createNameValueText('Position Increment Gap', field.positionIncrementGap));\n"); out.write(" }\n"); out.write(" solr.displayAnalyzer(ft.indexAnalyzer, 'Index Analyzer', true);\n"); out.write(" solr.displayAnalyzer(ft.queryAnalyzer, 'Query Analyzer', true);\n"); out.write(" if (field.docs != undefined) {\n"); out.write(" $('#mainInfo').append(solr.createNameValueText('Docs', field.docs));\n"); out.write(" }\n"); out.write(" if (field.distinct != undefined) {\n"); out.write(" $('#mainInfo').append(solr.createNameValueText('Distinct', field.distinct));\n"); out.write(" }\n"); out.write("\n"); out.write(" if (field.topTerms != undefined) {\n"); out.write(" solr.displayTopTerms(field.topTerms, fieldName);\n"); out.write(" }\n"); out.write("\n"); out.write(" if (field.histogram != undefined) {\n"); out.write(" solr.drawHistogram(field.histogram);\n"); out.write(" }\n"); out.write(" solr.toggleMenus('fields', ['types', 'dynFields'], fieldName);\n"); out.write("\t\t},\t\n"); out.write("\n"); out.write(" //utility method to create a single sentence list of properties from a flag set\n"); out.write(" // or pass it on, if the flags are (unstored field)\n"); out.write("\t\tcreateTextFromFlags: function(fieldFlags, fieldType) {\n"); out.write("\t\t\tvar value;\n"); out.write(" if (fieldFlags != '(unstored field)') {\n"); out.write(" var value = ''; \n"); out.write(" for (var i=0;i<fieldFlags.length;i++) {\n"); out.write(" if (fieldFlags.charAt(i) != '-') {\n"); out.write(" value += solr.schemaFlags[fieldFlags.charAt(i)];\n"); out.write(" value += ', ';\n"); out.write(" }\n"); out.write(" }\n"); out.write(" value = value.substring(0, value.length-2);\n"); out.write("\t\t\t} else {\n"); out.write(" value = 
fieldFlags;\n"); out.write(" }\n"); out.write("\t\t\treturn value;\n"); out.write("\t\t},\n"); out.write("\n"); out.write(" //Store the currently highlighted menu item, as otherwise we\n"); out.write(" // must traverse all li menu items, which is very slow on schemas with\n"); out.write(" // large number of fields\n"); out.write(" // for example $('#menu ul li').siblings().removeClass('selected');\n"); out.write(" currentlyHighlightedMenuId: undefined,\n"); out.write(" \n"); out.write(" //add a highlight to the currently selected menu item, and remove\n"); out.write(" // the highlights from all other menu items\n"); out.write(" highlightMenuItem: function(idToSelect) {\n"); out.write(" if (solr.currentlyHighlightedMenuId != undefined) {\n"); out.write(" $('#'+solr.currentlyHighlightedMenuId).removeClass('selected');\n"); out.write(" }\n"); out.write(" $('#'+idToSelect).addClass('selected');\n"); out.write(" solr.currentlyHighlightedMenuId = idToSelect;\n"); out.write(" },\n"); out.write(" \n"); out.write(" //Opens one menu group, close the others, and optionally highlight one\n"); out.write(" // item, which should be in the opened menu\n"); out.write(" toggleMenus: function(idToShow, idsToHide, idToSelect) {\n"); out.write(" if (idToSelect != undefined) {\n"); out.write(" solr.highlightMenuItem(idToShow + idToSelect);\n"); out.write(" }\n"); out.write(" $('#'+idToShow).show(\"slow\");\n"); out.write(" $.each(idsToHide, function(i, idToHide) {\n"); out.write(" $('#'+idToHide).hide(\"slow\");\n"); out.write(" });\n"); out.write(" },\n"); out.write(" \n"); out.write(" //A utility method to create a paragraph, which takes two arguments;\n"); out.write(" // an opening text, and either text or a callback function to follow\n"); out.write(" // any callback function must return the node passed into it\n"); out.write(" createNameValueText: function(openingText, func) {\n"); out.write(" var p = document.createElement('p');\n"); out.write(" 
p.appendChild(solr.createSimpleText(openingText + ': ', 'b'));\n"); out.write(" return solr.applyFuncToNode(p, func);\n"); out.write(" },\n"); out.write("\n"); out.write(" //utility method to create an HTML text element node\n"); out.write(" // with the literal text to place, and an optional function to apply\n"); out.write(" // any callback function must return the node passed into it \n"); out.write(" createSimpleText: function(text, n, func) {\n"); out.write(" if (n == undefined) {\n"); out.write(" n = 'h2';\n"); out.write(" }\n"); out.write(" var no= document.createElement(n);\n"); out.write(" no.appendChild(document.createTextNode(text));\n"); out.write(" return solr.applyFuncToNode(no, func);\n"); out.write(" },\n"); out.write(" \n"); out.write(" //Utility method that applies a function or a string to append\n"); out.write(" // an additional child to a node\n"); out.write(" applyFuncToNode: function(no, func) {\n"); out.write(" if ($.isFunction(func)) {\n"); out.write(" no = func(no);\n"); out.write(" } else {\n"); out.write(" // if it is not a function, append it as a string\n"); out.write(" if (func != undefined) {\n"); out.write(" no.appendChild(document.createTextNode(' ' + func));\n"); out.write(" }\n"); out.write(" }\n"); out.write(" return no;\n"); out.write(" },\n"); out.write(" \n"); out.write(" //show a table of top terms for a given field\n"); out.write(" displayTopTerms: function(topTerms, fieldName) {\n"); out.write(" $('#topTerms').html('');\n"); out.write(" var tbl = document.createElement('table');\n"); out.write(" tbl.className='topTerms';\n"); out.write(" var thead= document.createElement('thead');\n"); out.write(" var headerRow = document.createElement('tr');\n"); out.write(" $.each(['term', 'frequency'], function() {\n"); out.write(" var cell = document.createElement('th');\n"); out.write(" cell.innerHTML= this;\n"); out.write(" headerRow.appendChild(cell);\n"); out.write(" });\n"); out.write(" thead.appendChild(headerRow);\n"); 
out.write(" tbl.appendChild(thead);\n"); out.write(" var tbody = document.createElement('tbody');\n"); out.write(" \n"); out.write(" var numTerms = 0;\n"); out.write(" $.each(topTerms, function(term, count) {\n"); out.write(" var row = document.createElement('tr');\n"); out.write(" var c1 = document.createElement('td');\n"); out.write(" c1.innerHTML=term;\n"); out.write(" var c2 = document.createElement('td');\n"); out.write(" c2.innerHTML=count;\n"); out.write(" row.appendChild(c1);\n"); out.write(" row.appendChild(c2);\n"); out.write(" tbody.appendChild(row);\n"); out.write(" numTerms++;\n"); out.write(" });\n"); out.write(" tbl.appendChild(tbody);\n"); out.write(" \n"); out.write(" //create a header along with an input widget so the user\n"); out.write(" // can request a different number of Top Terms\n"); out.write(" var h2 = document.createElement('h2');\n"); out.write(" h2.appendChild(document.createTextNode('Top '));\n"); out.write(" var termsGetter = document.createElement('input');\n"); out.write(" termsGetter.type='text';\n"); out.write(" termsGetter.size=5;\n"); out.write(" termsGetter.value=numTerms;\n"); out.write(" \n"); out.write(" termsGetter.onchange=function() {\n"); out.write(" solr.getTopTerms(fieldName, this.value, solr.displayTopTerms);\n"); out.write(" }\n"); out.write(" h2.appendChild(termsGetter);\n"); out.write(" h2.appendChild(document.createTextNode(' Terms'));\n"); out.write(" $('#topTerms').append(h2);\n"); out.write(" \n"); out.write(" document.getElementById('topTerms').appendChild(tbl);\n"); out.write(" $('#topTerms').append(tbl);\n"); out.write(" },\n"); out.write(" \n"); out.write(" //draws a histogram, taking a map of values and an optional total height and width for the table\n"); out.write(" drawHistogram: function(histogram, totalHeightArg, totalWidthArg) {\n"); out.write(" $('#histogram').html('');\n"); out.write(" $('#histogram').append(solr.createSimpleText('Histogram'));\n"); out.write(" var max = 0;\n"); out.write(" var 
bars =0;\n"); out.write(" //find the # of columns and max value in the histogram \n"); out.write(" // so we can create an appropriately scaled chart\n"); out.write(" $.each(histogram, function(i, item) {\n"); out.write(" if (item > max) max = item;\n"); out.write(" bars += 1;\n"); out.write(" });\n"); out.write(" if (max ==0) {\n"); out.write(" $('#histogram').append(solr.createNameValueText('No histogram available'));\n"); out.write(" } else {\n"); out.write(" var totalHeight = totalHeightArg == undefined ? 208 : totalHeightArg;\n"); out.write(" var totalWidth = totalWidthArg == undefined ? 160 : totalWidthArg;\n"); out.write(" var tbl = document.createElement('table');\n"); out.write(" tbl.style.width=totalWidth+'px';\n"); out.write(" tbl.className = 'histogram';\n"); out.write(" var h = document.createElement('tbody');\n"); out.write(" var r = document.createElement('tr');\n"); out.write(" var r2 = document.createElement('tr');\n"); out.write(" $.each(histogram, function(i, item) {\n"); out.write(" var c = document.createElement('td');\n"); out.write(" c.innerHTML=item+'<div style=\"width:'+totalWidth/bars+'px;height:'+(item*totalHeight/max)+'px;background:blue\">&nbsp</div>';\n"); out.write(" r.appendChild(c);\n"); out.write(" var c2 = document.createElement('td');\n"); out.write(" c2.innerHTML='' + i;\n"); out.write(" r2.appendChild(c2);\n"); out.write(" });\n"); out.write(" h.appendChild(r);\n"); out.write(" h.appendChild(r2);\n"); out.write(" tbl.appendChild(h);\n"); out.write(" $('#histogram').append(tbl);\n"); out.write(" }\n"); out.write(" },\n"); out.write(" \n"); out.write(" //dynamically creates a link to be appended\n"); out.write(" createLink: function(idToDisplay, linkText, linkFunction) {\n"); out.write(" var link = document.createElement('a');\n"); out.write(" if (!$.isFunction(linkFunction)) {\n"); out.write(" linkFunction = solr.displayField\n"); out.write(" }\n"); out.write(" link.onclick=function() {\n"); out.write(" 
linkFunction(idToDisplay);\n"); out.write(" return false;\n"); out.write(" };\n"); out.write(" link.href='#';\n"); out.write(" link.innerHTML=linkText;\n"); out.write(" return link;\n"); out.write(" },\n"); out.write(" \n"); out.write(" //Creates a menu header that can expand or collapse its children\n"); out.write(" createMenuHeader: function(text, idToShow, idsToHide) {\n"); out.write(" var head = document.createElement('h3');\n"); out.write(" var a = document.createElement('a');\n"); out.write(" a.onclick=function() {\n"); out.write(" solr.toggleMenus(idToShow, idsToHide);\n"); out.write(" return false;\n"); out.write(" };\n"); out.write(" a.href='#';\n"); out.write(" a.innerHTML=text;\n"); out.write(" head.appendChild(a);\n"); out.write(" return head;\n"); out.write(" },\n"); out.write(" \n"); out.write(" //Creates an element in a menu (e.g. each field in a list of fields)\n"); out.write(" createMenuItem: function(tagName, text, link, type, func) {\n"); out.write(" var fieldEle = document.createElement('li');\n"); out.write(" fieldEle.id=type+text;\n"); out.write(" var funct = func == undefined ? 
undefined : func;\n"); out.write(" fieldEle.appendChild(solr.createLink(text, link, funct));\n"); out.write(" return fieldEle;\n"); out.write(" },\n"); out.write(" \n"); out.write(" //populates the menu div\n"); out.write(" createMenu: function(menuId) {\n"); out.write(" var m = $('#'+menuId);\n"); out.write(" var home = document.createElement('h2');\n"); out.write(" home.appendChild(solr.createLink('Home', 'Home', solr.displaySchemaInfo));\n"); out.write(" m.append(home);\n"); out.write(" m.append(solr.createMenuHeader('Fields', 'fields', ['types', 'dynFields']));\n"); out.write(" var fields= document.createElement('ul');\n"); out.write(" fields.style.display='none';\n"); out.write(" fields.id = 'fields';\n"); out.write(" $.each(solr.schemaFields, function(i, item) {\n"); out.write(" fields.appendChild(solr.createMenuItem('li', i, i, fields.id));\n"); out.write(" });\n"); out.write(" m.append(fields);\n"); out.write(" m.append(solr.createMenuHeader('Dynamic Fields', 'dynFields', ['fields', 'types']));\n"); out.write(" var dyns = document.createElement('ul');\n"); out.write(" dyns.style.display = 'none';\n"); out.write(" dyns.id = 'dynFields';\n"); out.write(" $.each(solr.schemaDynamicFields, function(i, item) {\n"); out.write(" dyns.appendChild(solr.createMenuItem('li', i,i, dyns.id, solr.displayDynamicField));\n"); out.write(" });\n"); out.write(" m.append(dyns);\n"); out.write(" m.append(solr.createMenuHeader('Field Types', 'types', ['fields', 'dynFields']));\n"); out.write(" var types = document.createElement('ul');\n"); out.write(" types.style.display='none';\n"); out.write(" types.id='types';\n"); out.write(" $.each(this.schemaTypes, function(i, item) {\n"); out.write(" types.appendChild(solr.createMenuItem('li', i, i,types.id, solr.displayFieldType));\n"); out.write(" });\n"); out.write(" m.append(types);\n"); out.write(" }\n"); out.write(" };\n"); out.write(" \n"); out.write("\twindow[libName] = solr;\n"); out.write("})(jQuery, 'solr');\n"); 
out.write("$(document).ready(function() {\n"); out.write(" solr.init();\n"); out.write("});\n"); out.write(" \n"); out.write("$(window).unload( function() {\n"); out.write(" solr = null;\n"); out.write(" $('#mainInfo').html('');\n"); out.write(" $('#menu').html('');\n"); out.write(" $('#topTerms').html('');\n"); out.write(" $('#histogram').html('');\n"); out.write("});\n"); out.write(" \n"); out.write("</script>\n"); out.write('\n'); out.write('\n'); out.write("\n"); out.write("<html>\n"); out.write("<head>\n"); request.setCharacterEncoding("UTF-8"); out.write('\n'); out.write("\n"); out.write("\n"); out.write("\n"); out.write("\n"); out.write("\n"); out.write("\n"); out.write("\n"); out.write("\n"); out.write("\n"); // SolrCore core = (SolrCore) request.getAttribute("org.apache.solr.SolrCore"); if (core == null) { response.sendError( 404, "missing core name in path" ); return; } SolrConfig solrConfig = core.getSolrConfig(); int port = request.getServerPort(); IndexSchema schema = core.getSchema(); // enabled/disabled is purely from the point of a load-balancer // and has no effect on local server function. If there is no healthcheck // configured, don't put any status on the admin pages. String enabledStatus = null; String enabledFile = solrConfig.get("admin/healthcheck/text()",null); boolean isEnabled = false; if (enabledFile!=null) { isEnabled = new File(enabledFile).exists(); } String collectionName = schema!=null ? 
schema.getName():"unknown"; InetAddress addr = null; String hostname = "unknown"; try { addr = InetAddress.getLocalHost(); hostname = addr.getCanonicalHostName(); } catch (UnknownHostException e) { //default to unknown } String defaultSearch = ""; { StringWriter tmp = new StringWriter(); XML.escapeCharData (solrConfig.get("admin/defaultQuery/text()", null), tmp); defaultSearch = tmp.toString(); } String solrImplVersion = ""; String solrSpecVersion = ""; String luceneImplVersion = ""; String luceneSpecVersion = ""; { Package p; StringWriter tmp; p = SolrCore.class.getPackage(); tmp = new StringWriter(); solrImplVersion = p.getImplementationVersion(); if (null != solrImplVersion) { XML.escapeCharData(solrImplVersion, tmp); solrImplVersion = tmp.toString(); } tmp = new StringWriter(); solrSpecVersion = p.getSpecificationVersion() ; if (null != solrSpecVersion) { XML.escapeCharData(solrSpecVersion, tmp); solrSpecVersion = tmp.toString(); } p = LucenePackage.class.getPackage(); tmp = new StringWriter(); luceneImplVersion = p.getImplementationVersion(); if (null != luceneImplVersion) { XML.escapeCharData(luceneImplVersion, tmp); luceneImplVersion = tmp.toString(); } tmp = new StringWriter(); luceneSpecVersion = p.getSpecificationVersion() ; if (null != luceneSpecVersion) { XML.escapeCharData(luceneSpecVersion, tmp); luceneSpecVersion = tmp.toString(); } } String cwd=System.getProperty("user.dir"); String solrHome= solrConfig.getInstanceDir(); out.write("\n"); out.write("<script>\n"); out.write("var host_name=\""); out.print( hostname ); out.write("\"\n"); out.write("</script>\n"); out.write("\n"); out.write("<meta http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\">\n"); out.write("<link rel=\"stylesheet\" type=\"text/css\" href=\"solr-admin.css\">\n"); out.write("<link rel=\"icon\" href=\"favicon.ico\" type=\"image/ico\"></link>\n"); out.write("<link rel=\"shortcut icon\" href=\"favicon.ico\" type=\"image/ico\"></link>\n"); out.write("<title>Solr admin 
page</title>\n"); out.write("</head>\n"); out.write("\n"); out.write("<body>\n"); out.write("<a href=\".\"><img border=\"0\" align=\"right\" height=\"61\" width=\"142\" src=\"solr-head.gif\" alt=\"Solr\"></a>\n"); out.write("<h1>Solr Admin ("); out.print( collectionName ); out.write(')'); out.write('\n'); out.print( enabledStatus==null ? "" : (isEnabled ? " - Enabled" : " - Disabled") ); out.write(" </h1>\n"); out.write("\n"); out.print( hostname ); out.write(':'); out.print( port ); out.write("<br/>\n"); out.write("cwd="); out.print( cwd ); out.write(" SolrHome="); out.print( solrHome ); out.write('\n'); out.write("\n"); out.write("<div id=\"schemaTop\">\n"); out.write("<h2>Schema Browser | See <a href=\"file/?file=schema.xml\">Raw Schema.xml</a></h2>\n"); out.write("</div>\n"); out.write("<div id=\"menu\"></div>\n"); out.write("<div id=\"content\">\n"); out.write("<div id=\"mainInfo\"><h2>Please wait...loading and parsing Schema Information from LukeRequestHandler</h2><p>If it does not load or your browser is not javascript or ajax-capable, you may wish to examine your schema using the <a href=\"luke?wt=xslt&tr=luke.xsl\">Server side transformed LukeRequestHandler</a> or the raw <a href=\"file/?file=schema.xml\">schema.xml</a> instead.</div>\n"); out.write("<div id=\"topTerms\"></div>\n"); out.write("<div id=\"histogram\"></div>\n"); out.write("</div>\n"); out.write("</body>\n"); out.write("</html>\n"); } catch (Throwable t) { if (!(t instanceof SkipPageException)){ out = _jspx_out; if (out != null && out.getBufferSize() != 0) out.clearBuffer(); if (_jspx_page_context != null) _jspx_page_context.handlePageException(t); } } finally { _jspxFactory.releasePageContext(_jspx_page_context); } } }
/*
Copyright 2014-2016 Intel Corporation

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package apple.messageui;

import apple.NSObject;
import apple.foundation.NSArray;
import apple.foundation.NSBundle;
import apple.foundation.NSCoder;
import apple.foundation.NSData;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import apple.messageui.protocol.MFMailComposeViewControllerDelegate;
import apple.uikit.UINavigationController;
import apple.uikit.UIViewController;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.MappedReturn;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;

/**
 * MFMailComposeViewController
 * <p>
 * Generated Multi-OS Engine (MOE) binding for the MessageUI framework class
 * {@code MFMailComposeViewController}, which provides an interface for editing
 * and sending email.
 * <p>
 * The MFMailComposeViewController class manages all user interaction. The client
 * needs to set the recipient or recipients. The client may also set the subject
 * and the body of the message. Attachments may be added, if so desired. After
 * setup, the client needs to only display the view.
 * <p>
 * The provided delegate will be informed of the user's composition completion
 * and how they chose to complete the operation.
 * <p>
 * Prior to use, clients should verify the user has set up the device for sending
 * email via <tt>+[MFMailComposeViewController canSendMail]</tt>.
 * <p>
 * NOTE(review): this class is {@code @Generated} binding glue — selector strings
 * and annotations are load-bearing for the Objective-C runtime bridge and must
 * not be edited by hand; regenerate instead.
 */
@Generated
@Library("MessageUI")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class MFMailComposeViewController extends UINavigationController {
    static {
        // Registers this binding class with the NatJ runtime before any use.
        NatJ.register();
    }

    /** Wraps an existing native peer; used internally by the NatJ bridge. */
    @Generated
    protected MFMailComposeViewController(Pointer peer) {
        super(peer);
    }

    @Generated
    @Selector("accessInstanceVariablesDirectly")
    public static native boolean accessInstanceVariablesDirectly();

    /** Binding of Objective-C {@code +alloc}; the returned object is owned by the caller. */
    @Generated
    @Owned
    @Selector("alloc")
    public static native MFMailComposeViewController alloc();

    @Owned
    @Generated
    @Selector("allocWithZone:")
    public static native MFMailComposeViewController allocWithZone(VoidPtr zone);

    @Generated
    @Selector("attemptRotationToDeviceOrientation")
    public static native void attemptRotationToDeviceOrientation();

    @Generated
    @Selector("automaticallyNotifiesObserversForKey:")
    public static native boolean automaticallyNotifiesObserversForKey(String key);

    /**
     * canSendMail
     * <p>
     * Returns <tt>YES</tt> if the user has set up the device for sending email.
     * <p>
     * The client may continue to set the recipients and content if the return value was <tt>YES</tt>. If <tt>NO</tt>
     * was the result, the client has a couple options. It may choose to simply notify the user of the inability to
     * send mail, or it may issue a "mailto" URL via <tt>-[UIApplication openURL:]</tt>.
     */
    @Generated
    @Selector("canSendMail")
    public static native boolean canSendMail();

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:")
    public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
    public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
            @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
            @Mapped(ObjCObjectMapper.class) Object anArgument);

    @Generated
    @Selector("classFallbacksForKeyedArchiver")
    public static native NSArray<String> classFallbacksForKeyedArchiver();

    @Generated
    @Selector("classForKeyedUnarchiver")
    public static native Class classForKeyedUnarchiver();

    @Generated
    @Selector("clearTextInputContextIdentifier:")
    public static native void clearTextInputContextIdentifier(String identifier);

    @Generated
    @Selector("debugDescription")
    public static native String debugDescription_static();

    @Generated
    @Selector("description")
    public static native String description_static();

    @Generated
    @Selector("hash")
    @NUInt
    public static native long hash_static();

    @Generated
    @Selector("instanceMethodForSelector:")
    @FunctionPtr(name = "call_instanceMethodForSelector_ret")
    public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);

    @Generated
    @Selector("instanceMethodSignatureForSelector:")
    public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);

    @Generated
    @Selector("instancesRespondToSelector:")
    public static native boolean instancesRespondToSelector(SEL aSelector);

    @Generated
    @Selector("isSubclassOfClass:")
    public static native boolean isSubclassOfClass(Class aClass);

    @Generated
    @Selector("keyPathsForValuesAffectingValueForKey:")
    public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);

    /** Binding of Objective-C {@code +new}; renamed because {@code new} is a Java keyword. */
    @Generated
    @Owned
    @Selector("new")
    public static native MFMailComposeViewController new_objc();

    @Generated
    @Selector("resolveClassMethod:")
    public static native boolean resolveClassMethod(SEL sel);

    @Generated
    @Selector("resolveInstanceMethod:")
    public static native boolean resolveInstanceMethod(SEL sel);

    @Generated
    @Selector("setVersion:")
    public static native void setVersion_static(@NInt long aVersion);

    @Generated
    @Selector("superclass")
    public static native Class superclass_static();

    @Generated
    @Selector("version")
    @NInt
    public static native long version_static();

    /**
     * addAttachmentData:mimeType:fileName:
     * <p>
     * This method adds the specified attachment to the email message.
     * <p>
     * This method adds the specified attachment to the email message. This should be called prior to display.
     * Attachments will be appended to the end of the message.
     *
     * @param attachment NSData containing the contents of the attachment. Must not be <tt>nil</tt>.
     * @param mimeType   NSString specifying the MIME type for the attachment, as specified by the IANA
     *                   (http://www.iana.org/assignments/media-types/). Must not be <tt>nil</tt>.
     * @param filename   NSString specifying the intended filename for the attachment. This is displayed below
     *                   the attachment's icon if the attachment is not decoded when displayed. Must not be <tt>nil</tt>.
     */
    @Generated
    @Selector("addAttachmentData:mimeType:fileName:")
    public native void addAttachmentDataMimeTypeFileName(NSData attachment, String mimeType, String filename);

    @Generated
    @Selector("init")
    public native MFMailComposeViewController init();

    @Generated
    @Selector("initWithCoder:")
    public native MFMailComposeViewController initWithCoder(NSCoder coder);

    @Generated
    @Selector("initWithNavigationBarClass:toolbarClass:")
    public native MFMailComposeViewController initWithNavigationBarClassToolbarClass(Class navigationBarClass,
            Class toolbarClass);

    @Generated
    @Selector("initWithNibName:bundle:")
    public native MFMailComposeViewController initWithNibNameBundle(String nibNameOrNil, NSBundle nibBundleOrNil);

    @Generated
    @Selector("initWithRootViewController:")
    public native MFMailComposeViewController initWithRootViewController(UIViewController rootViewController);

    /**
     * [@property] mailComposeDelegate
     * <p>
     * This property is the delegate for the MFMailComposeViewControllerDelegate method callbacks.
     */
    @Generated
    @Selector("mailComposeDelegate")
    @MappedReturn(ObjCObjectMapper.class)
    public native MFMailComposeViewControllerDelegate mailComposeDelegate();

    /**
     * setBccRecipients:
     * <p>
     * This method sets the BCC header for the email message to the specified email addresses.
     * <p>
     * This method will set the BCC header for the email message. This should be called prior to display.
     * <p>
     * Recipient addresses should be specified as per RFC5322.
     * <p>
     * After the view has been presented to the user, this method will no longer change the value.
     *
     * @param bccRecipients A NSArray of NSString instances specifying the email addresses of recipients.
     */
    @Generated
    @Selector("setBccRecipients:")
    public native void setBccRecipients(NSArray<String> bccRecipients);

    /**
     * setCcRecipients:
     * <p>
     * This method sets the CC header for the email message to the specified email addresses.
     * <p>
     * This method will set the CC header for the email message. This should be called prior to display.
     * <p>
     * Recipient addresses should be specified as per RFC5322.
     * <p>
     * After the view has been presented to the user, this method will no longer change the value.
     *
     * @param ccRecipients A NSArray of NSString instances specifying the email addresses of recipients.
     */
    @Generated
    @Selector("setCcRecipients:")
    public native void setCcRecipients(NSArray<String> ccRecipients);

    /**
     * [@property] mailComposeDelegate
     * <p>
     * Raw setter for the delegate property; does NOT manage the Java-side
     * association that keeps the delegate reachable. Prefer
     * {@link #setMailComposeDelegate}.
     */
    @Generated
    @Selector("setMailComposeDelegate:")
    public native void setMailComposeDelegate_unsafe(
            @Mapped(ObjCObjectMapper.class) MFMailComposeViewControllerDelegate value);

    /**
     * [@property] mailComposeDelegate
     * <p>
     * Safe setter: associates the new delegate with this controller (so the Java
     * object is not collected while the native side still references it), then
     * dissociates the previously set delegate, if any.
     */
    @Generated
    public void setMailComposeDelegate(@Mapped(ObjCObjectMapper.class) MFMailComposeViewControllerDelegate value) {
        Object __old = mailComposeDelegate();
        if (value != null) {
            org.moe.natj.objc.ObjCRuntime.associateObjCObject(this, value);
        }
        setMailComposeDelegate_unsafe(value);
        if (__old != null) {
            org.moe.natj.objc.ObjCRuntime.dissociateObjCObject(this, __old);
        }
    }

    /**
     * setMessageBody:isHTML:
     * <p>
     * This method sets the body of the email message to the specified content.
     * <p>
     * This method will set the body of the email message. This should be called prior to display.
     * The user's signature, if specified, will be added after the body content.
     *
     * @param body   A NSString containing the body contents of the email message.
     * @param isHTML A boolean value indicating if the body argument is to be interpreted as HTML content.
     */
    @Generated
    @Selector("setMessageBody:isHTML:")
    public native void setMessageBodyIsHTML(String body, boolean isHTML);

    /**
     * setSubject:
     * <p>
     * This method sets the Subject header for the email message.
     * <p>
     * This method will set the Subject header for the email message. This should be called prior to display.
     * Newlines are removed from the parameter.
     * <p>
     * After the view has been presented to the user, this method will no longer change the value.
     *
     * @param subject A NSString specifying the message's Subject header.
     */
    @Generated
    @Selector("setSubject:")
    public native void setSubject(String subject);

    /**
     * setToRecipients:
     * <p>
     * This method sets the To header for the email message to the specified email addresses.
     * <p>
     * This method will set the To header for the email message. This should be called prior to display.
     * <p>
     * Recipient addresses should be specified as per RFC5322.
     * <p>
     * After the view has been presented to the user, this method will no longer change the value.
     *
     * @param toRecipients A NSArray of NSString instances specifying the email addresses of recipients.
     */
    @Generated
    @Selector("setToRecipients:")
    public native void setToRecipients(NSArray<String> toRecipients);

    /**
     * setPreferredSendingEmailAddress:
     * <p>
     * This method sets the preferred sending account of the email message.
     * <p>
     * This method will set the sending account of the message to the specified email address if the user has an
     * account with such an address set up. If there is no account with such an address, the default account will
     * be used instead.
     * The sending email address should be specified as per RFC5322.
     * After the view has been presented to the user, this method will no longer change the value.
     *
     * @param emailAddress A NSString specifying the preferred email address used to send this message.
     */
    @Generated
    @Selector("setPreferredSendingEmailAddress:")
    public native void setPreferredSendingEmailAddress(String emailAddress);
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.hops.transaction.context;

import io.hops.exception.StorageCallPreventedException;
import io.hops.exception.StorageException;
import io.hops.exception.TransactionContextException;
import io.hops.metadata.common.FinderType;
import io.hops.metadata.hdfs.dal.InvalidateBlockDataAccess;
import io.hops.metadata.hdfs.entity.InvalidatedBlock;
import io.hops.transaction.lock.TransactionLocks;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

/**
 * Transaction-scoped cache of {@link InvalidatedBlock} entities.
 * <p>
 * Finder calls first consult the in-memory snapshot inherited from
 * {@code BaseReplicaContext}; only on a miss do they go to {@code dataAccess},
 * after announcing the storage access via {@code aboutToAccessStorage} (which
 * may throw {@link StorageCallPreventedException}). Accumulated adds/removes/
 * modifications are flushed to storage in {@link #prepare}.
 */
public class InvalidatedBlockContext
    extends BaseReplicaContext<BlockPK.ReplicaPK, InvalidatedBlock> {

  private final InvalidateBlockDataAccess<InvalidatedBlock> dataAccess;
  // true once findAll() has loaded every invalidated block into this context,
  // so later "All" finders can be served purely from memory
  private boolean allInvBlocksRead = false;

  public InvalidatedBlockContext(InvalidateBlockDataAccess dataAccess) {
    this.dataAccess = dataAccess;
  }

  /** Registers an added/updated invalidated block in the context snapshot. */
  @Override
  public void update(InvalidatedBlock hopInvalidatedBlock)
      throws TransactionContextException {
    super.update(hopInvalidatedBlock);
    if (isLogDebugEnabled()) {
      log("added-invblock", "bid", hopInvalidatedBlock.getBlockId(), "sid",
          hopInvalidatedBlock.getStorageId());
    }
  }

  /** Registers a removal of an invalidated block in the context snapshot. */
  @Override
  public void remove(InvalidatedBlock hopInvalidatedBlock)
      throws TransactionContextException {
    super.remove(hopInvalidatedBlock);
    if (isLogDebugEnabled()) {
      log("removed-invblock", "bid", hopInvalidatedBlock.getBlockId(), "sid",
          hopInvalidatedBlock.getStorageId());
    }
  }

  /**
   * Single-entity finder. Only the full primary key
   * (blockId, storageId, inodeId) is supported; any other finder type is a
   * programming error.
   */
  @Override
  public InvalidatedBlock find(FinderType<InvalidatedBlock> finder,
      Object... params) throws TransactionContextException, StorageException {
    InvalidatedBlock.Finder iFinder = (InvalidatedBlock.Finder) finder;
    switch (iFinder) {
      case ByBlockIdStorageIdAndINodeId:
        return findByPrimaryKey(iFinder, params);
    }
    throw new RuntimeException(UNSUPPORTED_FINDER);
  }

  /** Multi-entity finder dispatch; see the individual findBy* helpers. */
  @Override
  public Collection<InvalidatedBlock> findList(
      FinderType<InvalidatedBlock> finder, Object... params)
      throws TransactionContextException, StorageException {
    InvalidatedBlock.Finder iFinder = (InvalidatedBlock.Finder) finder;
    switch (iFinder) {
      case ByBlockIdAndINodeId:
        return findByBlockId(iFinder, params);
      case ByINodeId:
        return findByINodeId(iFinder, params);
      case All:
        return findAll(iFinder);
      case ByStorageId:
        return findByStorageId(iFinder, params);
      case ByINodeIds:
        return findByINodeIds(iFinder, params);
    }
    throw new RuntimeException(UNSUPPORTED_FINDER);
  }

  /** Flushes removed/added/modified entities to storage at commit time. */
  @Override
  public void prepare(TransactionLocks tlm)
      throws TransactionContextException, StorageException {
    dataAccess.prepare(getRemoved(), getAdded(), getModified());
  }

  /** Resets the context; the "all blocks cached" flag must be cleared too. */
  @Override
  public void clear() throws TransactionContextException {
    super.clear();
    allInvBlocksRead = false;
  }

  @Override
  InvalidatedBlock cloneEntity(InvalidatedBlock hopInvalidatedBlock) {
    return cloneEntity(hopInvalidatedBlock, hopInvalidatedBlock.getInodeId());
  }

  /** Copies the entity, substituting {@code inodeId} for the original's. */
  @Override
  InvalidatedBlock cloneEntity(InvalidatedBlock hopInvalidatedBlock,
      int inodeId) {
    return new InvalidatedBlock(hopInvalidatedBlock.getStorageId(),
        hopInvalidatedBlock.getBlockId(), inodeId);
  }

  @Override
  BlockPK.ReplicaPK getKey(InvalidatedBlock hopInvalidatedBlock) {
    return new BlockPK.ReplicaPK(hopInvalidatedBlock.getBlockId(),
        hopInvalidatedBlock.getInodeId(),
        hopInvalidatedBlock.getStorageId());
  }

  /** A snapshot change only matters here when something was removed. */
  @Override
  protected boolean snapshotChanged() {
    return !getRemoved().isEmpty();
  }

  /**
   * Looks up one block by its full primary key.
   * Served from the cache when the key — or any covering block/inode scan —
   * is already present; otherwise fetched from storage and cached.
   */
  private InvalidatedBlock findByPrimaryKey(InvalidatedBlock.Finder iFinder,
      Object[] params) throws StorageCallPreventedException, StorageException {
    final long blockId = (Long) params[0];
    final int storageId = (Integer) params[1];
    final int inodeId = (Integer) params[2];
    final BlockPK.ReplicaPK key =
        new BlockPK.ReplicaPK(blockId, inodeId, storageId);
    InvalidatedBlock result = null;
    if (contains(key) || containsByBlock(blockId) || containsByINode(inodeId)) {
      result = get(key);
      hit(iFinder, result, "bid", blockId, "sid", storageId, "inodeId",
          inodeId);
    } else {
      aboutToAccessStorage(iFinder, params);
      result = dataAccess.findInvBlockByPkey(blockId, storageId, inodeId);
      gotFromDB(key, result);
      miss(iFinder, result, "bid", blockId, "sid", storageId, "inodeId",
          inodeId);
    }
    return result;
  }

  /**
   * Finds all invalidated blocks of one block id (params: blockId, inodeId).
   * DB results are sorted before being cached under the block key.
   */
  private List<InvalidatedBlock> findByBlockId(InvalidatedBlock.Finder iFinder,
      Object[] params) throws StorageCallPreventedException, StorageException {
    final long blockId = (Long) params[0];
    final int inodeId = (Integer) params[1];
    List<InvalidatedBlock> result = null;
    if (containsByBlock(blockId) || containsByINode(inodeId)) {
      result = getByBlock(blockId);
      hit(iFinder, result, "bid", blockId, "inodeId", inodeId);
    } else {
      aboutToAccessStorage(iFinder, params);
      result = dataAccess.findInvalidatedBlocksByBlockId(blockId, inodeId);
      Collections.sort(result);
      gotFromDB(new BlockPK(blockId), result);
      miss(iFinder, result, "bid", blockId, "inodeId", inodeId);
    }
    return result;
  }

  /** Finds all invalidated blocks belonging to one inode (params: inodeId). */
  private List<InvalidatedBlock> findByINodeId(InvalidatedBlock.Finder iFinder,
      Object[] params) throws StorageCallPreventedException, StorageException {
    final int inodeId = (Integer) params[0];
    List<InvalidatedBlock> result = null;
    if (containsByINode(inodeId)) {
      result = getByINode(inodeId);
      hit(iFinder, result, "inodeId", inodeId);
    } else {
      aboutToAccessStorage(iFinder, params);
      result = dataAccess.findInvalidatedBlocksByINodeId(inodeId);
      gotFromDB(new BlockPK(inodeId), result);
      miss(iFinder, result, "inodeId", inodeId);
    }
    return result;
  }

  /**
   * Returns every invalidated block. Hits memory only after a previous
   * findAll in the same transaction set {@code allInvBlocksRead}.
   */
  private List<InvalidatedBlock> findAll(InvalidatedBlock.Finder iFinder)
      throws StorageCallPreventedException, StorageException {
    List<InvalidatedBlock> result = null;
    if (allInvBlocksRead) {
      result = new ArrayList<>(getAll());
      hit(iFinder, result);
    } else {
      aboutToAccessStorage(iFinder);
      result = dataAccess.findAllInvalidatedBlocks();
      gotFromDB(result);
      allInvBlocksRead = true;
      miss(iFinder, result);
    }
    return result;
  }

  /**
   * Finds invalidated blocks on one storage (params: blockIds[], inodeIds[],
   * sid). Always goes to storage; results are cached under the per-replica
   * keys built from the given id arrays.
   */
  private List<InvalidatedBlock> findByStorageId(
      InvalidatedBlock.Finder iFinder, Object[] params)
      throws StorageCallPreventedException, StorageException {
    final long[] blockIds = (long[]) params[0];
    final int[] inodeIds = (int[]) params[1];
    final int sid = (Integer) params[2];
    aboutToAccessStorage(iFinder, params);
    List<InvalidatedBlock> result =
        dataAccess.findInvalidatedBlockByStorageId(sid);
    gotFromDB(BlockPK.ReplicaPK.getKeys(blockIds, inodeIds, sid), result);
    miss(iFinder, result, "bids", Arrays.toString(blockIds), "inodeIds",
        Arrays.toString(inodeIds), "sid", sid);
    return result;
  }

  /**
   * Finds invalidated blocks for a batch of inodes (params: inodeIds[]).
   * Always goes to storage and caches the results under the inode keys.
   */
  private List<InvalidatedBlock> findByINodeIds(InvalidatedBlock.Finder iFinder,
      Object[] params) throws StorageCallPreventedException, StorageException {
    final int[] inodeIds = (int[]) params[0];
    aboutToAccessStorage(iFinder, params);
    List<InvalidatedBlock> result =
        dataAccess.findInvalidatedBlocksByINodeIds(inodeIds);
    gotFromDB(BlockPK.ReplicaPK.getKeys(inodeIds), result);
    miss(iFinder, result, "inodeIds", Arrays.toString(inodeIds));
    return result;
  }
}
/**
 *  Copyright 2005-2014 Red Hat, Inc.
 *
 *  Red Hat licenses this file to you under the Apache License, version
 *  2.0 (the "License"); you may not use this file except in compliance
 *  with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 *  implied.  See the License for the specific language governing
 *  permissions and limitations under the License.
 */
package io.fabric8.itests.autoscale;

import io.fabric8.api.FabricRequirements;
import io.fabric8.utils.Closeables;
import io.fabric8.utils.Strings;
import io.fabric8.utils.XmlUtils;
import io.fabric8.testkit.FabricAssertions;
import io.fabric8.testkit.FabricController;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.arquillian.test.api.ArquillianResource;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Parameterized integration test that, for each fabric8 Maven archetype:
 * generates a project from it, builds and deploys the project as a fabric
 * profile, asserts a container can be provisioned for the profile, then
 * scales it back down and removes the requirements again.
 */
@RunWith(Arquillian.class)
public class ArchetypeTest {
    /** System property naming a single archetype artifactId to test. */
    public static final String ARTIFACTID_SYSTEM_PROPERTY = "ArchetypeTest.artifactId";

    private static final transient Logger LOG = LoggerFactory.getLogger(ArchetypeTest.class);

    @Parameterized.Parameter
    private String archetypeId;

    @ArquillianResource
    protected FabricController fabricController;

    @Rule
    public ParameterRule<String> rule = new ParameterRule<>(findArchetypeIds());

    protected static Map<String, ArchetypeInfo> archetypeIdToArchetypeInfoMap = new TreeMap<>();

    // lazily ensures the mq-default broker profile gets a container exactly once per run
    boolean addedBroker = false;

    /**
     * Returns all the available artifact Ids for the archetypes, filtering out any known
     * broken archetypes; or just a single artifact id if the {@link #ARTIFACTID_SYSTEM_PROPERTY}
     * system property is set (making it easy to test just a single archetype id).
     */
    public static Set<String> findArchetypeIds() {
        try {
            List<ArchetypeInfo> archetypes = findArchetypes();
            for (ArchetypeInfo archetype : archetypes) {
                archetypeIdToArchetypeInfoMap.put(archetype.artifactId, archetype);
            }
            Set<String> artifactIds = archetypeIdToArchetypeInfoMap.keySet();
            Set<String> answer = new TreeSet<>();

            // lets allow a specific archetypes to be run via a system property...
            String testArtifactId = System.getProperty(ARTIFACTID_SYSTEM_PROPERTY);
            for (String artifactId : artifactIds) {
                boolean ignore = false;
                if (Strings.isNotBlank(testArtifactId)) {
                    // only run the archetype(s) matching the requested id
                    if (!artifactId.contains(testArtifactId)) {
                        ignore = true;
                    }
                } else {
                    // TODO lets ignore broken archetypes
                    if (artifactId.contains("drools")) {
                        ignore = true;
                    }
                }
                if (ignore) {
                    ParameterRule.addIgnoredTest("ArchetypeTest(" + artifactId + ")");
                } else {
                    answer.add(artifactId);
                }
            }
            if (Strings.isNotBlank(testArtifactId) && answer.isEmpty()) {
                fail("System property " + ARTIFACTID_SYSTEM_PROPERTY + " value of '" + testArtifactId
                        + "' is not a valid artifact id for the fabric8 archetypes");
            }
            return answer;
        } catch (Exception e) {
            // fix: LOG.error already records the stack trace; the duplicate
            // printStackTrace() call has been removed
            LOG.error("Failed to find archetype IDs: " + e, e);
            fail("Failed to find archetype ids: " + e);
            // fix: typed Collections.emptySet() instead of the raw EMPTY_SET constant
            return Collections.emptySet();
        }
    }

    @Override
    public String toString() {
        return "ArchetypeTest(" + archetypeId + ")";
    }

    /**
     * End-to-end check for one archetype: generate the project, deploy it as a
     * profile, scale a container up for it, then scale down and clean up the
     * requirements again.
     */
    @Test
    public void testCreateArchetype() throws Exception {
        ArchetypeInfo archetype = archetypeIdToArchetypeInfoMap.get(archetypeId);
        assertNotNull("No archetype found for id: " + archetypeId, archetype);

        File mavenSettingsFile = getMavenSettingsFile();
        assertFileExists(mavenSettingsFile);

        // create a fabric
        // generate and deploy archetypes
        File workDir = new File(System.getProperty("basedir", "."), "target/generated-projects");
        workDir.mkdirs();

        String profileId = assertGenerateArchetype(archetype, workDir, mavenSettingsFile);
        assertNotNull("Should have a profile ID for " + archetype, profileId);

        FabricRequirements requirements = fabricController.getRequirements();
        if (!addedBroker) {
            addedBroker = true;
            requirements.profile("mq-default").minimumInstances(1);
            FabricAssertions.assertRequirementsSatisfied(fabricController, requirements);
        }

        // deploying each profile should have caused the requirements to be updated to add them all now
        // so lets load the requirements and assert they are satisfied
        requirements.profile(profileId).minimumInstances(1);
        FabricAssertions.assertRequirementsSatisfied(fabricController, requirements);
        System.out.println();
        System.out.println("Managed to create a container for " + profileId + ". Now lets stop it");
        System.out.println();

        // now lets force the container to be stopped
        requirements.profile(profileId).minimumInstances(0).maximumInstances(0);
        FabricAssertions.assertRequirementsSatisfied(fabricController, requirements);
        System.out.println();
        System.out.println("Stopped a container for " + profileId + ". Now lets clear requirements");
        System.out.println();

        requirements.removeProfileRequirements(profileId);
        FabricAssertions.assertRequirementsSatisfied(fabricController, requirements);
        System.out.println();
        System.out.println("Removed requirements for profile " + profileId);
        System.out.println();
    }

    /**
     * Generates a project from the archetype into {@code workDir} via the Maven
     * archetype plugin, validates the generated pom, then builds and deploys it
     * with {@code fabric8:deploy}.
     *
     * @return the profile id the generated project was deployed as
     */
    protected String assertGenerateArchetype(ArchetypeInfo archetype, File workDir, File mavenSettingsFile) throws Exception {
        System.out.println();
        System.out.println(archetype.groupId + "/" + archetype.artifactId + "/" + archetype.version + " : generate archetype...");
        System.out.println("======================================================================================");
        System.out.println();
        System.out.println();
        System.out.println("in folder: " + workDir.getCanonicalPath() + " from " + archetype);

        List<String> commands = new ArrayList<String>();

        String groupId = "dummy.itest";
        String artifactId = "mytest-" + archetype.artifactId;
        // strip the conventional "-archetype" suffix from the generated project's artifactId
        String archetypePostfix = "-archetype";
        if (artifactId.endsWith(archetypePostfix)) {
            artifactId = artifactId.substring(0, artifactId.length() - archetypePostfix.length());
        }
        String version = "1.2.0-SNAPSHOT";
        String packageName = (groupId + "." + artifactId).replace('-', '.');
        String mvn = "mvn";

        commands.addAll(Arrays.asList(mvn,
                "org.apache.maven.plugins:maven-archetype-plugin:2.2:generate",
                property("interactiveMode", "false"),
                property("archetypeGroupId", archetype.groupId),
                property("archetypeArtifactId", archetype.artifactId),
                property("archetypeVersion", archetype.version),
                property("groupId", groupId),
                property("artifactId", artifactId),
                property("version", version),
                property("package", packageName),
                property("fabric8.profile", artifactId)
        ));
        assertExecuteCommand(commands, workDir);

        File projectDir = new File(workDir, artifactId);
        assertFolderExists(projectDir);
        File projectPom = new File(projectDir, "pom.xml");
        assertValidGeneratedArchetypePom(projectPom);

        commands = new ArrayList<String>();
        String profileId = artifactId;
        commands.addAll(Arrays.asList(mvn,
                "--settings",
                mavenSettingsFile.getCanonicalPath(),
                "clean",
                "fabric8:deploy",
                property("fabric8.profile", profileId),
                // deploy the profile without provisioning any container yet
                property("fabric8.minInstanceCount", "0")
        ));

        System.out.println();
        System.out.println();
        System.out.println("======================================================================================");
        System.out.println("building with maven in dir: " + projectDir.getCanonicalPath());
        System.out.println(commands);
        System.out.println("======================================================================================");
        System.out.println();
        System.out.println();

        assertExecuteCommand(commands, projectDir);
        return profileId;
    }

    /**
     * Asserts the generated pom declares a non-blank {@code fabric8.profile}
     * property.
     */
    protected void assertValidGeneratedArchetypePom(File projectPom) throws Exception {
        assertFileExists(projectPom);

        // lets check we define a profile ID
        Document document = XmlUtils.parseDoc(projectPom);
        List<Element> elementList = XmlUtils.getElements(document, "properties");
        String pomFileName = projectPom.getCanonicalPath();
        assertTrue("Should have found a <properties> element in " + pomFileName, elementList.size() > 0);
        Element propertiesElement = elementList.get(0);
        String profileId = XmlUtils.getTextContentOfElement(propertiesElement, "fabric8.profile");
        assertTrue("Should have found a <fabric8.profile> value in the <properties> of " + pomFileName + " but was: " + profileId,
                Strings.isNotBlank(profileId));
    }

    /**
     * Runs the given command in {@code workDir}, echoing its merged output, and
     * asserts it exits with code 0.
     */
    protected void assertExecuteCommand(List<String> commands, File workDir) throws IOException, InterruptedException {
        ProcessBuilder builder = new ProcessBuilder(commands).directory(workDir).redirectErrorStream(true);
        Process process = builder.start();
        // fix: try-with-resources closes the reader deterministically instead of
        // Closeables.closeQuietly in a finally block; the catch is narrowed to
        // IOException, the only checked exception readLine can throw
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        } catch (IOException e) {
            LOG.error("Failed to process results of " + commands + ": " + e, e);
        }
        int exitCode = process.waitFor();
        System.out.println("command exit code: " + exitCode);
        assertEquals("process exit code for " + commands, 0, exitCode);
    }

    /** Formats a {@code -Dname=value} Maven command-line property. */
    protected static String property(String name, String value) {
        return "-D" + name + "=" + value;
    }

    /**
     * Parses the archetype catalog XML and returns one {@link ArchetypeInfo}
     * per {@code <archetype>} element.
     */
    public static List<ArchetypeInfo> findArchetypes() throws Exception {
        File archetypeCatalogXml = getArchetypeCatalog();
        List<ArchetypeInfo> answer = new ArrayList<ArchetypeInfo>();
        Document document = XmlUtils.parseDoc(archetypeCatalogXml);
        List<Element> elementList = XmlUtils.getElements(document, "archetype");
        assertTrue("Should have found at least one archetype in the catalog file " + archetypeCatalogXml,
                elementList.size() > 0);
        for (Element element : elementList) {
            String groupId = XmlUtils.getTextContentOfElement(element, "groupId");
            String artifactId = XmlUtils.getTextContentOfElement(element, "artifactId");
            String version = XmlUtils.getTextContentOfElement(element, "version");
            String repository = XmlUtils.getTextContentOfElement(element, "repository");
            assertNotBlank("groupId", groupId);
            assertNotBlank("artifactId", artifactId);
            assertNotBlank("version", version);
            ArchetypeInfo info = new ArchetypeInfo(groupId, artifactId, version, repository);
            answer.add(info);
            System.out.println("Created " + info);
        }
        return answer;
    }

    /** Resolves the repository root relative to the test module's basedir. */
    protected static File getRootProjectDir() throws IOException {
        String basedir = System.getProperty("basedir", ".");
        File answer = new File(basedir, "../../..");
        assertFolderExists(answer);
        return answer;
    }

    protected static File getArchetypeCatalog() throws IOException {
        File answer = new File(getRootProjectDir(), "tooling/archetype-builder/target/classes/archetype-catalog.xml");
        assertFileExists(answer);
        return answer;
    }

    protected static File getMavenSettingsFile() throws IOException {
        File answer = new File(getRootProjectDir(), "itests/paxexam/basic/src/test/resources/maven-settings.xml");
        assertFileExists(answer);
        return answer;
    }

    /** Asserts {@code text} is not blank, naming the offending field. */
    public static void assertNotBlank(String name, String text) {
        // fix: the failure message previously printed the value under the label
        // "name"; it now includes the actual field name being validated
        assertTrue(name + " should not be blank: " + text, Strings.isNotBlank(text));
    }

    public static void assertFolderExists(File dir) throws IOException {
        assertTrue("the folder does not exist! " + dir.getCanonicalPath(), dir.exists());
        assertTrue("the path is not a folder! " + dir.getCanonicalPath(), dir.isDirectory());
    }

    public static void assertFileExists(File file) throws IOException {
        assertTrue("the file does not exist! " + file.getCanonicalPath(), file.exists());
        assertTrue("the path is not a file! " + file.getCanonicalPath(), file.isFile());
    }

    /**
     * Immutable value describing one archetype catalog entry.
     * Note: {@code repository} is intentionally excluded from equals/hashCode —
     * identity is (groupId, artifactId, version).
     */
    public static class ArchetypeInfo {
        private final String groupId;
        private final String artifactId;
        private final String version;
        private final String repository;

        public ArchetypeInfo(String groupId, String artifactId, String version, String repository) {
            this.groupId = groupId;
            this.artifactId = artifactId;
            this.version = version;
            this.repository = repository;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;

            ArchetypeInfo that = (ArchetypeInfo) o;

            if (!artifactId.equals(that.artifactId)) return false;
            if (!groupId.equals(that.groupId)) return false;
            if (!version.equals(that.version)) return false;

            return true;
        }

        @Override
        public int hashCode() {
            int result = groupId.hashCode();
            result = 31 * result + artifactId.hashCode();
            result = 31 * result + version.hashCode();
            return result;
        }

        @Override
        public String toString() {
            return "ArchetypeInfo{" +
                    "groupId='" + groupId + '\'' +
                    ", artifactId='" + artifactId + '\'' +
                    ", version='" + version + '\'' +
                    ", repository='" + repository + '\'' +
                    '}';
        }
    }
}
package slogo_front;

import java.util.ArrayList;
import java.util.Locale;
import java.util.ResourceBundle;

import slogo_back.Model;
import view_panels.CommandHistory;
import view_panels.CommandLine;
import view_panels.ControlPanel;
import view_panels.ToolBar;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.scene.Node;
import javafx.scene.Scene;
import javafx.scene.canvas.Canvas;
import javafx.scene.control.MenuItem;
import javafx.scene.control.TextField;
import javafx.scene.input.KeyEvent;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.BorderPane;
import javafx.scene.layout.Pane;
import javafx.scene.paint.Color;

/**
 * Top-level SLogo window: assembles the toolbar, command history, command line,
 * control panel and the (switchable) turtle {@code Display} into a BorderPane
 * scene, and exposes hooks for a {@code Manager} to attach input handlers.
 * Several handlers below are placeholders whose bodies are still commented out.
 */
public class View {

    private Scene scene;
    // controller that owns this view; set via setManager after construction
    private Manager manager;

    // components of view
    private BorderPane root;
    private ControlPanel control;
    private CommandHistory history;
    private ToolBar tools;
    private CommandLine commandLine;

    // probably going to remove this instance
    private Display activeDisplay;

    // keeps track of the multiple displays
    private ArrayList<Display> displayList = new ArrayList<>();

    // private VBox displayBackground ;

    // Canvas Dimensions
    private static final int xCanvas = 1000;
    private static final int yCanvas = 600;

    // Command Line Dimensions
    private static final int COMMAND_HEIGHT = 50;

    // Locale Resources
    ResourceBundle labels;
    Locale[] supportedLocales = { Locale.ENGLISH, Locale.FRENCH };
    Locale defaultLocale = Locale.ENGLISH;

    // TODO add array for turtles
    // TODO add "addTurtle()" method

    /**
     * Builds the scene graph: toolbar on top, history on the right, command
     * line on the bottom, control panel on the left and a gray placeholder
     * display in the center until a real display is activated.
     */
    public View() {
        // resource bundles
        // activeDisplay = new Display(xCanvas, yCanvas);
        // displayList.add(activeDisplay);
        labels = ResourceBundle.getBundle("resources.languages/LabelsBundle",
                defaultLocale);
        root = new BorderPane();
        control = new ControlPanel(10, 200);
        control.setView(this);
        history = new CommandHistory(200);
        tools = new ToolBar(this);
        commandLine = new CommandLine(COMMAND_HEIGHT);
        root.setTop(tools.getToolBar());
        root.setRight(history.getCommandHistory());
        root.setBottom(commandLine.getCommandLine());
        // root.setCenter(null);
        // Pane placeHolder = new Pane();
        Display placeHolder = new Display(xCanvas, yCanvas);
        placeHolder.changeBackground(Color.GRAY);
        root.setCenter(placeHolder.getDisplay());
        root.setLeft(control.getControlpanel());
        scene = new Scene(root);
        scene.getStylesheets().add("css/view.css");
        // initializeDisplays();
        initializeToolbarHandlers();
    }

    // private void initializeDisplays() {
    //
    //
    // }

    // TODO
    // wires the toolbar's "+" menu to the addDisplay handler below
    private void initializeToolbarHandlers() {
        tools.setMenuPlusHandler(addDisplay);
    }

    public CommandLine getCommandLine() {
        return commandLine;
    }

    public CommandHistory getCommandHistory() {
        return history;
    }

    public Display getActiveDisplay() {
        return activeDisplay;
    }

    // attach new display when active display set
    /**
     * Makes the display at {@code displayIndex} the active one and shows it in
     * the center of the layout. NOTE(review): no bounds check — an invalid
     * index throws IndexOutOfBoundsException (the guard is commented out).
     */
    public void setActiveDisplay(int displayIndex) {
        // if (displayIndex >= displayList.size()) {
        // System.out.println("too big");
        // return;
        // }
        activeDisplay = displayList.get(displayIndex);
        root.setCenter(activeDisplay.getDisplay());
        return;
    }

    /** Appends a new fixed-size display to the list (does not activate it). */
    public void createDisplay() {
        // activeDisplay = new Display(xCanvas, yCanvas);
        displayList.add(new Display(xCanvas, yCanvas));
    }

    public Scene getScene() {
        return scene;
    }

    // gets and clears command line text
    // TODO handlers need to be working
    // protected String commandLineText() {
    // return null;
    // // String parse = commandLine.getText();
    // // commandLine.clear();
    // // return parse;
    // }

    // figureout if this should be private somehow
    // protected void addHistoryText(String text) {
    // commandItems.add(">> " + text);
    // commandList.setItems(commandItems);
    // }

    // protected void addVariableText(String variable, double value) {
    // variables.put(variable, value);
    // variableItems.clear();
    // for (String s : variables.keySet()) {
    // variableItems.add(s + ": " + variables.get(s));
    // }
    // varList.setItems(variableItems);
    // }

    // private in the future?
    // protected void clearHistoryText() {
    // history.getChildren().clear();
    // }

    // private Node makeMenu() {
    // menuBar = new MenuBar();
    // // Command Reference Sheet
    // Menu menuHelp = new Menu("Help");
    // MenuItem commandReference = new MenuItem("Command Reference");
    // commandReference.setOnAction(launchWebView);
    // menuHelp.getItems().add(commandReference);
    //
    // // Language
    // Menu menuLanguage = new Menu("Change Language");
    // setLanguages(menuLanguage);
    // // setLanguageListener(menuLanguage);
    // menuBar.getMenus().addAll(menuHelp, menuLanguage);
    // return menuBar;
    //
    // }

    // private void setLanguages(Menu languageMenu) {
    // MenuItem english = new MenuItem("English");
    // english.setOnAction(new EventHandler<ActionEvent>() {
    // public void handle(ActionEvent t) {
    // labels = ResourceBundle
    // .getBundle("resources.languages/LabelsBundle",
    // supportedLocales[0]);
    // //TODO setLabels();
    // }
    // });
    //
    // MenuItem french = new MenuItem("French");
    // french.setOnAction(new EventHandler<ActionEvent>() {
    // public void handle(ActionEvent t) {
    // labels = ResourceBundle
    // .getBundle("resources.languages/LabelsBundle",
    // supportedLocales[1]);
    // //TODO setLabels();
    // }
    // });
    //
    // languageMenu.getItems().addAll(english, french);
    // }

    // private Node makeCommandHistory() {
    // commandList = new ListView<String>();
    // commandList.setMaxWidth(200);
    // resetHistory();
    //
    // return commandList;
    // }

    // private void resetHistory(){
    // commandItems = FXCollections.observableArrayList("Command History");
    // commandList.setItems(commandItems);
    //
    // commandList.setOnMouseClicked(historyEvent);
    // }

    // set manager
    public void setManager(Manager m) {
        manager = m;
    }

    public Manager getManager() {
        return manager;
    }

    public CommandHistory getHistory(){
        return history;
    }

    // sets command line
    /** Forwards a key handler (typically from the Manager) to the command line. */
    public void setCommandLine(EventHandler<KeyEvent> handler) {
        commandLine.setCommandHandler(handler);
    }

    /** Forwards a mouse handler (typically from the Manager) to the history panel. */
    public void setCommandHistory(EventHandler<MouseEvent> handler) {
        history.setCommandHandler(handler);
    }

    // Toolbar "+" action: creates a new Display, adds a menu item named/ID'd by
    // the current display count, and wires that item to activate its display.
    // NOTE(review): the MenuItem is created before createDisplay() runs, so the
    // id equals the new display's eventual index — confirm this ordering is
    // intentional.
    private EventHandler<ActionEvent> addDisplay = new EventHandler<ActionEvent>() {
        public void handle(ActionEvent event) {
            MenuItem display = new MenuItem("Display " + displayList.size());
            display.setId("" + displayList.size());
            display.setOnAction(new EventHandler<ActionEvent>() {
                public void handle(ActionEvent event) {
                    setActiveDisplay(Integer.parseInt(display.getId()));
                }
            });
            //add menuitem to the menu
            createDisplay();
            tools.addMenuPlusItem(display);
        }
    };

    // placeholder: body is commented out, so this handler currently does nothing
    private EventHandler<MouseEvent> changeTurtleImage = new EventHandler<MouseEvent>() { // within manager?
        public void handle(MouseEvent event) {
            // Stage fileSystem = new Stage();
            // FileChooser fileChooser = new FileChooser();
            // fileChooser.setTitle("Open Resource File");
            // File file = fileChooser.showOpenDialog(fileSystem);
            // // some method to change the imageview in display
            // String filePath = "C:"+file.getPath();
            // int index = filePath.indexOf("/images");
            // // System.out.println(index);
            // // System.out.println(filePath);
            // // System.out.println(filePath.substring(index));
            // Turtle turtle = manager.getTurtle();
            // display.hide(turtle);
            // manager.getTurtle().setImage(filePath.substring(index));
            // display.show(turtle);
        }
    };

    // placeholder: body is commented out, so this handler currently does nothing
    private EventHandler<MouseEvent> clear = new EventHandler<MouseEvent>() {
        public void handle(MouseEvent event) {
            // display.clearScreen(manager.getTurtle());
            // addHistoryText("clearscreen");
        }
    };

    // private EventHandler<MouseEvent> clearHistory = new
    // EventHandler<MouseEvent>() {
    // public void handle(MouseEvent event) {
    // resetHistory();
    // }
    //
    // };

    // private EventHandler<MouseEvent> changeBackground = new EventHandler<MouseEvent>() {
    // public void handle(MouseEvent event) {
    // activeDisplay.changeBackground(backgroundColor);changeBackground(turtleColor.getValue());
    // }
    //
    // };

    // placeholder: body is commented out, so this handler currently does nothing
    private EventHandler<MouseEvent> turnEvent = new EventHandler<MouseEvent>() {
        public void handle(MouseEvent event) {
            // TODO turtle within display or in view? think about allowances for
            // multiple turtles
            // double degree = turnDegree.getValue();
            // display.setHeading(manager.getTurtle(), degree);
            // if(degree >= 0){
            // addHistoryText("left " + (int) degree);
            // }else{
            // addHistoryText("right " + (int) Math.abs(degree));
            // }
        }
    };

    // placeholder: body is commented out, so this handler currently does nothing
    private EventHandler<MouseEvent> changePenColor = new EventHandler<MouseEvent>() {
        public void handle(MouseEvent event) {
            // manager.getTurtle().setPenColor(turtleColor.getValue());
        }
    };

    // opens window for help page
    // private EventHandler<ActionEvent> launchWebView = new
    // EventHandler<ActionEvent>() {
    // public void handle(ActionEvent event) {
    //
    // browser.getEngine().load(
    // getClass().getResource("/html/english.html")
    // .toExternalForm());
    // Stage popUp = new Stage();
    // popUp.setScene(secondScene);
    //
    // popUp.show();
    // }
    //
    // };
}
package org.terifan.util; import java.lang.reflect.Array; import java.util.Arrays; import java.util.Collection; import java.util.Map; public class Strings { /** * Matches String that are not null, not empty and match the regular expression. */ public static boolean matches(String aString, String aRegex) { return aString != null && !aString.isEmpty() && aString.matches(aRegex); } /** * Matches String that are either null, empty or match the regular expression. */ public static boolean emptyOrMatches(String aString, String aRegex) { return aString == null || aString.equals("") || aString.matches(aRegex); } public static String nullToEmpty(Object aString) { return aString == null ? "" : aString.toString(); } public static String toString(byte[] aString) { return aString == null ? "" : new String(aString); } public static String nullToEmpty(char[] aString) { return aString == null ? "" : new String(aString); } public static boolean isNumeric(String aString) { for (int i = 0, sz = aString.length(); i < sz; i++) { if (!Character.isDigit(aString.charAt(i))) { return false; } } return true; } /** * Check if the String provided is null or empty. * * @param aString * a String to test * @return * true if null or empty */ public static boolean isEmptyOrNull(String aString) { return aString == null || aString.isEmpty(); } /** * Check if the String provided is not null or empty. * * @param aString * a String to test * @return * true if not null or empty */ public static boolean isNotEmptyOrNull(String aString) { return aString != null && !aString.isEmpty(); } /** * Compares two words for similarity and return true if the words have less * differences than the maximum threshold specified. 
* * @param aTemplate * the word to compare against * @param aCompareWith * the word to compare with * @param aCaseSensitive * true if the comparison should be case senastive * @param aTrimCompare * differences beyond the end of the template will not be counted * @return * true if the words match */ public static int compareWords(String aTemplate, String aCompareWith, boolean aCaseSensitive, boolean aTrimCompare) { if (!aCaseSensitive) { aTemplate = aTemplate.toLowerCase(); aCompareWith = aCompareWith.toLowerCase(); } return compareWords(aTemplate, aCompareWith, 0, 0, Integer.MAX_VALUE, 0, aTrimCompare); } /** * Compares two words for similarity and return number of differences. * * @param aTemplate * the word to compare against * @param aCompareWith * the word to compare with * @param aMaxErrors * number of mismatching characters permitted before the test is aborted * @param aCaseSensitive * true if the comparison should be case sensitive * @param aTrimCompare * differences beyond the end of the template will not be counted * @return * number of different characters */ public static boolean compareWords(String aTemplate, String aCompareWith, int aMaxErrors, boolean aCaseSensitive, boolean aTrimCompare) { if (!aCaseSensitive) { aTemplate = aTemplate.toLowerCase(); aCompareWith = aCompareWith.toLowerCase(); } int err = compareWords(aTemplate, aCompareWith, 0, 0, aMaxErrors, 0, aTrimCompare); return err <= aMaxErrors; } private static int compareWords(String aTemplate, String aCompare, int aTemplateOffset, int aCompareOffset, int aMaxErrors, int aAccumulatedErrors, boolean aTrimCompare) { for (; aTemplateOffset < aTemplate.length() && aCompareOffset < aCompare.length(); aTemplateOffset++, aCompareOffset++) { if (aTemplate.charAt(aTemplateOffset) != aCompare.charAt(aCompareOffset)) { aAccumulatedErrors++; if (aAccumulatedErrors > aMaxErrors) { return aAccumulatedErrors; } int e1 = compareWords(aTemplate, aCompare, aTemplateOffset + 1, aCompareOffset, aMaxErrors, 
aAccumulatedErrors, aTrimCompare); int e2 = compareWords(aTemplate, aCompare, aTemplateOffset, aCompareOffset + 1, aMaxErrors, aAccumulatedErrors, aTrimCompare); int e3 = compareWords(aTemplate, aCompare, aTemplateOffset + 1, aCompareOffset + 1, aMaxErrors, aAccumulatedErrors, aTrimCompare); return Math.min(e1, Math.min(e2, e3)); } } if (aTrimCompare && aTemplateOffset == aTemplate.length()) { return aAccumulatedErrors; } return aAccumulatedErrors + Math.abs((aTemplate.length() - aTemplateOffset) - (aCompare.length() - aCompareOffset)); } public static Integer toInteger(String aString, Integer aDefaultValue) { if (isEmptyOrNull(aString)) { return aDefaultValue; } try { return Integer.valueOf(aString); } catch (Throwable e) { return aDefaultValue; } } public static Long toLong(String aString, Long aDefaultValue) { if (isEmptyOrNull(aString)) { return aDefaultValue; } try { return Long.valueOf(aString); } catch (Throwable e) { return aDefaultValue; } } public static Double toDouble(String aString, Double aDefaultValue) { if (isEmptyOrNull(aString)) { return aDefaultValue; } try { return Double.valueOf(aString); } catch (Throwable e) { return aDefaultValue; } } public static String repeat(char aCharacter, int aLength) { char [] buf = new char[aLength]; Arrays.fill(buf, aCharacter); return new String(buf); } public static String repeat(String aWord, int aLength) { char [] buf = new char[aLength]; char [] src = aWord.toCharArray(); for (int i = 0; i < aLength; ) { for (int j = 0; i < aLength && j < src.length; j++, i++) { buf[i] = src[j]; } } return new String(buf); } /** * Converts the provided Object to a String. If the value is null then value * returned is null. * * @param aValue * a value to convert. * @return * a String (which can be null). */ public static String asString(Object aValue) { return aValue == null ? null : aValue.toString(); } /** * Return a comma separated list of all items in the list. 
* @param aList * a list of items * @return * a String of all items */ public static String listToString(Collection aList) { StringBuilder sb = new StringBuilder(); for (Object item : aList) { if (sb.length() > 0) { sb.append(','); } sb.append(item); } return sb.toString(); } public static String arrayToString(Object aArray) { if (aArray != null && aArray.getClass().isArray()) { StringBuilder sb = new StringBuilder("["); for (int i = 0; i < Array.getLength(aArray); i++) { if (i > 0) { sb.append(','); } sb.append(Array.get(aArray, i)); } return sb.append("]").toString(); } return asString(aArray); } public static String listToString(Object... aList) { StringBuilder sb = new StringBuilder(); for (Object item : aList) { if (sb.length() > 0) { sb.append(','); } sb.append(item); } return sb.toString(); } public static String emptyToNull(String aString) { return aString == null || aString.isEmpty() ? null : aString; } /** * Join two strings inserting the separator between ensuring the separator only exists once. * * E.g: join("c:/files/", "/", "/my_file.txt") returns "c:/files/my_file.txt". * * @return * strings join with the separator in-between. */ public static String concat(String aHead, String aSeparator, String aTail) { if (!aSeparator.isEmpty()) { while (aHead.endsWith(aSeparator)) { aHead = aHead.substring(0, aHead.length()-1); } while (aTail.startsWith(aSeparator)) { aTail = aTail.substring(1); } } return aHead + (!aHead.isEmpty() && !aTail.isEmpty() ? aSeparator : "") + aTail; } /** * Joins the string excluding any empty or null parts. * * @param aSeparator * separator between parts * @param aStrings * parts to join * @return * the joined strings */ public static String join(String aSeparator, String... aStrings) { return join(aSeparator, false, aStrings); } /** * Joins the string excluding any null parts. 
* * @param aSeparator * separator between parts * @param aIncludeEmptyParts * true if empty parts should be included * @param aStrings * parts to join * @return * the joined strings */ public static String join(String aSeparator, boolean aIncludeEmptyParts, String... aStrings) { if (aStrings == null) { return null; } if (aStrings.length == 0) { return ""; } StringBuilder sb = new StringBuilder(); for (Object s : aStrings) { if (s != null && (aIncludeEmptyParts || !s.toString().isEmpty())) { if (sb.length() > 0) { sb.append(aSeparator); } sb.append(s); } } return sb.toString(); } public static String join(String aSeparator, int aFirstIndex, int aLastIndex, FunctionEx<Integer,String> aProducer) { try { StringBuilder err = new StringBuilder(); for (int i = aFirstIndex; i <= aLastIndex; i++) { if (err.length() > 0) { err.append(aSeparator); } err.append(aProducer.apply(i)); } return err.toString(); } catch (Exception e) { throw new IllegalStateException(e); } } public static String replaceNull(String aString, String aReplacedWith) { return aString == null ? aReplacedWith : aString; } public static String replaceEmptyOrNull(String aString, String aReplacedWith) { return isEmptyOrNull(aString) ? 
aReplacedWith : aString; } public static String toTimeString(long aMillis) { return String.format("%d:%02d:%02d.%03d", aMillis/60/60/1000, (aMillis/60/1000)%60, (aMillis/1000)%60, aMillis%1000); } public static String replaceParams(String aText, Map<String,Object> aParams) { return replaceParams(aText, e->"" + aParams.get(e)); } public static String replaceParams(String aText, StringLookup aParamProvider) { return replaceParams("${", "}", aText, aParamProvider); } public static String replaceParams(String aKeywordPrefix, String aKeywordSuffix, String aText, StringLookup aParamProvider) { StringBuilder text = new StringBuilder(aText.length()); int prefixLength = aKeywordPrefix.length(); int suffixLength = aKeywordSuffix.length(); for (int i = 0, sz = aText.length(); i < sz; i++) { if (aText.startsWith(aKeywordPrefix, i)) { int j = aText.indexOf(aKeywordSuffix, i + prefixLength); if (j != -1) { String name = aText.substring(i + prefixLength, j); Object header = aParamProvider.get(name); if (header != null) { text.append(header); i = j + suffixLength - 1; continue; } } } text.append(aText.charAt(i)); } return text.toString(); } public static String removeStart(String aString, String aPrefix) { while (aString.startsWith(aPrefix)) { aString = aString.substring(aPrefix.length()); } return aString; } public static String removeEnd(String aString, String aSuffix) { while (aString.endsWith(aSuffix)) { aString = aString.substring(0, aString.length() - aSuffix.length()); } return aString; } public static int indexOf(String aString, String... aTokens) { int i = -1; for (String s : aTokens) { int j = aString.indexOf(s); if (j != -1 && (j < i || i == -1)) { i = j; } } return i; } @FunctionalInterface public interface StringLookup { String get(String aName); } /** * Removes all characters not being either letter, digits or underscore from the string. 
*/ public static String sanitizeString(String aString) { StringBuilder sb = new StringBuilder(aString.length()); for (int i = 0; i < aString.length(); i++) { char c = aString.charAt(i); if (Character.isLetterOrDigit(c) || c == '_') { sb.append(c); } } return sb.toString(); } @FunctionalInterface public interface FunctionEx<T,R> { R apply(T aParam) throws Exception; } }
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package fr.isep.simizer.utils;

/**
 * The MurmurHash3 algorithm was created by Austin Appleby. This java port was authored by
 * Yonik Seeley and is placed into the public domain. The author hereby disclaims copyright
 * to this source code.
 * <p>
 * This produces exactly the same hash values as the final C++
 * version of MurmurHash3 and is thus suitable for producing the same hash values across
 * platforms.
 * <p>
 * The 32 bit x86 version of this hash should be the fastest variant for relatively short keys like ids.
 * <p>
 * Note - The x86 and x64 versions do _not_ produce the same results, as the
 * algorithms are optimized for their respective platforms.
 * <p>
 * See http://github.com/yonik/java_util for future updates to this file.
 */
public class MurmurHash3 {

  /** Returns the MurmurHash3_x86_32 hash.
   *
   *  data   - bytes to hash
   *  offset - start index within data
   *  len    - number of bytes to hash
   *  seed   - hash seed; the same (data, offset, len, seed) always yields the same hash
   */
  public static int murmurhash3_x86_32(byte[] data, int offset, int len, int seed) {

    // mixing constants from the reference MurmurHash3 implementation
    final int c1 = 0xcc9e2d51;
    final int c2 = 0x1b873593;

    int h1 = seed;
    int roundedEnd = offset + (len & 0xfffffffc);  // round down to 4 byte block

    // body: process the input 4 bytes (one 32-bit word) at a time
    for (int i = offset; i < roundedEnd; i += 4) {
      // little endian load order
      int k1 = (data[i] & 0xff) | ((data[i + 1] & 0xff) << 8) | ((data[i + 2] & 0xff) << 16) | (data[i + 3] << 24);
      k1 *= c1;
      k1 = (k1 << 15) | (k1 >>> 17);  // ROTL32(k1,15);
      k1 *= c2;

      h1 ^= k1;
      h1 = (h1 << 13) | (h1 >>> 19);  // ROTL32(h1,13);
      h1 = h1 * 5 + 0xe6546b64;
    }

    // tail: the remaining 1-3 bytes, mixed without the extra h1 rotation
    int k1 = 0;

    switch (len & 0x03) {
      case 3:
        k1 = (data[roundedEnd + 2] & 0xff) << 16;
        // fallthrough
      case 2:
        k1 |= (data[roundedEnd + 1] & 0xff) << 8;
        // fallthrough
      case 1:
        k1 |= (data[roundedEnd] & 0xff);
        k1 *= c1;
        k1 = (k1 << 15) | (k1 >>> 17);  // ROTL32(k1,15);
        k1 *= c2;
        h1 ^= k1;
    }

    // finalization: avalanche the bits so small input changes flip ~half the output bits
    h1 ^= len;

    // fmix(h1);
    h1 ^= h1 >>> 16;
    h1 *= 0x85ebca6b;
    h1 ^= h1 >>> 13;
    h1 *= 0xc2b2ae35;
    h1 ^= h1 >>> 16;

    return h1;
  }

  /** Returns the MurmurHash3_x86_32 hash of the UTF-8 bytes of the String without actually encoding
   * the string to a temporary buffer. This is more than 2x faster than hashing the result
   * of String.getBytes().
   *
   * Encodes each char to its UTF-8 byte sequence on the fly, packing bytes into 32-bit
   * words (k1) and mixing a word into the hash state whenever one is complete, so the
   * result equals hashing the equivalent UTF-8 byte array.
   *
   * offset/len are in chars (not bytes); nBytes tracks the UTF-8 byte length for finalization.
   */
  public static int murmurhash3_x86_32(CharSequence data, int offset, int len, int seed) {

    // mixing constants from the reference MurmurHash3 implementation
    final int c1 = 0xcc9e2d51;
    final int c2 = 0x1b873593;

    int h1 = seed;

    int pos = offset;
    int end = offset + len;
    int k1 = 0;        // current partially-filled 32-bit word
    int k2 = 0;        // UTF-8 bytes of the current char, little-endian packed
    int shift = 0;     // number of bits already occupied in k1
    int bits = 0;      // number of bits contributed by the current char (8/16/24/32)
    int nBytes = 0;    // length in UTF8 bytes

    while (pos < end) {
      int code = data.charAt(pos++);
      if (code < 0x80) {
        // ASCII: single UTF-8 byte
        k2 = code;
        bits = 8;

        /***
         // optimized ascii implementation (currently slower!!! code size?)
         if (shift == 24) {
         k1 = k1 | (code << 24);

         k1 *= c1;
         k1 = (k1 << 15) | (k1 >>> 17);  // ROTL32(k1,15);
         k1 *= c2;

         h1 ^= k1;
         h1 = (h1 << 13) | (h1 >>> 19);  // ROTL32(h1,13);
         h1 = h1*5+0xe6546b64;

         shift = 0;
         nBytes += 4;
         k1 = 0;
         } else {
         k1 |= code << shift;
         shift += 8;
         }
         continue;
         ***/

      } else if (code < 0x800) {
        // 2-byte UTF-8 sequence
        k2 = (0xC0 | (code >> 6)) | ((0x80 | (code & 0x3F)) << 8);
        bits = 16;
      } else if (code < 0xD800 || code > 0xDFFF || pos >= end) {
        // we check for pos>=end to encode an unpaired surrogate as 3 bytes.
        k2 = (0xE0 | (code >> 12)) | ((0x80 | ((code >> 6) & 0x3F)) << 8) | ((0x80 | (code & 0x3F)) << 16);
        bits = 24;
      } else {
        // surrogate pair
        // int utf32 = pos < end ? (int) data.charAt(pos++) : 0;
        int utf32 = (int) data.charAt(pos++);
        utf32 = ((code - 0xD7C0) << 10) + (utf32 & 0x3FF);
        k2 = (0xff & (0xF0 | (utf32 >> 18))) | ((0x80 | ((utf32 >> 12) & 0x3F))) << 8 | ((0x80 | ((utf32 >> 6) & 0x3F))) << 16 | (0x80 | (utf32 & 0x3F)) << 24;
        bits = 32;
      }

      k1 |= k2 << shift;

      // int used_bits = 32 - shift;  // how many bits of k2 were used in k1.
      // int unused_bits = bits - used_bits; //  (bits-(32-shift)) == bits+shift-32  == bits-newshift

      shift += bits;
      if (shift >= 32) {
        // mix after we have a complete word

        k1 *= c1;
        k1 = (k1 << 15) | (k1 >>> 17);  // ROTL32(k1,15);
        k1 *= c2;

        h1 ^= k1;
        h1 = (h1 << 13) | (h1 >>> 19);  // ROTL32(h1,13);
        h1 = h1 * 5 + 0xe6546b64;

        shift -= 32;
        // unfortunately, java won't let you shift 32 bits off, so we need to check for 0
        if (shift != 0) {
          k1 = k2 >>> (bits - shift);   // bits used == bits - newshift
        } else {
          k1 = 0;
        }
        nBytes += 4;
      }

    } // inner

    // handle tail: up to 3 leftover bytes in k1
    if (shift > 0) {
      nBytes += shift >> 3;
      k1 *= c1;
      k1 = (k1 << 15) | (k1 >>> 17);  // ROTL32(k1,15);
      k1 *= c2;
      h1 ^= k1;
    }

    // finalization: same fmix avalanche as the byte[] variant
    h1 ^= nBytes;

    // fmix(h1);
    h1 ^= h1 >>> 16;
    h1 *= 0x85ebca6b;
    h1 ^= h1 >>> 13;
    h1 *= 0xc2b2ae35;
    h1 ^= h1 >>> 16;

    return h1;
  }

}
/**
 * www.TheAIGames.com
 * Heads Up Omaha pokerbot
 *
 * Last update: May 07, 2014
 *
 * @author Jim van Eeden, Starapple
 * @version 1.0
 * @License MIT License (http://opensource.org/Licenses/MIT)
 */

package bot;

import java.util.HashMap;

import com.stevebrecher.HandEval;
import com.stevebrecher.HandEval.HandCategory;

import be.stilkin.HandParser;
import be.stilkin.StartingHands;
import poker.Card;
import poker.HandHoldem;
import poker.PokerMove;

/**
 * This class is the brains of your bot. Make your calculations here and return the best move with GetMove
 *
 * http://www.holdemsecrets.com/startinghands.htm
 *
 * @author stilkin
 */
public class BotStarter implements Bot {
    // action name constants matching the engine's move protocol strings
    public static final String CALL_ACTION = "call";
    public static final String RAISE_ACTION = "raise";
    public static final String CHECK_ACTION = "check";
    public static final String FOLD_ACTION = "fold";
    // CURIOSITY: max fraction of our stack we are willing to pay "just to see"
    public static final float CURIOSITY = 0.05f;
    public static final float COCKYNESS = 0.025f;
    // pre-flop win-odds threshold above which we play aggressively
    private static final float ODD_LOWER_BOUND = 0.56f;
    // money committed this round, keyed by action name ("raise"/"call"/...)
    private final HashMap<String, Integer> roundMoneys = new HashMap<String, Integer>();
    private final HandParser myHandParser = new HandParser();
    private final HandParser tableHandParser = new HandParser();
    private String botName = "stilkin";
    private HandHoldem hand;
    private int lastRound = -1;
    private int minRaise;

    /**
     * Implement this method to return the best move you can. Currently it will return a raise the ordinal value of one of our cards is higher than 9, a call when one of the cards
     * has a higher ordinal value than 5 and a check otherwise.
     *
     * @param state
     *            : The current state of your bot, with all the (parsed) information given by the engine
     * @param timeOut
     *            : The time you have to return a move
     * @return PokerMove : The move you will be doing
     */
    @Override
    public PokerMove getMove(BotState state, Long timeOut) {
        // set some round variables
        botName = state.getMyName();
        hand = state.getHand();
        minRaise = 2 * state.getBigBlind();
        final Card[] table = state.getTable();
        final int callAmount = state.getAmountToCall();
        minRaise = Math.max(minRaise, callAmount); // TODO: currently assuming this is right

        if (lastRound != state.getRound()) { // reset round counters
            lastRound = state.getRound();
            roundMoneys.clear();
            System.err.println("Round: " + lastRound);
        }

        // fewer than 3 table cards means the flop has not been dealt yet
        if (table == null || table.length < 3) { // pre-flop
            return preFlop(state);
        } else { // post-flop
            return postFlop(table, state);
        }
    }

    // *****************
    // *** POST FLOP ***
    // *****************

    /**
     * Decides the post-flop move: evaluates our hand against the table, then picks a
     * raise/call/check based on hand category, card heights and cost relative to stack.
     */
    private PokerMove postFlop(final Card[] table, final BotState state) {
        // reset parsers
        tableHandParser.clear();
        myHandParser.clear();
        // init parser with this rounds' cards
        tableHandParser.addCards(table);
        myHandParser.addCards(table);
        myHandParser.addCards(state.getHand().getCards());

        // if the table cards are stronger, we bail
        if (tableHandParser.getHandCategory().ordinal() >= myHandParser.getHandCategory().ordinal()) {
            System.err.println("Post-flop, table appears to match our hand: " + myHandParser.getHandCategory().toString());
            // TODO: check if we have higher value cards, (this can also be pair or high card case)
            return preFlopCheck(state);
        }

        // if we get here we have at least one of the cards in our hand, otherwise the table would be as good as our hand (see higher)
        final int callAmount = state.getAmountToCall();
        final float costRatio = (float) callAmount / (float) state.getmyStack();
        final HandEval.HandCategory myHand = getHandCategory(hand, table);
        // Get the ordinal values of the cards in your hand
        final int height1 = hand.getCard(0).getHeight().ordinal();
        final int height2 = hand.getCard(1).getHeight().ordinal();
        final int sum = height1 + height2;
        System.err.println("Post-flop, we have " + myHandParser.getHandCategory().toString() + " " + myHand.toString());
        System.err.println("Callamount: " + callAmount + " sum: " + sum);

        final PokerMove oppAction = state.getOpponentAction();
        boolean oppRaise = false;
        if (oppAction != null) {
            oppRaise = RAISE_ACTION.equals(oppAction.getAction());
        }

        int odds = 1;
        // calculate some odds as multipliers
        // NOTE(review): values look like "X-to-1 against" odds per hand category — confirm source
        switch (myHand) {
            case STRAIGHT_FLUSH:
                odds = 72192;
                break;
            case FOUR_OF_A_KIND:
                odds = 4164;
                break;
            case FULL_HOUSE:
                odds = 693;
                break;
            case FLUSH:
                odds = 508;
                break;
            case STRAIGHT:
                odds = 254;
                break;
            case THREE_OF_A_KIND:
                odds = 46;
                break;
            case TWO_PAIR:
                odds = 20;
                break;
            case PAIR:
                odds = 2;
                break;
            case NO_PAIR:
                odds = 1;
                break;
            default:
                odds = 1;
                break;
        }

        final boolean flushDanger = tableHandParser.hasSuited(3) && oppRaise; // do I smell a flush?

        // determine right course of action
        switch (myHand) {
            case STRAIGHT_FLUSH:
            case FOUR_OF_A_KIND:
            case FULL_HOUSE:
            case FLUSH:
            case STRAIGHT:
                final PokerMove oddRaise = raiseWithOdds(state, odds);
                // NOTE(review): the int overload of raiseWithOdds never returns null,
                // so this else-branch appears unreachable — verify
                if (oddRaise != null) {
                    return oddRaise; // we raise
                } else { // we have been re-raised
                    if (sum > 15 || costRatio < CURIOSITY) { // TODO: validate
                        return loggedAction(botName, CALL_ACTION, callAmount);
                    }
                    // else check or fold
                }
                // NOTE(review): no break here — control falls through into the
                // THREE_OF_A_KIND branch when neither branch above returns; confirm intentional
            case THREE_OF_A_KIND:
                // TODO: find out which card is in the THREE OF A KIND
                final boolean pairInHand = hand.getCard(0).getHeight() == hand.getCard(1).getHeight();
                if (pairInHand) {
                    final PokerMove tripsOddRaise = raiseWithOdds(state, odds / 2);
                    if (tripsOddRaise != null) {
                        return tripsOddRaise; // we raise
                    } else { // we are being re-raised
                        if (!flushDanger && (sum > 15 || costRatio < CURIOSITY)) { // TODO: validate
                            return loggedAction(botName, CALL_ACTION, callAmount);
                        }
                    }
                } else if (!flushDanger && (sum > 15 || costRatio < CURIOSITY)) { // TODO: validate
                    return loggedAction(botName, CALL_ACTION, callAmount);
                }
                break;
            case TWO_PAIR:
                // TODO: find out which cards are in the TWO PAIR
                boolean pairOnTable = tableHandParser.getHandCategory().ordinal() >= HandCategory.PAIR.ordinal(); // danger for 3OAK
                if (!pairOnTable && sum > 10) {
                    final PokerMove twoPairOddRaise = raiseWithOdds(state, odds / 2);
                    if (twoPairOddRaise != null) {
                        return twoPairOddRaise; // we raise
                    } else { // we are being re-raised
                        if (!flushDanger && (sum > 15 || costRatio < CURIOSITY)) { // TODO: validate
                            return loggedAction(botName, CALL_ACTION, callAmount);
                        }
                    }
                } else if (!flushDanger && (sum > 15 || costRatio < CURIOSITY)) { // TODO: validate
                    return loggedAction(botName, CALL_ACTION, callAmount);
                }
                break;
            case PAIR:
                // if we are here the pair is in our hands
                if (!flushDanger && (sum > 20 || costRatio < CURIOSITY)) { // TODO: validate
                    return loggedAction(botName, CALL_ACTION, callAmount);
                }
                break;
            case NO_PAIR:
                break;
        }
        // default: no profitable action found, just check
        return loggedAction(botName, CHECK_ACTION, 0);
    }

    /**
     * We have a good hand, with how much do we raise?
     *
     * Sizes the raise from the odds multiplier, the big blind and our stack lead;
     * raises only while this round's committed money stays below that size, else calls.
     * Note: always returns a move, never null.
     */
    private PokerMove raiseWithOdds(final BotState state, int odds) {
        final int multiplier = 2 + (odds / 120);
        int raise = multiplier * state.getBigBlind();

        final int stackDiff = state.getmyStack() - state.getOpponentStack();
        if (stackDiff > 0) { // we are ahead
            raise += (int) (0.15f * stackDiff);
        }

        final int raisedSoFar = roundMoneys.getOrDefault(RAISE_ACTION, 0);
        final int calledSoFar = roundMoneys.getOrDefault(CALL_ACTION, 0);
        final int bothSoFar = raisedSoFar + calledSoFar;
        if (bothSoFar < raise) { // set to raise only once
            return loggedAction(botName, RAISE_ACTION, raise);
        } else {
            final int callAmount = state.getAmountToCall();
            return loggedAction(botName, CALL_ACTION, callAmount);
        }
    }

    // ****************
    // *** PRE FLOP ***
    // ****************

    /**
     * What do we do pre-flop? We get the odds and raise according to any odds over 55%
     *
     * NOTE(review): raiseWithOdds is invoked before the winOdds threshold is checked;
     * loggedAction inside it records the amount into roundMoneys even when the returned
     * move is later discarded — verify this bookkeeping is intended.
     */
    private PokerMove preFlop(final BotState state) {
        final float winOdds = StartingHands.getOdds(hand.getCard(0), hand.getCard(1));
        final int callAmount = state.getAmountToCall();

        final PokerMove oppAction = state.getOpponentAction();
        boolean oppRaise = false;
        if (oppAction != null) {
            oppRaise = RAISE_ACTION.equals(oppAction.getAction());
        }

        final PokerMove oddRaise = raiseWithOdds(state, winOdds);

        if (winOdds > ODD_LOWER_BOUND) { // over 55%
            if (oddRaise != null) {
                if (!oppRaise) {
                    return oddRaise; // we raise
                } else { // opponent has raised
                    final int diff = oddRaise.getAmount() - callAmount;
                    if (diff >= minRaise) { // we re-raise
                        return loggedAction(botName, RAISE_ACTION, diff);
                    } else { // we call
                        return loggedAction(botName, CALL_ACTION, callAmount);
                    }
                }
            } else { // we ate in too deep 0_o
                System.err.println("Pre-flop, crossing fingers.");
                return loggedAction(botName, CALL_ACTION, callAmount);
            }
        } else if (winOdds > 0 && oddRaise != null) { // between 50% and 55%
            if (!oppRaise) {
                // TODO: cutoff here is only 50%, is that wise?
                System.err.println("Pre-flop, low odds bet.");
                int prudentBet = oddRaise.getAmount() / 2;
                prudentBet = Math.max(prudentBet, minRaise);
                return loggedAction(botName, RAISE_ACTION, prudentBet); // we raise
            }
        }
        // poor starting hand, or average hand was re-raised
        return preFlopCheck(state);
    }

    /**
     * Raises up to a specific amount specified by the odds. Will return null if we cannot raise
     *
     * maxRaise is the win-odds fraction of our stack; we raise towards it in two steps
     * while this round's committed money stays below it (or below the minimum raise).
     */
    private PokerMove raiseWithOdds(final BotState state, final float winOdds) {
        final int raisedSoFar = roundMoneys.getOrDefault(RAISE_ACTION, 0);
        final int calledSoFar = roundMoneys.getOrDefault(CALL_ACTION, 0);
        final int spentSoFar = raisedSoFar + calledSoFar;

        final int maxRaise = (int) (winOdds * state.getmyStack());
        if (spentSoFar < maxRaise || spentSoFar < minRaise) {
            final int raisePart = maxRaise / 2; // we raise in 2 steps
            final int raise = Math.max(minRaise, raisePart);
            return loggedAction(botName, RAISE_ACTION, raise);
        } else {
            return null;
        }
    }

    /**
     * Calls up to big blind, otherwise checks (pre-flop)
     */
    private PokerMove preFlopCheck(final BotState state) {
        final int blindDiff = state.getBigBlind() - state.getSmallBlind();
        final int callAmount = state.getAmountToCall();
        final float costRatio = (float) blindDiff / state.getmyStack();
        // when the blind is too big compared to our stack, we don't peek
        // TODO: is this smart?
        if (costRatio < CURIOSITY && callAmount <= blindDiff) {
            return loggedAction(botName, CALL_ACTION, callAmount);
        } else {
            return loggedAction(botName, CHECK_ACTION, 0);
        }
    }

    /**
     * TODO: add more logging to this method
     *
     * Builds the move AND records the amount into this round's bookkeeping.
     * NOTE(review): mutates roundMoneys even when the caller discards the returned
     * move (see preFlop) — verify.
     */
    private PokerMove loggedAction(final String botName, final String action, final int amount) {
        final int currentAmount = roundMoneys.getOrDefault(action, 0);
        roundMoneys.put(action, currentAmount + amount);
        return new PokerMove(botName, action, amount);
    }

    /**
     * Calculates the hand strength, only works with 5 cards. This uses the com.stevebrecher package to get hand strength.
     *
     * @param cardSet
     *            : a set of five cards
     * @return HandCategory with what the cardSet is worth
     */
    public HandEval.HandCategory getCardsCategory(final Card[] cardSet) {
        if (cardSet != null && cardSet.length == 5) {

            long handCode = 0;
            for (Card card : cardSet) {
                // presumably getNumber() yields the HandEval bit encoding of the card — confirm
                handCode += card.getNumber();
            }
            return rankToCategory(HandEval.hand5Eval(handCode));
        }
        return null;
    }

    // ***********************
    // *** UTILITY METHODS ***
    // ***********************

    /**
     * Calculates the bot's hand strength, with 0, 3, 4 or 5 cards on the table. This uses the com.stevebrecher package to get hand strength.
     *
     * @param hand
     *            : cards in hand
     * @param table
     *            : cards on table
     * @return HandCategory with what the bot has got, given the table and hand
     */
    public HandEval.HandCategory getHandCategory(HandHoldem hand, Card[] table) {
        if (table == null || table.length == 0) { // there are no cards on the table
            return hand.getCard(0).getHeight() == hand.getCard(1).getHeight() // return a pair if our hand cards are the same
                    ? HandEval.HandCategory.PAIR
                    : HandEval.HandCategory.NO_PAIR;
        }
        long handCode = hand.getCard(0).getNumber() + hand.getCard(1).getNumber();
        for (Card card : table) {
            handCode += card.getNumber();
        }

        if (table.length == 3) { // three cards on the table
            return rankToCategory(HandEval.hand5Eval(handCode));
        }
        if (table.length == 4) { // four cards on the table
            return rankToCategory(HandEval.hand6Eval(handCode));
        }
        return rankToCategory(HandEval.hand7Eval(handCode)); // five cards on the table
    }

    /**
     * small method to convert the int 'rank' to a readable enum called HandCategory
     */
    public HandEval.HandCategory rankToCategory(int rank) {
        return HandEval.HandCategory.values()[rank >> HandEval.VALUE_SHIFT];
    }

    /**
     * @param args
     */
    public static void main(String[] args) {
        final BotParser parser = new BotParser(new BotStarter());
        parser.run();
    }
}
/* * #%L * MariaDB4j * %% * Copyright (C) 2012 - 2014 Michael Vorburger * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package ch.vorburger.exec; import java.io.BufferedInputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.util.Map; import org.apache.commons.exec.CommandLine; import org.apache.commons.exec.DefaultExecuteResultHandler; import org.apache.commons.exec.DefaultExecutor; import org.apache.commons.exec.ExecuteException; import org.apache.commons.exec.ExecuteWatchdog; import org.apache.commons.exec.Executor; import org.apache.commons.exec.ProcessDestroyer; import org.apache.commons.exec.PumpStreamHandler; import org.apache.commons.exec.ShutdownHookProcessDestroyer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ch.vorburger.exec.SLF4jLogOutputStream.Type; import ch.vorburger.mariadb4j.Util; /** * Managed OS Process (Executable, Program, Command). Created by * {@link ManagedProcessBuilder#build()}. * * Intended for controlling external "tools", often "daemons", which produce some text-based control * output. In this form not yet suitable for programs returning binary data via stdout (but could be * extended). * * Does reasonably extensive logging about what it's doing (contrary to Apache Commons Exec), * including logging the processes stdout &amp; stderr, into SLF4J (not the System.out.Console). 
* * @see Executor Internally based on http://commons.apache.org/exec/ but intentionally not exposing * this; could be switched later, if there is any need. * * @author Michael Vorburger */ public class ManagedProcess { private static final Logger logger = LoggerFactory.getLogger(ManagedProcess.class); private static final int INVALID_EXITVALUE = Executor.INVALID_EXITVALUE; private final CommandLine commandLine; private final Executor executor = new DefaultExecutor(); private final DefaultExecuteResultHandler resultHandler = new LoggingExecuteResultHandler(); private final ExecuteWatchdog watchDog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT); private final ProcessDestroyer shutdownHookProcessDestroyer = new LoggingShutdownHookProcessDestroyer(); private final Map<String, String> environment; private final InputStream input; private final boolean destroyOnShutdown; private final int consoleBufferMaxLines; private boolean isAlive = false; private String procShortName; private RollingLogOutputStream console; private MultiOutputStream stdouts; private MultiOutputStream stderrs; /** * Package local constructor. * * Keep ch.vorburger.exec's API separate from Apache Commons Exec, so it COULD be replaced. * * @see ManagedProcessBuilder#build() * * @param commandLine Apache Commons Exec CommandLine * @param directory Working directory, or null * @param environment Environment Variable. 
*/
ManagedProcess(CommandLine commandLine, File directory, Map<String, String> environment,
        InputStream input, boolean destroyOnShutdown, int consoleBufferMaxLines) {
    this.commandLine = commandLine;
    this.environment = environment;
    if (input != null) {
        this.input = buffer(input);
    } else {
        // null is safe/OK/expected here; the PumpStreamHandler constructor handles a
        // null input stream as expected
        this.input = null;
    }
    if (directory != null) {
        executor.setWorkingDirectory(directory);
    }
    executor.setWatchdog(watchDog);
    this.destroyOnShutdown = destroyOnShutdown;
    this.consoleBufferMaxLines = consoleBufferMaxLines;
}

// stolen from commons-io IOUtils (@since v2.5)
/**
 * Wraps the given stream in a BufferedInputStream, unless it already is one.
 *
 * @param inputStream stream to wrap; must not be null
 * @return the stream itself if already buffered, otherwise a new BufferedInputStream around it
 * @throws NullPointerException if inputStream is null
 */
protected BufferedInputStream buffer(final InputStream inputStream) {
    // reject null early on rather than waiting for IO operation to fail
    if (inputStream == null) { // not checked by BufferedInputStream
        throw new NullPointerException("inputStream == null");
    }
    return inputStream instanceof BufferedInputStream ? (BufferedInputStream) inputStream
            : new BufferedInputStream(inputStream);
}

/**
 * Starts the Process.
 *
 * This method always immediately returns (i.e. launches the process asynchronously). Use the
 * different waitFor... methods if you want to "block" on the spawned process.
 *
 * @throws ManagedProcessException if the process could not be started
 */
public synchronized void start() throws ManagedProcessException {
    startPreparation();
    startExecute();
}

/**
 * First half of {@link #start()}: wires up the stream handlers (SLF4J logging, optional
 * rolling console buffer), the shutdown-hook destroyer, and makes the command executable
 * on disk if it is a file. Does not actually launch the process.
 *
 * @throws ManagedProcessException if this instance is already running, or the command
 *         could not be made executable
 */
protected synchronized void startPreparation() throws ManagedProcessException {
    if (isAlive()) {
        throw new ManagedProcessException(procLongName()
                + " is still running, use another ManagedProcess instance to launch another one");
    }
    if (logger.isInfoEnabled())
        logger.info("Starting {}", procLongName());
    stdouts = new MultiOutputStream();
    stderrs = new MultiOutputStream();
    PumpStreamHandler outputHandler = new PumpStreamHandler(stdouts, stderrs, input);
    executor.setStreamHandler(outputHandler);
    String pid = procShortName();
    stdouts.addOutputStream(new SLF4jLogOutputStream(logger, pid, Type.stdout));
    stderrs.addOutputStream(new SLF4jLogOutputStream(logger, pid, Type.stderr));
    if (consoleBufferMaxLines > 0) {
        // console receives BOTH stdout and stderr, capped to the last consoleBufferMaxLines lines
        console = new RollingLogOutputStream(consoleBufferMaxLines);
        stdouts.addOutputStream(console);
        stderrs.addOutputStream(console);
    }
    if (destroyOnShutdown) {
        executor.setProcessDestroyer(shutdownHookProcessDestroyer);
    }
    if (commandLine.isFile()) {
        try {
            Util.forceExecutable(getExecutableFile());
        } catch (Exception e) {
            throw new ManagedProcessException("Unable to make command executable", e);
        }
    } else {
        logger.debug(commandLine.getExecutable()
                + " is not a java.io.File, so it won't be made executable (which MAY be a problem on *NIX, but not for sure)");
    }
}

/** @return the command's executable as a java.io.File */
public File getExecutableFile() {
    return new File(commandLine.getExecutable());
}

/**
 * Second half of {@link #start()}: actually launches the process (asynchronously, via the
 * executor's result handler), then briefly yields so an immediate start-up failure can be
 * detected by {@link #checkResult()}.
 *
 * @throws ManagedProcessException if the launch failed or the process already terminated
 *         with an error
 */
protected synchronized void startExecute() throws ManagedProcessException {
    try {
        executor.execute(commandLine, environment, resultHandler);
    } catch (IOException e) {
        throw new ManagedProcessException("Launch failed: " + commandLine, e);
    }
    isAlive = true;
    // We now must give the system a say 100ms chance to run the background
    // thread now, otherwise the resultHandler in checkResult() won't work.
    //
    // This is admittedly not ideal, but to do better would require significant
    // changes to DefaultExecutor, so that its execute() would "fail fast" and
    // throw an Exception immediately if process start-up fails by doing the
    // launch in the current thread, and then spawns a separate thread only
    // for the waitFor().
    //
    // As DefaultExecutor doesn't seem to have been written with extensibility
    // in mind, and rewriting it to start gain 100ms (at the start of every process..)
    // doesn't seem to be worth it for now, I'll leave it like this, for now.
    //
    try {
        this.wait(100); // better than Thread.sleep(100); -- thank you, FindBugs
    } catch (InterruptedException e) {
        throw handleInterruptedException(e);
    }
    checkResult();
}

/**
 * Starts the Process and waits (blocks) until the process prints a certain message.
 *
 * You should be sure that the process either prints this message at some point, or otherwise
 * exits on it's own. This method will otherwise be slow, but never block forever, as it will
 * "give up" and always return after max. maxWaitUntilReturning ms.
 *
 * @param messageInConsole text to wait for in the STDOUT/STDERR of the external process
 * @param maxWaitUntilReturning maximum time to wait, in milliseconds, until returning, if
 *        message wasn't seen
 * @return true if message was seen in console; false if message didn't occur and we're
 *         returning due to max. wait timeout
 * @throws ManagedProcessException for problems such as if the process already exited (without
 *         the message ever appearing in the Console)
 */
public boolean startAndWaitForConsoleMessageMaxMs(String messageInConsole,
        long maxWaitUntilReturning) throws ManagedProcessException {
    startPreparation();
    CheckingConsoleOutputStream checkingConsoleOutputStream = new CheckingConsoleOutputStream(
            messageInConsole);
    // attach the checking stream BEFORE launching, so no early output can be missed
    if (stdouts != null && stderrs != null) {
        stdouts.addOutputStream(checkingConsoleOutputStream);
        stderrs.addOutputStream(checkingConsoleOutputStream);
    }
    long timeAlreadyWaited = 0;
    final int SLEEP_TIME_MS = 50;
    logger.info(
            "Thread will wait for \"{}\" to appear in Console output of process {} for max. "
                    + maxWaitUntilReturning + "ms",
            messageInConsole, procLongName());
    startExecute();
    try {
        // poll every SLEEP_TIME_MS until the message shows up, the process dies, or we time out
        while (!checkingConsoleOutputStream.hasSeenIt() && isAlive()) {
            try {
                Thread.sleep(SLEEP_TIME_MS);
            } catch (InterruptedException e) {
                throw handleInterruptedException(e);
            }
            timeAlreadyWaited += SLEEP_TIME_MS;
            if (timeAlreadyWaited > maxWaitUntilReturning) {
                logger.warn("Timed out waiting for \"\"{}\"\" after {}ms (returning false)",
                        messageInConsole, maxWaitUntilReturning);
                return false;
            }
        }
        // If we got out of the while() loop due to !isAlive() instead of messageInConsole, then
        // throw
        // the same exception as above!
        if (!checkingConsoleOutputStream.hasSeenIt()) {
            throw new ManagedProcessException(getUnexpectedExitMsg(messageInConsole));
        } else {
            return true;
        }
    } finally {
        // always detach the temporary checking stream again
        if (stdouts != null && stderrs != null) {
            stdouts.removeOutputStream(checkingConsoleOutputStream);
            stderrs.removeOutputStream(checkingConsoleOutputStream);
        }
    }
}

/**
 * Builds the error message used when the process exits before the awaited console
 * message ever appeared.
 *
 * @param messageInConsole the console text that was being waited for
 * @return human-readable error message including the last console lines
 */
protected String getUnexpectedExitMsg(String messageInConsole) {
    return "Asked to wait for \"" + messageInConsole + "\" from " + procLongName()
            + ", but it already exited! (without that message in console)"
            + getLastConsoleLines();
}

/**
 * Wraps an InterruptedException (which should normally never occur here) into a
 * ManagedProcessException, after logging it.
 *
 * @param e the InterruptedException caught
 * @return a ManagedProcessException for the caller to throw
 */
protected ManagedProcessException handleInterruptedException(InterruptedException e)
        throws ManagedProcessException {
    // TODO Not sure how to best handle this... opinions welcome (see also below)
    final String message = "Huh?! InterruptedException should normally never happen here..."
            + procLongName();
    logger.error(message, e);
    return new ManagedProcessException(message, e);
}

/**
 * If the process has already terminated with a failure, rethrows that failure as a
 * ManagedProcessException (including exit value and last console lines); otherwise no-op.
 *
 * @throws ManagedProcessException if the process terminated with an ExecuteException
 */
protected void checkResult() throws ManagedProcessException {
    if (resultHandler.hasResult()) {
        // We already terminated (or never started)
        ExecuteException e = resultHandler.getException();
        if (e != null) {
            logger.error(procLongName() + " failed");
            throw new ManagedProcessException(procLongName() + " failed, exitValue="
                    + exitValue() + getLastConsoleLines(), e);
        }
    }
}

/**
 * Kills the Process. If you expect that the process may not be running anymore, use if (
 * {@link #isAlive()}) around this. If you expect that the process should still be running at
 * this point, call as is - and it will tell if it had nothing to destroy.
 *
 * @throws ManagedProcessException if the Process is already stopped (either because destroy()
 *         already explicitly called, or it terminated by itself, or it was never started)
 */
public void destroy() throws ManagedProcessException {
    //
    // if destroy() is ever giving any trouble, the org.openqa.selenium.os.ProcessUtils may be
    // of
    // interest
    //
    if (!isAlive) {
        throw new ManagedProcessException(procLongName()
                + " was already stopped (or never started)");
    }
    if (logger.isDebugEnabled())
        logger.debug("Going to destroy {}", procLongName());
    watchDog.destroyProcess();
    try {
        // Safer to waitFor() after destroy()
        resultHandler.waitFor();
    } catch (InterruptedException e) {
        throw handleInterruptedException(e);
    }
    if (logger.isInfoEnabled())
        logger.info("Successfully destroyed {}", procLongName());
    isAlive = false;
}

// Java Doc shamelessly copy/pasted from java.lang.Thread#isAlive() :
/**
 * Tests if this process is alive. A process is alive if it has been started and has not yet
 * terminated.
 *
 * @return <code>true</code> if this process is alive; <code>false</code> otherwise.
 */
public boolean isAlive() {
    // NOPE: return !resultHandler.hasResult();
    return isAlive;
}

/**
 * Returns the exit value for the subprocess.
 *
 * @return the exit value of the subprocess represented by this <code>Process</code> object. by
 *         convention, the value <code>0</code> indicates normal termination.
 * @exception ManagedProcessException if the subprocess represented by this
 *            <code>ManagedProcess</code> object has not yet terminated.
 */
public int exitValue() throws ManagedProcessException {
    try {
        return resultHandler.getExitValue();
    } catch (IllegalStateException e) {
        throw new ManagedProcessException("Exit Value not (yet) available for "
                + procLongName(), e);
    }
}

/**
 * Waits for the process to terminate.
 *
 * Returns immediately if the process is already stopped (either because destroy() was already
 * explicitly called, or it terminated by itself).
 *
 * Note that if the process was attempted to be started but that start failed (may be because
 * the executable could not be found, or some underlying OS error) then it throws a
 * ManagedProcessException.
 *
 * It also throws a ManagedProcessException if {@link #start()} was never even called.
 *
 * @return exit value (or INVALID_EXITVALUE if {@link #destroy()} was used)
 * @throws ManagedProcessException see above
 */
public int waitForExit() throws ManagedProcessException {
    logger.info("Thread is now going to wait for this process to terminate itself: {}",
            procLongName());
    return waitForExitMaxMsWithoutLog(-1);
}

/**
 * Like {@link #waitForExit()}, but waits max. maxWaitUntilReturning, then returns (even if
 * still running, taking no action).
 *
 * @param maxWaitUntilReturning Time to wait
 * @return exit value, or INVALID_EXITVALUE if the timeout was reached, or if {@link #destroy()}
 *         was used
 * @throws ManagedProcessException see above
 */
public int waitForExitMaxMs(long maxWaitUntilReturning) throws ManagedProcessException {
    logger.info("Thread is now going to wait max. {}ms for process to terminate itself: {}",
            maxWaitUntilReturning, procLongName());
    return waitForExitMaxMsWithoutLog(maxWaitUntilReturning);
}

/**
 * Shared implementation of the waitFor variants; -1 means "wait without timeout".
 *
 * @param maxWaitUntilReturning maximum wait in ms, or -1 to wait indefinitely
 * @return exit value, or INVALID_EXITVALUE if the process is still running after the timeout
 * @throws ManagedProcessException if waitFor() was invalid, the process failed, or we were
 *         interrupted
 */
protected int waitForExitMaxMsWithoutLog(long maxWaitUntilReturning)
        throws ManagedProcessException {
    assertWaitForIsValid();
    try {
        if (maxWaitUntilReturning != -1) {
            resultHandler.waitFor(maxWaitUntilReturning);
            checkResult();
            if (!isAlive())
                return exitValue();
            return INVALID_EXITVALUE;
        }
        resultHandler.waitFor();
        checkResult();
        return exitValue();
    } catch (InterruptedException e) {
        throw handleInterruptedException(e);
    }
}

/**
 * Like {@link #waitForExit()}, but waits max. maxWaitUntilReturning, then destroys if still
 * running, and returns.
 *
 * @param maxWaitUntilDestroyTimeout Time to wait
 * @throws ManagedProcessException see above
 */
public void waitForExitMaxMsOrDestroy(long maxWaitUntilDestroyTimeout)
        throws ManagedProcessException {
    waitForExitMaxMs(maxWaitUntilDestroyTimeout);
    if (isAlive()) {
        logger.info("Process didn't exit within max. {}ms, so going to destroy it now: {}",
                maxWaitUntilDestroyTimeout, procLongName());
        destroy();
    }
}

/**
 * Guards the waitFor variants: it is only valid to wait on a process that was actually
 * started (either still alive, or already has a result).
 *
 * @throws ManagedProcessException if start() was never called on this instance
 */
protected void assertWaitForIsValid() throws ManagedProcessException {
    if (!isAlive() && !resultHandler.hasResult()) {
        throw new ManagedProcessException("Asked to waitFor " + procLongName()
                + ", but it was never even start()'ed!");
    }
}

// ---

/**
 * @return the most recent console output (both stdout and stderr), up to
 *         consoleBufferMaxLines lines; empty String if console buffering is disabled
 */
public String getConsole() {
    if (console != null)
        return console.getRecentLines();
    else
        return "";
}

/** @return a suffix for error messages, containing the last buffered console lines */
public String getLastConsoleLines() {
    return ", last " + consoleBufferMaxLines + " lines of console:\n" + getConsole();
}

// ---

/** @return short name of the executable (file name only), lazily cached */
private String procShortName() {
    // could later be extended to some sort of fake numeric PID, e.g. "mysqld-1", from a static
    // Map<String execName, Integer id)
    if (procShortName == null) {
        File exec = getExecutableFile();
        procShortName = exec.getName();
    }
    return procShortName;
}

/** @return full description of the program (command line, plus working directory if set) */
private String procLongName() {
    return "Program "
            + commandLine.toString()
            + (executor.getWorkingDirectory() == null ? "" : " (in working directory "
                    + executor.getWorkingDirectory().getAbsolutePath() + ")");
}

// ---

/**
 * Result handler that logs process completion/failure and keeps the enclosing
 * ManagedProcess's isAlive flag in sync.
 */
public class LoggingExecuteResultHandler extends DefaultExecuteResultHandler {
    @Override
    public void onProcessComplete(int exitValue) {
        super.onProcessComplete(exitValue);
        logger.info(procLongName() + " just exited, with value " + exitValue);
        isAlive = false;
    }

    @Override
    public void onProcessFailed(ExecuteException e) {
        super.onProcessFailed(e);
        // a failure caused by our own watchdog kill (destroy()) is expected, not an error
        if (!watchDog.killedProcess()) {
            logger.error(procLongName() + " failed unexpectedly", e);
        }
        isAlive = false;
    }
}

/**
 * ShutdownHookProcessDestroyer which logs before killing the registered processes
 * on JVM exit.
 */
public static class LoggingShutdownHookProcessDestroyer extends ShutdownHookProcessDestroyer {
    @Override
    public void run() {
        logger.info("Shutdown Hook: JVM is about to exit! Going to kill destroyOnShutdown processes...");
        super.run();
    }
}
}
/*
 * Copyright 2016 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.apiman.gateway.engine.soap;

import io.apiman.gateway.engine.io.IApimanBuffer;

import java.io.UnsupportedEncodingException;

/**
 * Used to scan the first part of the soap envelope, looking for the section
 * that defines the (optional) soap headers.  If the soap headers are not
 * found within a reasonable number of bytes read, then an error is thrown.
 * Typically that would indicate that the content doesn't actually contain
 * a soap payload.
 *
 * An example of a SOAP Message is:
 *
 * <pre>
 * &lt;?xml version="1.0"?>
 * &lt;soap:Envelope xmlns:soap="http://www.w3.org/2003/05/soap-envelope">
 *   &lt;soap:Header>
 *     &lt;ns1:SomeHeader xmlns:ns1="uri:namespace1">Value&lt;/ns1:SomeHeader>
 *   &lt;/soap:Header>
 *   &lt;soap:Body>
 *     &lt;m:GetStockPrice xmlns:m="http://www.example.org/stock/">
 *       &lt;m:StockName>RHT&lt;/m:StockName>
 *     &lt;/m:GetStockPrice>
 *   &lt;/soap:Body>
 * &lt;/soap:Envelope>
 * </pre>
 *
 * @author eric.wittmann@gmail.com
 */
public class SoapHeaderScanner {

    private static final int MAX_BUFFER = 1024 * 4; // 4k default max-buffer size

    // accumulated data from all scan() calls; grows until the headers are found
    private IApimanBuffer buffer;
    private int maxBufferLength = MAX_BUFFER;

    // byte ranges (within buffer) of the pieces found so far; -1/-1 means "not found yet"
    private ByteRange xmlPreamble = new ByteRange();
    private ByteRange envelopeDecl = new ByteRange();
    private ByteRange headers = new ByteRange();

    /**
     * Constructor.
     */
    public SoapHeaderScanner() {
    }

    /**
     * Append the given data to any existing buffer, then scan the buffer
     * looking for the soap headers.  If scanning is complete, this method
     * will return true.  If more data is required, then the method will return
     * false.  If an error condition is detected, then an exception will be
     * thrown.
     * @param buffer the next chunk of inbound data
     * @return true once the soap headers have been fully located, false if more data is needed
     * @throws SoapEnvelopeNotFoundException if no soap envelope was found within maxBufferLength bytes
     */
    public boolean scan(IApimanBuffer buffer) throws SoapEnvelopeNotFoundException {
        if (this.buffer == null) {
            this.buffer = buffer;
        } else {
            this.buffer.append(buffer);
        }
        boolean scanComplete = doScan();
        // If our buffer is already "max size" but we haven't found the start of the
        // soap envelope yet, then we're likely not going to find it.
        if (!scanComplete && this.buffer.length() >= getMaxBufferLength()) {
            throw new SoapEnvelopeNotFoundException();
        }
        return scanComplete;
    }

    /**
     * Runs a full scan over the accumulated buffer from the beginning: optional
     * XML preamble, then the Envelope declaration, then the Header section.
     * @return true if the scan found what it was looking for, false if more data is needed
     * @throws SoapEnvelopeNotFoundException if the content is not a soap envelope
     */
    private boolean doScan() throws SoapEnvelopeNotFoundException {
        // reset all ranges; the whole buffer is re-scanned on every call
        xmlPreamble.startIdx = xmlPreamble.endIdx = -1;
        envelopeDecl.startIdx = envelopeDecl.endIdx = -1;
        headers.startIdx = headers.endIdx = -1;

        int currentIdx = 0;
        while (currentIdx < buffer.length()) {
            byte currentByte = buffer.get(currentIdx);
            if (currentByte == (byte) '<') {
                if (xmlPreamble.startIdx == -1 && isPreamble(currentIdx)) {
                    currentIdx = consumePreamble(currentIdx);
                } else if (envelopeDecl.startIdx == -1) {
                    currentIdx = consumeEnvelopeDecl(currentIdx);
                    if (currentIdx == -1) {
                        throw new SoapEnvelopeNotFoundException();
                    }
                } else {
                    currentIdx = consumeHeaders(currentIdx);
                    if (currentIdx == -1) {
                        throw new SoapEnvelopeNotFoundException();
                    }
                }
            } else {
                currentIdx++;
            }
            if (headers.endIdx != -1) {
                return true;
            }
        }
        return false;
    }

    /**
     * Consumes the XML preamble starting at the given index, recording its range.
     * @param index position of the opening '&lt;' of the preamble
     * @return the index to continue scanning from
     */
    private int consumePreamble(int index) {
        int end = findFrom('>', index);
        if (end == -1) {
            // closing '>' not buffered yet; just advance past the '<'
            return index + 1;
        } else {
            xmlPreamble.startIdx = index;
            xmlPreamble.endIdx = end;
            return end + 1;
        }
    }

    /**
     * Consumes the soap Envelope declaration starting at the given index.
     * @param index position of the opening '&lt;' of the element
     * @return index to continue from, buffer.length() if more data is needed,
     *         or -1 if the element found was not an Envelope
     */
    private int consumeEnvelopeDecl(int index) {
        int end = findFrom('>', index);
        if (end == -1) {
            // Not enough buffer - need more data.
            return buffer.length();
        }
        try {
            String str = buffer.getString(index, end + 1, "UTF-8"); //$NON-NLS-1$
            String [] split = str.split(" "); //$NON-NLS-1$
            // any namespace prefix is accepted; only the local name "Envelope" matters
            if (split[0].endsWith("Envelope")) { //$NON-NLS-1$
                envelopeDecl.startIdx = index;
                envelopeDecl.endIdx = end;
                return end + 1;
            }
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException(e);
        }
        // We found a section of XML that should have been a soap:Envelope, but wasn't.
        return -1;
    }

    /**
     * Consumes the soap Header section (start tag through matching end tag) starting
     * at the given index, recording its range in {@link #headers}.
     * @param index position of the opening '&lt;' of the element
     * @return index to continue from, or buffer.length() if more data is needed
     */
    private int consumeHeaders(int index) {
        int end = findFrom('>', index);
        if (end == -1) {
            // Not enough buffer - need more data.
            return buffer.length();
        }
        try {
            String str = buffer.getString(index, end + 1, "UTF-8"); //$NON-NLS-1$
            String [] split = str.split("[ >]"); //$NON-NLS-1$
            if (!split[0].endsWith("Header")) { //$NON-NLS-1$
                // We found a section of XML that should have been soap:Header, but wasn't.
                return end;
            }
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException(e);
        }
        // At this point we know that we've found the start of the headers.
        headers.startIdx = index;

        // Now read the buffer until we find the end tag </soap:Header>
        end = findHeaderEndTag(end);
        if (end == -1) {
            // Not enough buffer - need more data.
            return buffer.length();
        } else {
            headers.endIdx = end;
        }
        return index + 1;
    }

    /**
     * Scans through the buffer, starting at the given index, looking for the
     * soap:Header end tag (</soap:Header>)  The end tag may have any prefix, or
     * none at all.  We'll actually just scan for end tags until we find one that
     * should match the current element.
     * @param index position just after the Header start tag's '&gt;'
     * @return index of the end tag's '&gt;', or -1 if more data is needed
     */
    private int findHeaderEndTag(int index) {
        int currentIdx = index;
        // depth tracks how deeply we are nested inside child elements of soap:Header
        int depth = 0;
        while (currentIdx < buffer.length()) {
            byte currentByte = buffer.get(currentIdx);
            if (currentByte == (byte) '<') {
                boolean isEndTag = (currentIdx + 1) < buffer.length() && buffer.get(currentIdx+1) == '/';
                if (isEndTag && depth > 0) {
                    // Found an end tag corresponding to some header element within soap:Header
                    depth--;
                    currentIdx = findFrom('>', currentIdx + 1);
                    if (currentIdx == -1) {
                        return -1;
                    }
                } else if (isEndTag && depth == 0) {
                    // Found it!  Probably.
                    // NOTE(review): a self-closing child (<foo/>) or a comment would be
                    // mis-counted as a start tag here - assumes such content doesn't occur
                    int end = findFrom('>', currentIdx + 1);
                    return end;
                } else {
                    // Found a start tag corresponding to a child element of soap:Header
                    depth++;
                    currentIdx = findFrom('>', currentIdx + 1);
                    if (currentIdx == -1) {
                        return -1;
                    }
                }
            } else {
                currentIdx++;
            }
        }
        return -1;
    }

    /**
     * Returns true if the index points to an XML preamble of the following example form:
     *
     * <pre>
     * &lt;?xml version="1.0"?>
     * </pre>
     * @param index buffer position of a '&lt;' character
     * @return true if the six bytes at index spell "&lt;?xml "
     */
    private boolean isPreamble(int index) {
        if (index <= buffer.length() - 6) {
            if (
                    buffer.get(index) == '<' &&
                    buffer.get(index + 1) == '?' &&
                    buffer.get(index + 2) == 'x' &&
                    buffer.get(index + 3) == 'm' &&
                    buffer.get(index + 4) == 'l' &&
                    buffer.get(index + 5) == ' '
                ) {
                return true;
            }
        }
        return false;
    }

    /**
     * Search for the given character in the buffer, starting at the
     * given index.  If not found, return -1.  If found, return the
     * index of the character.
     * @param c character to search for
     * @param index position to start searching from
     * @return index of the character, or -1 if not found
     */
    private int findFrom(char c, int index) {
        int currentIdx = index;
        while (currentIdx < buffer.length()) {
            if (buffer.get(currentIdx) == c) {
                return currentIdx;
            }
            currentIdx++;
        }
        return -1;
    }

    /**
     * @return the maxBufferLength
     */
    public int getMaxBufferLength() {
        return maxBufferLength;
    }

    /**
     * @param maxBufferLength the maxBufferLength to set
     */
    public void setMaxBufferLength(int maxBufferLength) {
        this.maxBufferLength = maxBufferLength;
    }

    /**
     * @return true if an XML preamble was found
     */
    public boolean hasXmlPreamble() {
        return xmlPreamble.startIdx != -1 && xmlPreamble.endIdx != -1;
    }

    /**
     * @return the xml preamble found during scanning
     */
    public String getXmlPreamble() {
        try {
            return buffer.getString(xmlPreamble.startIdx, xmlPreamble.endIdx + 1, "UTF-8"); //$NON-NLS-1$
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * @return the soap envelope declaration found during scanning
     */
    public String getEnvelopeDeclaration() {
        try {
            return buffer.getString(envelopeDecl.startIdx, envelopeDecl.endIdx + 1, "UTF-8"); //$NON-NLS-1$
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * @return the soap headers section (start through end tag) found during scanning
     */
    public String getHeaders() {
        try {
            return buffer.getString(headers.startIdx, headers.endIdx + 1, "UTF-8"); //$NON-NLS-1$
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Return the rest of the buffer (everything after the soap headers);
     * only valid once scan() has returned true.
     * @return the remaining bytes after the Header end tag
     */
    public byte[] getRemainingBytes() {
        return this.buffer.getBytes(headers.endIdx + 1, buffer.length());
    }

    /**
     * Models a range of bytes within the buffer.
     * @author eric.wittmann@gmail.com
     */
    private static class ByteRange {
        int startIdx = -1;
        int endIdx = -1;
    }

}
/*----------------------------------------------------------------------------*/ /* Copyright (c) FIRST 2008-2017. All Rights Reserved. */ /* Open Source Software - may be modified and shared by FRC teams. The code */ /* must be accompanied by the FIRST BSD license file in the root directory of */ /* the project. */ /*----------------------------------------------------------------------------*/ package edu.wpi.first.wpilibj; import edu.wpi.first.wpilibj.hal.FRCNetComm.tResourceType; import edu.wpi.first.wpilibj.hal.HAL; /** * Handle input from standard Joysticks connected to the Driver Station. This class handles standard * input that comes from the Driver Station. Each time a value is requested the most recent value is * returned. There is a single class instance for each joystick and the mapping of ports to hardware * buttons depends on the code in the Driver Station. */ public class Joystick extends JoystickBase { static final byte kDefaultXAxis = 0; static final byte kDefaultYAxis = 1; static final byte kDefaultZAxis = 2; static final byte kDefaultTwistAxis = 2; static final byte kDefaultThrottleAxis = 3; static final int kDefaultTriggerButton = 1; static final int kDefaultTopButton = 2; /** * Represents an analog axis on a joystick. */ public enum AxisType { kX(0), kY(1), kZ(2), kTwist(3), kThrottle(4), kNumAxis(5); @SuppressWarnings("MemberName") public final int value; private AxisType(int value) { this.value = value; } } /** * Represents a digital button on the JoyStick. */ public enum ButtonType { kTrigger(0), kTop(1), kNumButton(2); @SuppressWarnings("MemberName") public final int value; private ButtonType(int value) { this.value = value; } } private final DriverStation m_ds; private final byte[] m_axes; private final byte[] m_buttons; private int m_outputs; private short m_leftRumble; private short m_rightRumble; /** * Construct an instance of a joystick. The joystick index is the USB port on the drivers * station. 
* * @param port The port on the Driver Station that the joystick is plugged into. */ public Joystick(final int port) { this(port, AxisType.kNumAxis.value, ButtonType.kNumButton.value); m_axes[AxisType.kX.value] = kDefaultXAxis; m_axes[AxisType.kY.value] = kDefaultYAxis; m_axes[AxisType.kZ.value] = kDefaultZAxis; m_axes[AxisType.kTwist.value] = kDefaultTwistAxis; m_axes[AxisType.kThrottle.value] = kDefaultThrottleAxis; m_buttons[ButtonType.kTrigger.value] = kDefaultTriggerButton; m_buttons[ButtonType.kTop.value] = kDefaultTopButton; HAL.report(tResourceType.kResourceType_Joystick, port); } /** * Protected version of the constructor to be called by sub-classes. * * <p>This constructor allows the subclass to configure the number of constants for axes and * buttons. * * @param port The port on the Driver Station that the joystick is plugged into. * @param numAxisTypes The number of axis types in the enum. * @param numButtonTypes The number of button types in the enum. */ protected Joystick(int port, int numAxisTypes, int numButtonTypes) { super(port); m_ds = DriverStation.getInstance(); m_axes = new byte[numAxisTypes]; m_buttons = new byte[numButtonTypes]; } /** * Get the X value of the joystick. This depends on the mapping of the joystick connected to the * current port. * * @param hand Unused * @return The X value of the joystick. */ @Override public final double getX(Hand hand) { return getRawAxis(m_axes[AxisType.kX.value]); } /** * Get the Y value of the joystick. This depends on the mapping of the joystick connected to the * current port. * * @param hand Unused * @return The Y value of the joystick. */ @Override public final double getY(Hand hand) { return getRawAxis(m_axes[AxisType.kY.value]); } @Override public final double getZ(Hand hand) { return getRawAxis(m_axes[AxisType.kZ.value]); } /** * Get the twist value of the current joystick. This depends on the mapping of the joystick * connected to the current port. * * @return The Twist value of the joystick. 
*/ public double getTwist() { return getRawAxis(m_axes[AxisType.kTwist.value]); } /** * Get the throttle value of the current joystick. This depends on the mapping of the joystick * connected to the current port. * * @return The Throttle value of the joystick. */ public double getThrottle() { return getRawAxis(m_axes[AxisType.kThrottle.value]); } /** * Get the value of the axis. * * @param axis The axis to read, starting at 0. * @return The value of the axis. */ public double getRawAxis(final int axis) { return m_ds.getStickAxis(getPort(), axis); } /** * For the current joystick, return the axis determined by the argument. * * <p>This is for cases where the joystick axis is returned programatically, otherwise one of the * previous functions would be preferable (for example getX()). * * @param axis The axis to read. * @return The value of the axis. */ public double getAxis(final AxisType axis) { switch (axis) { case kX: return getX(); case kY: return getY(); case kZ: return getZ(); case kTwist: return getTwist(); case kThrottle: return getThrottle(); default: return 0.0; } } /** * For the current joystick, return the number of axis. */ public int getAxisCount() { return m_ds.getStickAxisCount(getPort()); } /** * Read the state of the trigger on the joystick. * * <p>Look up which button has been assigned to the trigger and read its state. * * @param hand This parameter is ignored for the Joystick class and is only here to complete the * GenericHID interface. * @return The state of the trigger. */ @SuppressWarnings("PMD.UnusedFormalParameter") public boolean getTrigger(Hand hand) { return getRawButton(m_buttons[ButtonType.kTrigger.value]); } /** * Read the state of the top button on the joystick. * * <p>Look up which button has been assigned to the top and read its state. * * @param hand This parameter is ignored for the Joystick class and is only here to complete the * GenericHID interface. * @return The state of the top button. 
*/ @SuppressWarnings("PMD.UnusedFormalParameter") public boolean getTop(Hand hand) { return getRawButton(m_buttons[ButtonType.kTop.value]); } @Override public int getPOV(int pov) { return m_ds.getStickPOV(getPort(), pov); } @Override public int getPOVCount() { return m_ds.getStickPOVCount(getPort()); } /** * This is not supported for the Joystick. This method is only here to complete the GenericHID * interface. * * @param hand This parameter is ignored for the Joystick class and is only here to complete the * GenericHID interface. * @return The state of the bumper (always false) */ @SuppressWarnings("PMD.UnusedFormalParameter") public boolean getBumper(Hand hand) { return false; } /** * Get the button value (starting at button 1). * * <p>The appropriate button is returned as a boolean value. * * @param button The button number to be read (starting at 1). * @return The state of the button. */ public boolean getRawButton(final int button) { return m_ds.getStickButton(getPort(), (byte) button); } /** * For the current joystick, return the number of buttons. */ public int getButtonCount() { return m_ds.getStickButtonCount(getPort()); } /** * Get buttons based on an enumerated type. * * <p>The button type will be looked up in the list of buttons and then read. * * @param button The type of button to read. * @return The state of the button. */ public boolean getButton(ButtonType button) { switch (button) { case kTrigger: return getTrigger(); case kTop: return getTop(); default: return false; } } /** * Get the magnitude of the direction vector formed by the joystick's current position relative to * its origin. * * @return The magnitude of the direction vector */ public double getMagnitude() { return Math.sqrt(Math.pow(getX(), 2) + Math.pow(getY(), 2)); } /** * Get the direction of the vector formed by the joystick and its origin in radians. 
* * @return The direction of the vector in radians */ public double getDirectionRadians() { return Math.atan2(getX(), -getY()); } /** * Get the direction of the vector formed by the joystick and its origin in degrees. * * <p>Uses acos(-1) to represent Pi due to absence of readily accessable Pi constant in C++ * * @return The direction of the vector in degrees */ public double getDirectionDegrees() { return Math.toDegrees(getDirectionRadians()); } /** * Get the channel currently associated with the specified axis. * * @param axis The axis to look up the channel for. * @return The channel fr the axis. */ public int getAxisChannel(AxisType axis) { return m_axes[axis.value]; } /** * Set the channel associated with a specified axis. * * @param axis The axis to set the channel for. * @param channel The channel to set the axis to. */ public void setAxisChannel(AxisType axis, int channel) { m_axes[axis.value] = (byte) channel; } /** * Get the value of isXbox for the current joystick. * * @return A boolean that is true if the controller is an xbox controller. */ public boolean getIsXbox() { return m_ds.getJoystickIsXbox(getPort()); } /** * Get the axis type of a joystick axis. * * @return the axis type of a joystick axis. */ public int getAxisType(int axis) { return m_ds.getJoystickAxisType(getPort(), axis); } /** * Get the type of the HID. * * @return the type of the HID. */ @Override public HIDType getType() { return HIDType.values()[m_ds.getJoystickType(getPort())]; } /** * Get the name of the HID. * * @return the name of the HID. */ @Override public String getName() { return m_ds.getJoystickName(getPort()); } @Override public void setOutput(int outputNumber, boolean value) { m_outputs = (m_outputs & ~(1 << (outputNumber - 1))) | ((value ? 
1 : 0) << (outputNumber - 1)); HAL.setJoystickOutputs((byte) getPort(), m_outputs, m_leftRumble, m_rightRumble); } @Override public void setOutputs(int value) { m_outputs = value; HAL.setJoystickOutputs((byte) getPort(), m_outputs, m_leftRumble, m_rightRumble); } @Override public void setRumble(RumbleType type, double value) { if (value < 0) { value = 0; } else if (value > 1) { value = 1; } if (type == RumbleType.kLeftRumble) { m_leftRumble = (short) (value * 65535); } else { m_rightRumble = (short) (value * 65535); } HAL.setJoystickOutputs((byte) getPort(), m_outputs, m_leftRumble, m_rightRumble); } }
/* * Copyright 2016 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.openfact.subsystem.server.extension; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import org.jboss.as.controller.PathAddress; import org.jboss.as.controller.operations.common.Util; import org.jboss.dmr.ModelNode; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.WRITE_ATTRIBUTE_OPERATION; import static org.openfact.subsystem.server.extension.OpenfactSubsystemDefinition.MASTER_ORGANIZATION_NAME; import static org.openfact.subsystem.server.extension.OpenfactSubsystemDefinition.PROVIDERS; import static org.openfact.subsystem.server.extension.OpenfactSubsystemDefinition.SCHEDULED_TASK_INTERVAL; import static org.openfact.subsystem.server.extension.ThemeResourceDefinition.CACHE_TEMPLATES; import static org.openfact.subsystem.server.extension.ThemeResourceDefinition.CACHE_THEMES; import static org.openfact.subsystem.server.extension.ThemeResourceDefinition.DEFAULT; import static org.openfact.subsystem.server.extension.ThemeResourceDefinition.DIR; import static org.openfact.subsystem.server.extension.ThemeResourceDefinition.MODULES; import static 
org.openfact.subsystem.server.extension.ThemeResourceDefinition.STATIC_MAX_AGE;
import static org.openfact.subsystem.server.extension.ThemeResourceDefinition.WELCOME_THEME;

/**
 * Converts json representation of Openfact config to DMR operations.
 *
 * @author Stan Silvert ssilvert@redhat.com (C) 2016 Red Hat Inc.
 */
public class JsonConfigConverter {

    // Top-level json keys that have dedicated converters below; every other
    // top-level key is interpreted as an SPI definition by spis().
    private static final List<String> NON_SPI_LIST = new ArrayList<>();

    static {
        NON_SPI_LIST.add("providers");
        NON_SPI_LIST.add("admin");
        NON_SPI_LIST.add("theme");
        NON_SPI_LIST.add("scheduled");
    }

    /**
     * Convert openfact-server.json to DMR operations that write to standalone.xml
     * or domain.xml.
     *
     * @param json The json representation of the config.
     * @param subsysAddress The management model address of the openfact-server subsystem.
     * @return A list of DMR operations.
     * @throws IOException If the json can not be parsed.
     */
    public static List<ModelNode> convertJsonConfig(String json, PathAddress subsysAddress) throws IOException {
        JsonNode root = new ObjectMapper().readTree(json);

        List<ModelNode> operations = new ArrayList<>();
        operations.add(masterOrganizationName(root, subsysAddress));
        operations.add(scheduledTaskInterval(root, subsysAddress));
        operations.add(providers(root, subsysAddress));
        operations.add(theme(root, subsysAddress.append(ThemeResourceDefinition.TAG_NAME,
                                                        ThemeResourceDefinition.RESOURCE_NAME)));
        operations.addAll(spis(root, subsysAddress));
        return operations;
    }

    /**
     * Builds a write-attribute operation for the master organization name,
     * using admin/organization from the json or the attribute's default.
     */
    private static ModelNode masterOrganizationName(JsonNode root, PathAddress addr) {
        String defaultName = MASTER_ORGANIZATION_NAME.getDefaultValue().asString();
        JsonNode node = getNode(root, "admin", "organization");
        String value = (node == null) ? defaultName : node.asText(defaultName);

        ModelNode writeOp = Util.createOperation(WRITE_ATTRIBUTE_OPERATION, addr);
        writeOp.get("name").set(MASTER_ORGANIZATION_NAME.getName());
        writeOp.get("value").set(value);
        return writeOp;
    }

    /**
     * Builds a write-attribute operation for the scheduled task interval,
     * using scheduled/interval from the json or the attribute's default.
     */
    private static ModelNode scheduledTaskInterval(JsonNode root, PathAddress addr) {
        long defaultInterval = SCHEDULED_TASK_INTERVAL.getDefaultValue().asLong();
        JsonNode node = getNode(root, "scheduled", "interval");
        long value = (node == null) ? defaultInterval : node.asLong(defaultInterval);

        ModelNode writeOp = Util.createOperation(WRITE_ATTRIBUTE_OPERATION, addr);
        writeOp.get("name").set(SCHEDULED_TASK_INTERVAL.getName());
        writeOp.get("value").set(value);
        return writeOp;
    }

    /**
     * Builds a write-attribute operation for the providers list. The json array
     * replaces the default only when "providers" is present and is an array.
     */
    private static ModelNode providers(JsonNode root, PathAddress addr) {
        JsonNode node = getNode(root, "providers");

        ModelNode value;
        if (node != null && node.isArray()) {
            value = new ModelNode();
            for (JsonNode entry : node) {
                value.add(entry.asText());
            }
        } else {
            value = PROVIDERS.getDefaultValue();
        }

        ModelNode writeOp = Util.createOperation(WRITE_ATTRIBUTE_OPERATION, addr);
        writeOp.get("name").set(PROVIDERS.getName());
        writeOp.get("value").set(value);
        return writeOp;
    }

    /**
     * Builds the add operation for the theme resource. Each attribute falls back
     * to its declared default when absent from the json; welcomeTheme, default
     * and modules are only set when present.
     */
    private static ModelNode theme(JsonNode root, PathAddress addr) {
        JsonNode themeNode = getNode(root, "theme");
        ModelNode addOp = Util.createAddOperation(addr);

        JsonNode node = getNode(themeNode, "staticMaxAge");
        long staticMaxAge = STATIC_MAX_AGE.getDefaultValue().asLong();
        if (node != null) staticMaxAge = node.asLong(staticMaxAge);
        addOp.get(STATIC_MAX_AGE.getName()).set(staticMaxAge);

        JsonNode cacheTemplatesNode = getNode(themeNode, "cacheTemplates");
        boolean cacheTemplates = CACHE_TEMPLATES.getDefaultValue().asBoolean();
        if (cacheTemplatesNode != null) cacheTemplates = cacheTemplatesNode.asBoolean(cacheTemplates);
        addOp.get(CACHE_TEMPLATES.getName()).set(cacheTemplates);

        JsonNode cacheThemesNode = getNode(themeNode, "cacheThemes");
        boolean cacheThemes = CACHE_THEMES.getDefaultValue().asBoolean();
        if (cacheThemesNode != null) cacheThemes = cacheThemesNode.asBoolean(cacheThemes);
        addOp.get(CACHE_THEMES.getName()).set(cacheThemes);

        JsonNode dirNode = getNode(themeNode, "folder", "dir");
        String dir = DIR.getDefaultValue().asString();
        if (dirNode != null) dir = dirNode.asText(dir);
        addOp.get(DIR.getName()).set(dir);

        JsonNode welcomeNode = getNode(themeNode, "welcomeTheme");
        if (welcomeNode != null) addOp.get(WELCOME_THEME.getName()).set(welcomeNode.asText());

        JsonNode defaultNode = getNode(themeNode, "default");
        if (defaultNode != null) addOp.get(DEFAULT.getName()).set(defaultNode.asText());

        JsonNode modulesNode = getNode(themeNode, "module", "modules");
        if (modulesNode != null && modulesNode.isArray()) {
            addOp.get(MODULES.getName()).set(themeModules(modulesNode));
        }

        return addOp;
    }

    // Converts a json array of theme module names to a DMR list node.
    private static ModelNode themeModules(JsonNode modulesNode) {
        ModelNode modules = new ModelNode();
        for (JsonNode module : modulesNode) {
            modules.add(module.asText());
        }
        return modules;
    }

    /**
     * Builds add operations for every top-level json key that is not in
     * {@link #NON_SPI_LIST}; each such key is treated as an SPI name.
     */
    private static Collection<ModelNode> spis(JsonNode root, PathAddress addr) {
        List<ModelNode> result = new ArrayList<>();
        for (Iterator<String> names = root.fieldNames(); names.hasNext();) {
            String spiName = names.next();
            if (NON_SPI_LIST.contains(spiName)) continue;

            result.addAll(spi(root, addr.append("spi", spiName), spiName));
        }
        return result;
    }

    /**
     * Builds the add operation for one SPI plus one add operation per provider
     * beneath it. The reserved "provider" field names the SPI's default provider.
     */
    private static List<ModelNode> spi(JsonNode root, PathAddress spiAddr, String spiName) {
        ModelNode spiOp = Util.createAddOperation(spiAddr);
        List<ModelNode> spiAndProviders = new ArrayList<>();
        spiAndProviders.add(spiOp);

        for (Iterator<String> providerNames = root.get(spiName).fieldNames(); providerNames.hasNext();) {
            String providerName = providerNames.next();
            if ("provider".equals(providerName)) {
                spiOp.get(SpiResourceDefinition.DEFAULT_PROVIDER.getName())
                     .set(getNode(root, spiName, "provider").asText());
            } else {
                spiAndProviders.add(spiProvider(getNode(root, spiName, providerName),
                                                spiAddr.append("provider", providerName)));
            }
        }

        return spiAndProviders;
    }

    /**
     * Builds the add operation for a single provider. "enabled" maps to the
     * ENABLED attribute (defaulted when absent); every other field becomes a
     * property, with array values flattened to a quoted-list string.
     */
    private static ModelNode spiProvider(JsonNode providerNode, PathAddress providerAddr) {
        ModelNode addOp = Util.createAddOperation(providerAddr);
        ModelNode properties = new ModelNode();

        for (Iterator<String> fieldNames = providerNode.fieldNames(); fieldNames.hasNext();) {
            String fieldName = fieldNames.next();
            JsonNode fieldValue = providerNode.get(fieldName);
            if ("enabled".equals(fieldName)) {
                addOp.get(ProviderResourceDefinition.ENABLED.getName()).set(fieldValue.asBoolean());
            } else if (fieldValue.isArray()) {
                properties.get(fieldName).set(makeArrayText(fieldValue));
            } else {
                properties.get(fieldName).set(fieldValue.asText());
            }
        }

        if (properties.isDefined() && !properties.asPropertyList().isEmpty()) {
            addOp.get("properties").set(properties);
        }

        if (!addOp.hasDefined(ProviderResourceDefinition.ENABLED.getName())) {
            addOp.get(ProviderResourceDefinition.ENABLED.getName())
                 .set(ProviderResourceDefinition.ENABLED.getDefaultValue());
        }

        return addOp;
    }

    // Renders a json array as the string ["a","b",...] with each element quoted.
    private static String makeArrayText(JsonNode arrayNode) {
        StringBuilder text = new StringBuilder("[");
        String separator = "";
        for (JsonNode element : arrayNode) {
            text.append(separator).append("\"").append(element.asText()).append("\"");
            separator = ",";
        }
        return text.append("]").toString();
    }

    // Walks the given field path; returns null if any step (or the root) is missing.
    private static JsonNode getNode(JsonNode root, String... path) {
        JsonNode current = root;
        for (String field : path) {
            if (current == null) {
                return null;
            }
            current = current.get(field);
        }
        return current;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.service.reads;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.function.UnaryOperator;

import com.google.common.base.Joiner;

import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.cql3.statements.schema.IndexTarget;
import org.apache.cassandra.db.DecoratedKey;
import org.apache.cassandra.db.DeletionTime;
import org.apache.cassandra.db.ReadCommand;
import org.apache.cassandra.db.ReadResponse;
import org.apache.cassandra.db.filter.DataLimits;
import org.apache.cassandra.db.partitions.PartitionIterator;
import org.apache.cassandra.db.partitions.PartitionIterators;
import org.apache.cassandra.db.partitions.UnfilteredPartitionIterator;
import org.apache.cassandra.db.partitions.UnfilteredPartitionIterators;
import org.apache.cassandra.db.rows.RangeTombstoneMarker;
import org.apache.cassandra.db.rows.Row;
import org.apache.cassandra.db.rows.UnfilteredRowIterator;
import org.apache.cassandra.db.rows.UnfilteredRowIterators;
import org.apache.cassandra.db.transform.EmptyPartitionsDiscarder;
import org.apache.cassandra.db.transform.Filter;
import org.apache.cassandra.db.transform.FilteredPartitions;
import org.apache.cassandra.db.transform.Transformation;
import org.apache.cassandra.index.sasi.SASIIndex;
import org.apache.cassandra.locator.Endpoints;
import org.apache.cassandra.locator.ReplicaPlan;
import org.apache.cassandra.net.Message;
import org.apache.cassandra.schema.IndexMetadata;
import org.apache.cassandra.schema.TableMetadata;
import org.apache.cassandra.service.reads.repair.ReadRepair;
import org.apache.cassandra.service.reads.repair.RepairedDataTracker;
import org.apache.cassandra.service.reads.repair.RepairedDataVerifier;

import static com.google.common.collect.Iterables.*;

/**
 * Resolves full (non-digest) read responses from several replicas: merges the
 * per-replica iterators, feeds differences to the supplied {@link ReadRepair}'s
 * merge listener, and optionally layers on short-read protection, replica
 * filtering protection and repaired-data digest tracking.
 */
public class DataResolver<E extends Endpoints<E>, P extends ReplicaPlan.ForRead<E>> extends ResponseResolver<E, P>
{
    // Cached from command.metadata() at construction; used by counters and filters below.
    private final boolean enforceStrictLiveness;
    private final ReadRepair<E, P> readRepair;

    public DataResolver(ReadCommand command, ReplicaPlan.Shared<E, P> replicaPlan, ReadRepair<E, P> readRepair, long queryStartNanoTime)
    {
        super(command, replicaPlan, queryStartNanoTime);
        this.enforceStrictLiveness = command.metadata().enforceStrictLiveness();
        this.readRepair = readRepair;
    }

    /**
     * Returns the filtered data of the FIRST response only, without any merging
     * or reconciliation across replicas.
     */
    public PartitionIterator getData()
    {
        ReadResponse response = responses.get(0).payload;
        return UnfilteredPartitionIterators.filter(response.makeIterator(command), command.nowInSec());
    }

    public boolean isDataPresent()
    {
        return !responses.isEmpty();
    }

    /**
     * Reconciles all responses received so far into a single post-filter,
     * post-limit iterator, applying read repair and (when needed) replica
     * filtering protection.
     */
    public PartitionIterator resolve()
    {
        // We could get more responses while this method runs, which is ok (we're happy to ignore any response not here
        // at the beginning of this method), so grab the response count once and use that through the method.
        Collection<Message<ReadResponse>> messages = responses.snapshot();
        assert !any(messages, msg -> msg.payload.isDigestResponse());

        E replicas = replicaPlan().candidates().select(transform(messages, Message::from), false);

        // If requested, inspect each response for a digest of the replica's repaired data set
        RepairedDataTracker repairedDataTracker = command.isTrackingRepairedStatus()
                                                  ? new RepairedDataTracker(getRepairedDataVerifier(command))
                                                  : null;
        if (repairedDataTracker != null)
        {
            messages.forEach(msg -> {
                // Only full replicas contribute conclusive repaired-data digests.
                if (msg.payload.mayIncludeRepairedDigest() && replicas.byEndpoint().get(msg.from()).isFull())
                {
                    repairedDataTracker.recordDigest(msg.from(),
                                                     msg.payload.repairedDataDigest(),
                                                     msg.payload.isRepairedDigestConclusive());
                }
            });
        }

        if (!needsReplicaFilteringProtection())
        {
            ResolveContext context = new ResolveContext(replicas);
            return resolveWithReadRepair(context,
                                         i -> shortReadProtectedResponse(i, context),
                                         UnaryOperator.identity(),
                                         repairedDataTracker);
        }

        return resolveWithReplicaFilteringProtection(replicas, repairedDataTracker);
    }

    /**
     * Replica filtering protection is required for any filtering query except
     * those served by a SASI custom index (which is exempted here); queries
     * with no row filter never need it.
     */
    private boolean needsReplicaFilteringProtection()
    {
        if (command.rowFilter().isEmpty())
            return false;

        IndexMetadata indexDef = command.indexMetadata();
        if (indexDef != null && indexDef.isCustom())
        {
            String className = indexDef.options.get(IndexTarget.CUSTOM_INDEX_OPTION_NAME);
            return !SASIIndex.class.getName().equals(className);
        }

        return true;
    }

    /**
     * Per-resolution state: the replicas whose responses are being merged and
     * a fresh post-reconciliation limit counter (one counter per context).
     */
    private class ResolveContext
    {
        private final E replicas;
        private final DataLimits.Counter mergedResultCounter;

        private ResolveContext(E replicas)
        {
            this.replicas = replicas;
            this.mergedResultCounter = command.limits().newCounter(command.nowInSec(),
                                                                   true,
                                                                   command.selectsFullPartition(),
                                                                   enforceStrictLiveness);
        }

        private boolean needsReadRepair()
        {
            return replicas.size() > 1;
        }

        private boolean needShortReadProtection()
        {
            // If we have only one result, there is no read repair to do and we can't get short reads
            // Also, so-called "short reads" stem from nodes returning only a subset of the results they have for a
            // partition due to the limit, but that subset not being enough post-reconciliation. So if we don't have a limit,
            // don't bother protecting against short reads.
            return replicas.size() > 1 && !command.limits().isUnlimited();
        }
    }

    // Supplies the (possibly protected) response iterator for replica index i.
    @FunctionalInterface
    private interface ResponseProvider
    {
        UnfilteredPartitionIterator getResponse(int i);
    }

    /**
     * Returns response i, wrapped in short-read protection when the context
     * calls for it; otherwise the raw response iterator.
     */
    private UnfilteredPartitionIterator shortReadProtectedResponse(int i, ResolveContext context)
    {
        UnfilteredPartitionIterator originalResponse = responses.get(i).payload.makeIterator(command);

        return context.needShortReadProtection()
               ? ShortReadProtection.extend(context.replicas.get(i),
                                            () -> responses.clearUnsafe(i),
                                            originalResponse,
                                            command,
                                            context.mergedResultCounter,
                                            queryStartNanoTime,
                                            enforceStrictLiveness)
               : originalResponse;
    }

    /**
     * Resolves with a read-repair merge listener attached when more than one
     * replica responded; otherwise plain resolution.
     */
    private PartitionIterator resolveWithReadRepair(ResolveContext context,
                                                    ResponseProvider responseProvider,
                                                    UnaryOperator<PartitionIterator> preCountFilter,
                                                    RepairedDataTracker repairedDataTracker)
    {
        UnfilteredPartitionIterators.MergeListener listener = null;
        if (context.needsReadRepair())
        {
            P sources = replicaPlan.getWithContacts(context.replicas);
            listener = wrapMergeListener(readRepair.getMergeListener(sources), sources, repairedDataTracker);
        }

        return resolveInternal(context, listener, responseProvider, preCountFilter);
    }

    @SuppressWarnings("resource")
    private PartitionIterator resolveWithReplicaFilteringProtection(E replicas, RepairedDataTracker repairedDataTracker)
    {
        // Protecting against inconsistent replica filtering (some replica returning a row that is outdated but that
        // wouldn't be removed by normal reconciliation because up-to-date replica have filtered the up-to-date version
        // of that row) involves 3 main elements:
        //   1) We combine short-read protection and a merge listener that identifies potentially "out-of-date"
        //      rows to create an iterator that is guaranteed to produce enough valid row results to satisfy the query
        //      limit if enough actually exist. A row is considered out-of-date if its merged form is non-empty and we
        //      receive no response from at least one replica. In this case, it is possible that filtering at the
        //      "silent" replica has produced a more up-to-date result.
        //   2) This iterator is passed to the standard resolution process with read-repair, but is first wrapped in a
        //      response provider that lazily "completes" potentially out-of-date rows by directly querying them on the
        //      replicas that were previously silent. As this iterator is consumed, it caches valid data for potentially
        //      out-of-date rows, and this cached data is merged with the fetched data as rows are requested. If there
        //      is no replica divergence, only rows in the partition being evaluated will be cached (then released
        //      when the partition is consumed).
        //   3) After a "complete" row is materialized, it must pass the row filter supplied by the original query
        //      before it counts against the limit.

        // We need separate contexts, as each context has its own counter
        ResolveContext firstPhaseContext = new ResolveContext(replicas);
        ResolveContext secondPhaseContext = new ResolveContext(replicas);
        ReplicaFilteringProtection<E> rfp = new ReplicaFilteringProtection<>(replicaPlan().keyspace(),
                                                                            command,
                                                                            replicaPlan().consistencyLevel(),
                                                                            queryStartNanoTime,
                                                                            firstPhaseContext.replicas,
                                                                            DatabaseDescriptor.getCachedReplicaRowsWarnThreshold(),
                                                                            DatabaseDescriptor.getCachedReplicaRowsFailThreshold());
        PartitionIterator firstPhasePartitions = resolveInternal(firstPhaseContext,
                                                                 rfp.mergeController(),
                                                                 i -> shortReadProtectedResponse(i, firstPhaseContext),
                                                                 UnaryOperator.identity());
        PartitionIterator completedPartitions = resolveWithReadRepair(secondPhaseContext,
                                                                      i -> rfp.queryProtectedPartitions(firstPhasePartitions, i),
                                                                      results -> command.rowFilter().filter(results, command.metadata(), command.nowInSec()),
                                                                      repairedDataTracker);
        // Ensure that the RFP instance has a chance to record metrics when the iterator closes.
        return PartitionIterators.doOnClose(completedPartitions, firstPhasePartitions::close);
    }

    /**
     * Core pipeline: merge per-replica iterators, then filter rows, then apply
     * the post-reconciliation limit counter, then discard empty partitions —
     * in exactly that order (see comment below and CASSANDRA-13747).
     */
    @SuppressWarnings("resource")
    private PartitionIterator resolveInternal(ResolveContext context,
                                              UnfilteredPartitionIterators.MergeListener mergeListener,
                                              ResponseProvider responseProvider,
                                              UnaryOperator<PartitionIterator> preCountFilter)
    {
        int count = context.replicas.size();
        List<UnfilteredPartitionIterator> results = new ArrayList<>(count);
        for (int i = 0; i < count; i++)
            results.add(responseProvider.getResponse(i));

        /*
         * Even though every response, individually, will honor the limit, it is possible that we will, after the merge,
         * have more rows than the client requested. To make sure that we still conform to the original limit,
         * we apply a top-level post-reconciliation counter to the merged partition iterator.
         *
         * Short read protection logic (ShortReadRowsProtection.moreContents()) relies on this counter to be applied
         * to the current partition to work. For this reason we have to apply the counter transformation before
         * empty partition discard logic kicks in - for it will eagerly consume the iterator.
         *
         * That's why the order here is: 1) merge; 2) filter rows; 3) count; 4) discard empty partitions
         *
         * See CASSANDRA-13747 for more details.
         */
        UnfilteredPartitionIterator merged = UnfilteredPartitionIterators.merge(results, mergeListener);
        Filter filter = new Filter(command.nowInSec(), command.metadata().enforceStrictLiveness());
        FilteredPartitions filtered = FilteredPartitions.filter(merged, filter);
        PartitionIterator counted = Transformation.apply(preCountFilter.apply(filtered), context.mergedResultCounter);
        return Transformation.apply(counted, new EmptyPartitionsDiscarder());
    }

    // Overridable hook for tests to substitute a different verifier.
    protected RepairedDataVerifier getRepairedDataVerifier(ReadCommand command)
    {
        return RepairedDataVerifier.verifier(command);
    }

    // Renders every response as "endpoint => debug string" for assertion messages.
    private String makeResponsesDebugString(DecoratedKey partitionKey)
    {
        return Joiner.on(",\n").join(transform(getMessages().snapshot(), m -> m.from() + " => " + m.payload.toDebugString(command, partitionKey)));
    }

    /**
     * Wraps the read-repair merge listener so that any AssertionError thrown
     * while merging deletions, rows or range tombstones is re-thrown enriched
     * with the merged value, the per-source versions and the raw responses.
     * Also verifies the repaired-data tracker (when present) on close.
     */
    private UnfilteredPartitionIterators.MergeListener wrapMergeListener(UnfilteredPartitionIterators.MergeListener partitionListener,
                                                                         P sources,
                                                                         RepairedDataTracker repairedDataTracker)
    {
        // Avoid wrapping no-op listener as it doesn't throw, unless we're tracking repaired status
        // in which case we need to inject the tracker & verify on close
        if (partitionListener == UnfilteredPartitionIterators.MergeListener.NOOP)
        {
            if (repairedDataTracker == null)
                return partitionListener;

            return new UnfilteredPartitionIterators.MergeListener()
            {
                public UnfilteredRowIterators.MergeListener getRowMergeListener(DecoratedKey partitionKey, List<UnfilteredRowIterator> versions)
                {
                    return UnfilteredRowIterators.MergeListener.NOOP;
                }

                public void close()
                {
                    repairedDataTracker.verify();
                }
            };
        }

        return new UnfilteredPartitionIterators.MergeListener()
        {
            public UnfilteredRowIterators.MergeListener getRowMergeListener(DecoratedKey partitionKey, List<UnfilteredRowIterator> versions)
            {
                UnfilteredRowIterators.MergeListener rowListener = partitionListener.getRowMergeListener(partitionKey, versions);

                return new UnfilteredRowIterators.MergeListener()
                {
                    public void onMergedPartitionLevelDeletion(DeletionTime mergedDeletion, DeletionTime[] versions)
                    {
                        try
                        {
                            rowListener.onMergedPartitionLevelDeletion(mergedDeletion, versions);
                        }
                        catch (AssertionError e)
                        {
                            // The following can be pretty verbose, but it's really only triggered if a bug happens, so we'd
                            // rather get more info to debug than not.
                            TableMetadata table = command.metadata();
                            String details = String.format("Error merging partition level deletion on %s: merged=%s, versions=%s, sources={%s}, debug info:%n %s",
                                                           table,
                                                           mergedDeletion == null ? "null" : mergedDeletion.toString(),
                                                           '[' + Joiner.on(", ").join(transform(Arrays.asList(versions), rt -> rt == null ? "null" : rt.toString())) + ']',
                                                           sources.contacts(),
                                                           makeResponsesDebugString(partitionKey));
                            throw new AssertionError(details, e);
                        }
                    }

                    public Row onMergedRows(Row merged, Row[] versions)
                    {
                        try
                        {
                            return rowListener.onMergedRows(merged, versions);
                        }
                        catch (AssertionError e)
                        {
                            // The following can be pretty verbose, but it's really only triggered if a bug happens, so we'd
                            // rather get more info to debug than not.
                            TableMetadata table = command.metadata();
                            String details = String.format("Error merging rows on %s: merged=%s, versions=%s, sources={%s}, debug info:%n %s",
                                                           table,
                                                           merged == null ? "null" : merged.toString(table),
                                                           '[' + Joiner.on(", ").join(transform(Arrays.asList(versions), rt -> rt == null ? "null" : rt.toString(table))) + ']',
                                                           sources.contacts(),
                                                           makeResponsesDebugString(partitionKey));
                            throw new AssertionError(details, e);
                        }
                    }

                    public void onMergedRangeTombstoneMarkers(RangeTombstoneMarker merged, RangeTombstoneMarker[] versions)
                    {
                        try
                        {
                            // The code for merging range tombstones is a tad complex and we had the assertions there triggered
                            // unexpectedly in a few occasions (CASSANDRA-13237, CASSANDRA-13719). It's hard to get insights
                            // when that happens without more context than what the assertion errors give us however, hence the
                            // catch here that basically gathers as much context as reasonable.
                            rowListener.onMergedRangeTombstoneMarkers(merged, versions);
                        }
                        catch (AssertionError e)
                        {
                            // The following can be pretty verbose, but it's really only triggered if a bug happens, so we'd
                            // rather get more info to debug than not.
                            TableMetadata table = command.metadata();
                            String details = String.format("Error merging RTs on %s: merged=%s, versions=%s, sources={%s}, debug info:%n %s",
                                                           table,
                                                           merged == null ? "null" : merged.toString(table),
                                                           '[' + Joiner.on(", ").join(transform(Arrays.asList(versions), rt -> rt == null ? "null" : rt.toString(table))) + ']',
                                                           sources.contacts(),
                                                           makeResponsesDebugString(partitionKey));
                            throw new AssertionError(details, e);
                        }
                    }

                    public void close()
                    {
                        rowListener.close();
                    }
                };
            }

            public void close()
            {
                partitionListener.close();

                if (repairedDataTracker != null)
                    repairedDataTracker.verify();
            }
        };
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
package org.elasticsearch.xpack.monitoring.integration;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.analysis.common.CommonAnalysisPlugin;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.core.CheckedRunnable;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.collapse.CollapseBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.threadpool.ThreadPoolStats;
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.xpack.core.XPackSettings;
import org.elasticsearch.xpack.core.action.XPackUsageRequestBuilder;
import org.elasticsearch.xpack.core.action.XPackUsageResponse;
import org.elasticsearch.xpack.core.monitoring.MonitoredSystem;
import org.elasticsearch.xpack.core.monitoring.MonitoringFeatureSetUsage;
import org.elasticsearch.xpack.core.monitoring.action.MonitoringBulkRequestBuilder;
import org.elasticsearch.xpack.core.monitoring.action.MonitoringBulkResponse;
import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils;
import org.elasticsearch.xpack.monitoring.LocalStateMonitoring;
import org.elasticsearch.xpack.monitoring.MonitoringService;
import org.elasticsearch.xpack.monitoring.test.MockIngestPlugin;

import java.io.IOException;
import java.lang.Thread.State;
import java.lang.management.LockInfo;
import java.lang.management.ManagementFactory;
import java.lang.management.MonitorInfo;
import java.lang.management.ThreadInfo;
import java.time.Instant;
import java.time.ZoneOffset;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;

import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractValue;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.threadpool.ThreadPool.Names.WRITE;
import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS;
import static org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils.TEMPLATE_VERSION;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.emptyOrNullString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

/**
 * Single-node integration tests for the monitoring plugin: documents are routed
 * through the "_local" exporter (configured below, initially disabled) and the
 * resulting {@code .monitoring-*} indices are inspected for the expected shape.
 */
public class MonitoringIT extends ESSingleNodeTestCase {

    @Override
    protected Settings nodeSettings() {
        // Local exporter starts disabled; tests enable it via whenExportersAreReady(...)
        // (defined later in this class) so collection only runs inside a test window.
        return Settings.builder()
            .put(super.nodeSettings())
            .put(XPackSettings.MACHINE_LEARNING_ENABLED.getKey(), false)
            .put("xpack.monitoring.collection.interval", MonitoringService.MIN_INTERVAL)
            .put("xpack.monitoring.exporters._local.type", "local")
            .put("xpack.monitoring.exporters._local.enabled", false)
            .put("xpack.monitoring.exporters._local.cluster_alerts.management.enabled", false)
            .build();
    }

    @Override
    protected Collection<Class<? extends Plugin>> getPlugins() {
        return Arrays.asList(LocalStateMonitoring.class, MockIngestPlugin.class, CommonAnalysisPlugin.class);
    }

    // Three ndjson index/source pairs plus a trailing blank line; the bulk
    // endpoint is expected to index exactly 3 documents from this payload.
    private String createBulkEntity() {
        return "{\"index\":{\"_type\":\"monitoring_data_type\"}}\n"
            + "{\"foo\":{\"bar\":0}}\n"
            + "{\"index\":{\"_type\":\"monitoring_data_type\"}}\n"
            + "{\"foo\":{\"bar\":1}}\n"
            + "{\"index\":{\"_type\":\"monitoring_data_type\"}}\n"
            + "{\"foo\":{\"bar\":2}}\n"
            + "\n";
    }

    /**
     * Monitoring Bulk test:
     *
     * This test uses the Monitoring Bulk Request to index documents. It then ensure that the documents were correctly
     * indexed and have the expected information. REST API tests (like how this is really called) are handled as part of the
     * XPackRest tests.
     */
    public void testMonitoringBulk() throws Exception {
        whenExportersAreReady(() -> {
            final MonitoredSystem system = randomSystem();
            final TimeValue interval = TimeValue.timeValueSeconds(randomIntBetween(1, 20));

            // Index the 3-document payload through the monitoring bulk API.
            final MonitoringBulkResponse bulkResponse = new MonitoringBulkRequestBuilder(client()).add(
                system,
                new BytesArray(createBulkEntity().getBytes("UTF-8")),
                XContentType.JSON,
                System.currentTimeMillis(),
                interval.millis()
            ).get();

            assertThat(bulkResponse.status(), is(RestStatus.OK));
            assertThat(bulkResponse.getError(), nullValue());

            final String monitoringIndex = ".monitoring-" + system.getSystem() + "-" + TEMPLATE_VERSION + "-*";

            // Wait for the monitoring index to be created
            assertBusy(() -> {
                // Monitoring uses auto_expand_replicas, so it should be green even without replicas
                ensureGreen(monitoringIndex);
                assertThat(client().admin().indices().prepareRefresh(monitoringIndex).get().getStatus(), is(RestStatus.OK));

                final SearchResponse response = client().prepareSearch(".monitoring-" + system.getSystem() + "-" + TEMPLATE_VERSION + "-*")
                    .get();

                // exactly 3 results are expected
                assertThat("No monitoring documents yet", response.getHits().getTotalHits().value, equalTo(3L));

                final List<Map<String, Object>> sources = Arrays.stream(response.getHits().getHits())
                    .map(SearchHit::getSourceAsMap)
                    .collect(Collectors.toList());

                // find distinct _source.timestamp fields
                assertThat(sources.stream().map(source -> source.get("timestamp")).distinct().count(), is(1L));
                // find distinct _source.source_node fields (which is a map)
                assertThat(sources.stream().map(source -> source.get("source_node")).distinct().count(), is(1L));
            });

            final SearchResponse response = client().prepareSearch(monitoringIndex).get();
            final SearchHits hits = response.getHits();

            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
            assertThat(
                "Monitoring documents must have the same timestamp",
                Arrays.stream(hits.getHits()).map(hit -> extractValue("timestamp", hit.getSourceAsMap())).distinct().count(),
                equalTo(1L)
            );
            assertThat(
                "Monitoring documents must have the same source_node timestamp",
                Arrays.stream(hits.getHits())
                    .map(hit -> extractValue("source_node.timestamp", hit.getSourceAsMap()))
                    .distinct()
                    .count(),
                equalTo(1L)
            );

            for (final SearchHit hit : hits.getHits()) {
                assertMonitoringDoc(toMap(hit), system, interval);
            }
        });
    }

    /**
     * Monitoring Service test:
     *
     * This test waits for the monitoring service to collect monitoring documents and then checks that all expected documents
     * have been indexed with the expected information.
     */
    public void testMonitoringService() throws Exception {
        final boolean createAPMIndex = randomBoolean();
        final String indexName = createAPMIndex ? "apm-2017.11.06" : "books";

        // Seed one user document so the cluster has a non-monitoring index to report on.
        assertThat(
            client().prepareIndex(indexName)
                .setId("0")
                .setRefreshPolicy("true")
                .setSource("{\"field\":\"value\"}", XContentType.JSON)
                .get()
                .status(),
            is(RestStatus.CREATED)
        );

        final Settings settings = Settings.builder().put("cluster.metadata.display_name", "my cluster").build();
        assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings));

        whenExportersAreReady(() -> {
            final AtomicReference<SearchResponse> searchResponse = new AtomicReference<>();

            assertBusy(() -> {
                // Collapse on "type" so we see one hit per collector document type.
                final SearchResponse response = client().prepareSearch(".monitoring-es-*")
                    .setCollapse(new CollapseBuilder("type"))
                    .addSort("timestamp", SortOrder.DESC)
                    .get();

                assertThat(response.status(), is(RestStatus.OK));
                assertThat(
                    "Expecting a minimum number of 6 docs, one per collector",
                    response.getHits().getHits().length,
                    greaterThanOrEqualTo(6)
                );

                searchResponse.set(response);
            });

            for (final SearchHit hit : searchResponse.get().getHits()) {
                final Map<String, Object> searchHit = toMap(hit);
                assertMonitoringDoc(searchHit, MonitoredSystem.ES, MonitoringService.MIN_INTERVAL);
            }
        });
    }

    /**
     * Asserts that the monitoring document (provided as a Map) contains the common information that
     * all monitoring documents must have
     */
@SuppressWarnings("unchecked") private void assertMonitoringDoc(final Map<String, Object> document, final MonitoredSystem expectedSystem, final TimeValue interval) { assertEquals(document.toString(), 3, document.size()); final String index = (String) document.get("_index"); assertThat(index, containsString(".monitoring-" + expectedSystem.getSystem() + "-" + TEMPLATE_VERSION + "-")); assertThat((String) document.get("_id"), is(not(emptyOrNullString()))); final Map<String, Object> source = (Map<String, Object>) document.get("_source"); assertThat(source, notNullValue()); assertThat((String) source.get("cluster_uuid"), is(not(emptyOrNullString()))); final String timestamp = (String) source.get("timestamp"); assertThat(timestamp, is(not(emptyOrNullString()))); assertThat(((Number) source.get("interval_ms")).longValue(), equalTo(interval.getMillis())); DateFormatter formatter = DateFormatter.forPattern("yyyy.MM.dd"); long isoTimestamp = Instant.from(DateFormatter.forPattern("strict_date_time").parse(timestamp)).toEpochMilli(); String isoDateTime = MonitoringTemplateUtils.indexName(formatter.withZone(ZoneOffset.UTC), expectedSystem, isoTimestamp); assertThat(index, equalTo(isoDateTime)); final Map<String, Object> sourceNode = (Map<String, Object>) source.get("source_node"); if (sourceNode != null) { assertMonitoringDocSourceNode(sourceNode); } } /** * Asserts that the source_node information (provided as a Map) of a monitoring document correspond to * the current local node information */ private void assertMonitoringDocSourceNode(final Map<String, Object> sourceNode) { assertEquals(6, sourceNode.size()); final NodesInfoResponse nodesResponse = client().admin().cluster().prepareNodesInfo().clear().get(); assertEquals(1, nodesResponse.getNodes().size()); final DiscoveryNode node = nodesResponse.getNodes().stream().findFirst().get().getNode(); assertThat(sourceNode.get("uuid"), equalTo(node.getId())); assertThat(sourceNode.get("host"), equalTo(node.getHostName())); 
assertThat(sourceNode.get("transport_address"), equalTo(node.getAddress().toString())); assertThat(sourceNode.get("ip"), equalTo(node.getAddress().getAddress())); assertThat(sourceNode.get("name"), equalTo(node.getName())); assertThat((String) sourceNode.get("timestamp"), is(not(emptyOrNullString()))); } /** * Executes the given {@link Runnable} once the monitoring exporters are ready and functional. Ensure that * the exporters and the monitoring service are shut down after the runnable has been executed. */ private void whenExportersAreReady(final CheckedRunnable<Exception> runnable) throws Exception { try { try { enableMonitoring(); } catch (AssertionError e) { // Added to debug https://github.com/elastic/elasticsearch/issues/29880 // Remove when fixed StringBuilder b = new StringBuilder(); b.append("\n==== jstack at monitoring enablement failure time ====\n"); for (ThreadInfo ti : ManagementFactory.getThreadMXBean().dumpAllThreads(true, true)) { append(b, ti); } b.append("^^==============================================\n"); logger.info(b.toString()); throw e; } runnable.run(); } finally { disableMonitoring(); } } // borrowed from randomized-testing private static void append(StringBuilder b, ThreadInfo ti) { b.append('"').append(ti.getThreadName()).append('"'); b.append(" ID=").append(ti.getThreadId()); final State threadState = ti.getThreadState(); b.append(" ").append(threadState); if (ti.getLockName() != null) { b.append(" on ").append(ti.getLockName()); } if (ti.getLockOwnerName() != null) { b.append(" owned by \"").append(ti.getLockOwnerName()).append("\" ID=").append(ti.getLockOwnerId()); } b.append(ti.isSuspended() ? " (suspended)" : ""); b.append(ti.isInNative() ? 
" (in native code)" : ""); b.append("\n"); final StackTraceElement[] stack = ti.getStackTrace(); final LockInfo lockInfo = ti.getLockInfo(); final MonitorInfo[] monitorInfos = ti.getLockedMonitors(); for (int i = 0; i < stack.length; i++) { b.append("\tat ").append(stack[i]).append("\n"); if (i == 0 && lockInfo != null) { b.append("\t- ").append(threadState).append(lockInfo).append("\n"); } for (MonitorInfo mi : monitorInfos) { if (mi.getLockedStackDepth() == i) { b.append("\t- locked ").append(mi).append("\n"); } } } LockInfo[] lockInfos = ti.getLockedSynchronizers(); if (lockInfos.length > 0) { b.append("\tLocked synchronizers:\n"); for (LockInfo li : ti.getLockedSynchronizers()) { b.append("\t- ").append(li).append("\n"); } } b.append("\n"); } /** * Enable the monitoring service and the Local exporter, waiting for some monitoring documents * to be indexed before it returns. */ public void enableMonitoring() throws Exception { // delete anything that may happen to already exist assertAcked(client().admin().indices().prepareDelete(".monitoring-*")); assertThat("Must be no enabled exporters before enabling monitoring", getMonitoringUsageExportersDefined(), is(false)); final Settings settings = Settings.builder() .put("xpack.monitoring.collection.enabled", true) .put("xpack.monitoring.exporters._local.type", "local") .put("xpack.monitoring.exporters._local.enabled", true) .build(); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings)); assertBusy(() -> assertThat("[_local] exporter not enabled yet", getMonitoringUsageExportersDefined(), is(true))); assertBusy(() -> { // Monitoring uses auto_expand_replicas, so it should be green even without replicas ensureGreen(".monitoring-es-*"); assertThat(client().admin().indices().prepareRefresh(".monitoring-es-*").get().getStatus(), is(RestStatus.OK)); assertThat( "No monitoring documents yet", client().prepareSearch(".monitoring-es-" + TEMPLATE_VERSION + 
"-*").setSize(0).get().getHits().getTotalHits().value, greaterThan(0L) ); }, 30L, TimeUnit.SECONDS); } /** * Disable the monitoring service and the Local exporter. */ public void disableMonitoring() throws Exception { final Settings settings = Settings.builder() .putNull("xpack.monitoring.collection.enabled") .putNull("xpack.monitoring.exporters._local.type") .putNull("xpack.monitoring.exporters._local.enabled") .putNull("cluster.metadata.display_name") .build(); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings)); assertBusy(() -> assertThat("Exporters are not yet stopped", getMonitoringUsageExportersDefined(), is(false))); assertBusy(() -> { try { // now wait until Monitoring has actually stopped final NodesStatsResponse response = client().admin().cluster().prepareNodesStats().clear().setThreadPool(true).get(); for (final NodeStats nodeStats : response.getNodes()) { boolean foundBulkThreads = false; for (final ThreadPoolStats.Stats threadPoolStats : nodeStats.getThreadPool()) { if (WRITE.equals(threadPoolStats.getName())) { foundBulkThreads = true; assertThat("Still some active _bulk threads!", threadPoolStats.getActive(), equalTo(0)); break; } } assertThat("Could not find bulk thread pool", foundBulkThreads, is(true)); } } catch (Exception e) { throw new ElasticsearchException("Failed to wait for monitoring exporters to stop:", e); } }, 30L, TimeUnit.SECONDS); } private boolean getMonitoringUsageExportersDefined() throws Exception { final XPackUsageResponse usageResponse = new XPackUsageRequestBuilder(client()).execute().get(); final Optional<MonitoringFeatureSetUsage> monitoringUsage = usageResponse.getUsages() .stream() .filter(usage -> usage instanceof MonitoringFeatureSetUsage) .map(usage -> (MonitoringFeatureSetUsage) usage) .findFirst(); assertThat("Monitoring feature set does not exist", monitoringUsage.isPresent(), is(true)); return monitoringUsage.get().getExporters().isEmpty() == false; } /** * Returns the 
{@link SearchHit} content as a {@link Map} object. */ private static Map<String, Object> toMap(final ToXContentObject xContentObject) throws IOException { final XContentType xContentType = XContentType.JSON; try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) { xContentObject.toXContent(builder, EMPTY_PARAMS); final Map<String, Object> map = XContentHelper.convertToMap(xContentType.xContent(), Strings.toString(builder), false); // remove extraneous fields not actually wanted from the response map.remove("_score"); map.remove("fields"); map.remove("sort"); return map; } } /** * Returns a {@link MonitoredSystem} supported by the Monitoring Bulk API */ private static MonitoredSystem randomSystem() { return randomFrom(MonitoredSystem.LOGSTASH, MonitoredSystem.KIBANA, MonitoredSystem.BEATS); } }
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media;

import android.graphics.Rect;
import android.os.Parcel;
import android.util.Log;
import java.util.HashMap;
import java.util.Set;
import java.util.List;
import java.util.ArrayList;

/**
 * Class to hold the timed text's metadata, including:
 * <ul>
 * <li> The characters for rendering</li>
 * <li> The rendering position for the timed text</li>
 * </ul>
 *
 * <p> To render the timed text, applications need to do the following:
 *
 * <ul>
 * <li> Implement the {@link MediaPlayer.OnTimedTextListener} interface</li>
 * <li> Register the {@link MediaPlayer.OnTimedTextListener} callback on a MediaPlayer object that is used for playback</li>
 * <li> When a onTimedText callback is received, do the following:
 * <ul>
 * <li> call {@link #getText} to get the characters for rendering</li>
 * <li> call {@link #getBounds} to get the text rendering area/region</li>
 * </ul>
 * </li>
 * </ul>
 *
 * @see android.media.MediaPlayer
 */
public final class TimedText {
    private static final int FIRST_PUBLIC_KEY                 = 1;

    // These keys must be in sync with the keys in TextDescription.h
    private static final int KEY_DISPLAY_FLAGS                 = 1; // int
    private static final int KEY_STYLE_FLAGS                   = 2; // int
    private static final int KEY_BACKGROUND_COLOR_RGBA         = 3; // int
    private static final int KEY_HIGHLIGHT_COLOR_RGBA          = 4; // int
    private static final int KEY_SCROLL_DELAY                  = 5; // int
    private static final int KEY_WRAP_TEXT                     = 6; // int
    private static final int KEY_START_TIME                    = 7; // int
    private static final int KEY_STRUCT_BLINKING_TEXT_LIST     = 8; // List<CharPos>
    private static final int KEY_STRUCT_FONT_LIST              = 9; // List<Font>
    private static final int KEY_STRUCT_HIGHLIGHT_LIST         = 10; // List<CharPos>
    private static final int KEY_STRUCT_HYPER_TEXT_LIST        = 11; // List<HyperText>
    private static final int KEY_STRUCT_KARAOKE_LIST           = 12; // List<Karaoke>
    private static final int KEY_STRUCT_STYLE_LIST             = 13; // List<Style>
    private static final int KEY_STRUCT_TEXT_POS               = 14; // TextPos
    private static final int KEY_STRUCT_JUSTIFICATION          = 15; // Justification
    private static final int KEY_STRUCT_TEXT                   = 16; // Text

    private static final int LAST_PUBLIC_KEY                  = 16;

    private static final int FIRST_PRIVATE_KEY                = 101;

    // The following keys are used between TimedText.java and
    // TextDescription.cpp in order to parse the Parcel.
    private static final int KEY_GLOBAL_SETTING               = 101;
    private static final int KEY_LOCAL_SETTING                = 102;
    private static final int KEY_START_CHAR                   = 103;
    private static final int KEY_END_CHAR                     = 104;
    private static final int KEY_FONT_ID                      = 105;
    private static final int KEY_FONT_SIZE                    = 106;
    private static final int KEY_TEXT_COLOR_RGBA              = 107;

    private static final int LAST_PRIVATE_KEY                 = 107;

    private static final String TAG = "TimedText";

    // Maps each parsed key to the value (or struct list) read from the Parcel.
    private final HashMap<Integer, Object> mKeyObjectMap =
            new HashMap<Integer, Object>();

    private int mDisplayFlags = -1;
    private int mBackgroundColorRGBA = -1;
    private int mHighlightColorRGBA = -1;
    private int mScrollDelay = -1;
    private int mWrapText = -1;

    private List<CharPos> mBlinkingPosList = null;
    private List<CharPos> mHighlightPosList = null;
    private List<Karaoke> mKaraokeList = null;
    private List<Font> mFontList = null;
    private List<Style> mStyleList = null;
    private List<HyperText> mHyperTextList = null;
    private Rect mTextBounds = null;
    private String mTextChars = null;

    private Justification mJustification;

    /**
     * Helper class to hold the start char offset and end char offset
     * for Blinking Text or Highlight Text. endChar is the end offset
     * of the text (startChar + number of characters to be highlighted
     * or blinked). The member variables in this class are read-only.
     * {@hide}
     */
    public static final class CharPos {
        /**
         * The offset of the start character
         */
        public final int startChar;

        /**
         * The offset of the end character
         */
        public final int endChar;

        /**
         * Constructor
         * @param startChar the offset of the start character.
         * @param endChar the offset of the end character.
         */
        public CharPos(int startChar, int endChar) {
            this.startChar = startChar;
            this.endChar = endChar;
        }
    }

    /**
     * Helper class to hold the justification for text display in the text box.
     * The member variables in this class are read-only.
     * {@hide}
     */
    public static final class Justification {
        /**
         * horizontal justification  0: left, 1: centered, -1: right
         */
        public final int horizontalJustification;

        /**
         * vertical justification  0: top, 1: centered, -1: bottom
         */
        public final int verticalJustification;

        /**
         * Constructor
         * @param horizontal the horizontal justification of the text.
         * @param vertical the vertical justification of the text.
         */
        public Justification(int horizontal, int vertical) {
            this.horizontalJustification = horizontal;
            this.verticalJustification = vertical;
        }
    }

    /**
     * Helper class to hold the style information to display the text.
     * The member variables in this class are read-only.
     * {@hide}
     */
    public static final class Style {
        /**
         * The offset of the start character which applys this style
         */
        public final int startChar;

        /**
         * The offset of the end character which applys this style
         */
        public final int endChar;

        /**
         * ID of the font. This ID will be used to choose the font
         * to be used from the font list.
         */
        public final int fontID;

        /**
         * True if the characters should be bold
         */
        public final boolean isBold;

        /**
         * True if the characters should be italic
         */
        public final boolean isItalic;

        /**
         * True if the characters should be underlined
         */
        public final boolean isUnderlined;

        /**
         * The size of the font
         */
        public final int fontSize;

        /**
         * To specify the RGBA color: 8 bits each of red, green, blue,
         * and an alpha(transparency) value
         */
        public final int colorRGBA;

        /**
         * Constructor
         * @param startChar the offset of the start character which applys this style
         * @param endChar the offset of the end character which applys this style
         * @param fontId the ID of the font.
         * @param isBold whether the characters should be bold.
         * @param isItalic whether the characters should be italic.
         * @param isUnderlined whether the characters should be underlined.
         * @param fontSize the size of the font.
         * @param colorRGBA red, green, blue, and alpha value for color.
         */
        public Style(int startChar, int endChar, int fontId, boolean isBold,
                     boolean isItalic, boolean isUnderlined,
                     int fontSize, int colorRGBA) {
            this.startChar = startChar;
            this.endChar = endChar;
            this.fontID = fontId;
            this.isBold = isBold;
            this.isItalic = isItalic;
            this.isUnderlined = isUnderlined;
            this.fontSize = fontSize;
            this.colorRGBA = colorRGBA;
        }
    }

    /**
     * Helper class to hold the font ID and name.
     * The member variables in this class are read-only.
     * {@hide}
     */
    public static final class Font {
        /**
         * The font ID
         */
        public final int ID;

        /**
         * The font name
         */
        public final String name;

        /**
         * Constructor
         * @param id the font ID.
         * @param name the font name.
         */
        public Font(int id, String name) {
            this.ID = id;
            this.name = name;
        }
    }

    /**
     * Helper class to hold the karaoke information.
     * The member variables in this class are read-only.
     * {@hide}
     */
    public static final class Karaoke {
        /**
         * The start time (in milliseconds) to highlight the characters
         * specified by startChar and endChar.
         */
        public final int startTimeMs;

        /**
         * The end time (in milliseconds) to highlight the characters
         * specified by startChar and endChar.
         */
        public final int endTimeMs;

        /**
         * The offset of the start character to be highlighted
         */
        public final int startChar;

        /**
         * The offset of the end character to be highlighted
         */
        public final int endChar;

        /**
         * Constructor
         * @param startTimeMs the start time (in milliseconds) to highlight
         * the characters between startChar and endChar.
         * @param endTimeMs the end time (in milliseconds) to highlight
         * the characters between startChar and endChar.
         * @param startChar the offset of the start character to be highlighted.
         * @param endChar the offset of the end character to be highlighted.
         */
        public Karaoke(int startTimeMs, int endTimeMs, int startChar, int endChar) {
            this.startTimeMs = startTimeMs;
            this.endTimeMs = endTimeMs;
            this.startChar = startChar;
            this.endChar = endChar;
        }
    }

    /**
     * Helper class to hold the hyper text information.
     * The member variables in this class are read-only.
     * {@hide}
     */
    public static final class HyperText {
        /**
         * The offset of the start character
         */
        public final int startChar;

        /**
         * The offset of the end character
         */
        public final int endChar;

        /**
         * The linked-to URL
         */
        public final String URL;

        /**
         * The "alt" string for user display
         */
        public final String altString;

        /**
         * Constructor
         * @param startChar the offset of the start character.
         * @param endChar the offset of the end character.
         * @param url the linked-to URL.
         * @param alt the "alt" string for display.
         */
        public HyperText(int startChar, int endChar, String url, String alt) {
            this.startChar = startChar;
            this.endChar = endChar;
            this.URL = url;
            this.altString = alt;
        }
    }

    /**
     * Constructs a TimedText object from a Parcel written by the native
     * TextDescription code.
     * @param parcel the Parcel which contains the timed text.
     * @throws IllegalArgumentException if parseParcel() fails.
     * {@hide}
     */
    public TimedText(Parcel parcel) {
        if (!parseParcel(parcel)) {
            mKeyObjectMap.clear();
            throw new IllegalArgumentException("parseParcel() fails");
        }
    }

    /**
     * Get the characters in the timed text.
     *
     * @return the characters as a String object in the TimedText. Applications
     * should stop rendering previous timed text at the current rendering region if
     * a null is returned, until the next non-null timed text is received.
     */
    public String getText() {
        return mTextChars;
    }

    /**
     * Get the rectangle area or region for rendering the timed text as specified
     * by a Rect object.
     *
     * @return the rectangle region to render the characters in the timed text.
     * If no bounds information is available (a null is returned), render the
     * timed text at the center bottom of the display.
     */
    public Rect getBounds() {
        return mTextBounds;
    }

    /*
     * Go over all the records, collecting metadata keys and fields in the
     * Parcel. These are stored in mKeyObjectMap for application to retrieve.
     * @return false if an error occurred during parsing. Otherwise, true.
     */
    private boolean parseParcel(Parcel parcel) {
        parcel.setDataPosition(0);
        if (parcel.dataAvail() == 0) {
            return false;
        }

        int type = parcel.readInt();
        if (type == KEY_LOCAL_SETTING) {
            // A local setting must start with the start time followed by the text.
            type = parcel.readInt();
            if (type != KEY_START_TIME) {
                return false;
            }
            int startTimeMs = parcel.readInt();
            mKeyObjectMap.put(type, startTimeMs);

            type = parcel.readInt();
            if (type != KEY_STRUCT_TEXT) {
                return false;
            }
            int textLen = parcel.readInt();
            byte[] text = parcel.createByteArray();
            if (text == null || text.length == 0) {
                mTextChars = null;
            } else {
                // NOTE(review): decodes with the platform default charset — presumably the native
                // side writes UTF-8; confirm against TextDescription.cpp before changing.
                mTextChars = new String(text);
            }
        } else if (type != KEY_GLOBAL_SETTING) {
            Log.w(TAG, "Invalid timed text key found: " + type);
            return false;
        }

        // Read the remaining key/value records until the Parcel is exhausted.
        while (parcel.dataAvail() > 0) {
            int key = parcel.readInt();
            if (!isValidKey(key)) {
                Log.w(TAG, "Invalid timed text key found: " + key);
                return false;
            }

            Object object = null;

            switch (key) {
                case KEY_STRUCT_STYLE_LIST: {
                    readStyle(parcel);
                    object = mStyleList;
                    break;
                }
                case KEY_STRUCT_FONT_LIST: {
                    readFont(parcel);
                    object = mFontList;
                    break;
                }
                case KEY_STRUCT_HIGHLIGHT_LIST: {
                    readHighlight(parcel);
                    object = mHighlightPosList;
                    break;
                }
                case KEY_STRUCT_KARAOKE_LIST: {
                    readKaraoke(parcel);
                    object = mKaraokeList;
                    break;
                }
                case KEY_STRUCT_HYPER_TEXT_LIST: {
                    readHyperText(parcel);
                    object = mHyperTextList;
                    break;
                }
                case KEY_STRUCT_BLINKING_TEXT_LIST: {
                    readBlinkingText(parcel);
                    object = mBlinkingPosList;
                    break;
                }
                case KEY_WRAP_TEXT: {
                    mWrapText = parcel.readInt();
                    object = mWrapText;
                    break;
                }
                case KEY_HIGHLIGHT_COLOR_RGBA: {
                    mHighlightColorRGBA = parcel.readInt();
                    object = mHighlightColorRGBA;
                    break;
                }
                case KEY_DISPLAY_FLAGS: {
                    mDisplayFlags = parcel.readInt();
                    object = mDisplayFlags;
                    break;
                }
                case KEY_STRUCT_JUSTIFICATION: {
                    int horizontal = parcel.readInt();
                    int vertical = parcel.readInt();
                    mJustification = new Justification(horizontal, vertical);
                    object = mJustification;
                    break;
                }
                case KEY_BACKGROUND_COLOR_RGBA: {
                    mBackgroundColorRGBA = parcel.readInt();
                    object = mBackgroundColorRGBA;
                    break;
                }
                case KEY_STRUCT_TEXT_POS: {
                    int top = parcel.readInt();
                    int left = parcel.readInt();
                    int bottom = parcel.readInt();
                    int right = parcel.readInt();
                    mTextBounds = new Rect(left, top, right, bottom);
                    // Note: text position is intentionally not stored in mKeyObjectMap.
                    break;
                }
                case KEY_SCROLL_DELAY: {
                    mScrollDelay = parcel.readInt();
                    object = mScrollDelay;
                    break;
                }
                default: {
                    break;
                }
            }

            if (object != null) {
                // put() replaces any previous mapping for this key.
                mKeyObjectMap.put(key, object);
            }
        }
        return true;
    }

    /*
     * To parse and store the Style list.
     */
    private void readStyle(Parcel parcel) {
        boolean endOfStyle = false;
        int startChar = -1;
        int endChar = -1;
        int fontId = -1;
        boolean isBold = false;
        boolean isItalic = false;
        boolean isUnderlined = false;
        int fontSize = -1;
        int colorRGBA = -1;
        while (!endOfStyle && (parcel.dataAvail() > 0)) {
            int key = parcel.readInt();
            switch (key) {
                case KEY_START_CHAR: {
                    startChar = parcel.readInt();
                    break;
                }
                case KEY_END_CHAR: {
                    endChar = parcel.readInt();
                    break;
                }
                case KEY_FONT_ID: {
                    fontId = parcel.readInt();
                    break;
                }
                case KEY_STYLE_FLAGS: {
                    int flags = parcel.readInt();
                    // In the absence of any bits set in flags, the text
                    // is plain. Otherwise, 1: bold, 2: italic, 4: underline
                    // (arithmetic decoding below matches bit tests for the
                    // documented flag range 0-7).
                    isBold = ((flags % 2) == 1);
                    isItalic = ((flags % 4) >= 2);
                    isUnderlined = ((flags / 4) == 1);
                    break;
                }
                case KEY_FONT_SIZE: {
                    fontSize = parcel.readInt();
                    break;
                }
                case KEY_TEXT_COLOR_RGBA: {
                    colorRGBA = parcel.readInt();
                    break;
                }
                default: {
                    // End of the Style parsing. Reset the data position back
                    // to the position before the last parcel.readInt() call.
                    parcel.setDataPosition(parcel.dataPosition() - 4);
                    endOfStyle = true;
                    break;
                }
            }
        }

        Style style = new Style(startChar, endChar, fontId, isBold,
                isItalic, isUnderlined, fontSize, colorRGBA);
        if (mStyleList == null) {
            mStyleList = new ArrayList<Style>();
        }
        mStyleList.add(style);
    }

    /*
     * To parse and store the Font list
     */
    private void readFont(Parcel parcel) {
        int entryCount = parcel.readInt();

        for (int i = 0; i < entryCount; i++) {
            int id = parcel.readInt();
            int nameLen = parcel.readInt();

            byte[] text = parcel.createByteArray();
            // Only the first nameLen bytes are the font name.
            final String name = new String(text, 0, nameLen);

            Font font = new Font(id, name);

            if (mFontList == null) {
                mFontList = new ArrayList<Font>();
            }
            mFontList.add(font);
        }
    }

    /*
     * To parse and store the Highlight list
     */
    private void readHighlight(Parcel parcel) {
        int startChar = parcel.readInt();
        int endChar = parcel.readInt();
        CharPos pos = new CharPos(startChar, endChar);

        if (mHighlightPosList == null) {
            mHighlightPosList = new ArrayList<CharPos>();
        }
        mHighlightPosList.add(pos);
    }

    /*
     * To parse and store the Karaoke list
     */
    private void readKaraoke(Parcel parcel) {
        int entryCount = parcel.readInt();

        for (int i = 0; i < entryCount; i++) {
            int startTimeMs = parcel.readInt();
            int endTimeMs = parcel.readInt();
            int startChar = parcel.readInt();
            int endChar = parcel.readInt();
            Karaoke kara = new Karaoke(startTimeMs, endTimeMs,
                    startChar, endChar);

            if (mKaraokeList == null) {
                mKaraokeList = new ArrayList<Karaoke>();
            }
            mKaraokeList.add(kara);
        }
    }

    /*
     * To parse and store HyperText list
     */
    private void readHyperText(Parcel parcel) {
        int startChar = parcel.readInt();
        int endChar = parcel.readInt();

        int len = parcel.readInt();
        byte[] url = parcel.createByteArray();
        final String urlString = new String(url, 0, len);

        len = parcel.readInt();
        byte[] alt = parcel.createByteArray();
        final String altString = new String(alt, 0, len);
        HyperText hyperText = new HyperText(startChar, endChar, urlString, altString);

        if (mHyperTextList == null) {
            mHyperTextList = new ArrayList<HyperText>();
        }
        mHyperTextList.add(hyperText);
    }

    /*
     * To parse and store blinking text list
     */
    private void readBlinkingText(Parcel parcel) {
        int startChar = parcel.readInt();
        int endChar = parcel.readInt();
        CharPos blinkingPos = new CharPos(startChar, endChar);

        if (mBlinkingPosList == null) {
            mBlinkingPosList = new ArrayList<CharPos>();
        }
        mBlinkingPosList.add(blinkingPos);
    }

    /*
     * To check whether the given key is valid.
     * @param key the key to be checked.
     * @return true if the key is a valid one. Otherwise, false.
     */
    private boolean isValidKey(final int key) {
        return ((key >= FIRST_PUBLIC_KEY) && (key <= LAST_PUBLIC_KEY))
                || ((key >= FIRST_PRIVATE_KEY) && (key <= LAST_PRIVATE_KEY));
    }

    /*
     * To check whether the given key is contained in this TimedText object.
     * @param key the key to be checked.
     * @return true if the key is contained in this TimedText object.
     *         Otherwise, false.
     */
    private boolean containsKey(final int key) {
        return isValidKey(key) && mKeyObjectMap.containsKey(key);
    }

    /*
     * @return a set of the keys contained in this TimedText object.
     */
    private Set<Integer> keySet() {
        return mKeyObjectMap.keySet();
    }

    /*
     * To retrieve the object associated with the key. Caller must make sure
     * the key is present using the containsKey method otherwise a
     * RuntimeException will occur.
     * @param key the key used to retrieve the object.
     * @return an object. The object could be 1) an instance of Integer; 2) a
     * List of CharPos, Karaoke, Font, Style, and HyperText, or 3) an instance of
     * Justification.
     * @throws IllegalArgumentException if the key is not present.
     */
    private Object getObject(final int key) {
        if (containsKey(key)) {
            return mKeyObjectMap.get(key);
        } else {
            throw new IllegalArgumentException("Invalid key: " + key);
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;

/**
 * Unit tests for the RegionSplitPolicy implementations, driven entirely by
 * a mocked HRegion and mocked Stores (no mini-cluster required).
 */
@Category({RegionServerTests.class, SmallTests.class})
public class TestRegionSplitPolicy {

  private Configuration conf;
  private HTableDescriptor htd;
  // Mocked region; each test configures its table descriptor / stores as needed.
  private HRegion mockRegion;
  private List<Store> stores;
  private static final TableName TABLENAME = TableName.valueOf("t");

  /** Builds a fresh mocked region with an empty store list before each test. */
  @Before
  public void setupMocks() {
    conf = HBaseConfiguration.create();
    HRegionInfo hri = new HRegionInfo(TABLENAME);
    htd = new HTableDescriptor(TABLENAME);
    mockRegion = Mockito.mock(HRegion.class);
    Mockito.doReturn(htd).when(mockRegion).getTableDesc();
    Mockito.doReturn(hri).when(mockRegion).getRegionInfo();
    stores = new ArrayList<Store>();
    Mockito.doReturn(stores).when(mockRegion).getStores();
  }

  @Test
  public void testIncreasingToUpperBoundRegionSplitPolicy() throws IOException {
    // Configure IncreasingToUpperBoundRegionSplitPolicy as our split policy
    conf.set(HConstants.HBASE_REGION_SPLIT_POLICY_KEY,
        IncreasingToUpperBoundRegionSplitPolicy.class.getName());
    // Now make it so the mock region has a RegionServerService that will
    // return 'online regions'.
    RegionServerServices rss = Mockito.mock(RegionServerServices.class);
    final List<Region> regions = new ArrayList<Region>();
    Mockito.when(rss.getOnlineRegions(TABLENAME)).thenReturn(regions);
    Mockito.when(mockRegion.getRegionServerServices()).thenReturn(rss);
    // Set max size for this 'table'.
    long maxSplitSize = 1024L;
    htd.setMaxFileSize(maxSplitSize);
    // Set flush size to 1/8.  IncreasingToUpperBoundRegionSplitPolicy
    // grows by the cube of the number of regions times flushsize each time.
    long flushSize = maxSplitSize/8;
    conf.setLong(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, flushSize);
    htd.setMemStoreFlushSize(flushSize);
    // If RegionServerService with no regions in it -- 'online regions' == 0 --
    // then IncreasingToUpperBoundRegionSplitPolicy should act like a
    // ConstantSizePolicy
    IncreasingToUpperBoundRegionSplitPolicy policy =
      (IncreasingToUpperBoundRegionSplitPolicy)RegionSplitPolicy.create(mockRegion, conf);
    doConstantSizePolicyTests(policy);
    // Add a store in excess of split size.  Because there are "no regions"
    // on this server -- rss.getOnlineRegions is 0 -- then we should split
    // like a constantsizeregionsplitpolicy would
    HStore mockStore = Mockito.mock(HStore.class);
    Mockito.doReturn(2000L).when(mockStore).getSize();
    Mockito.doReturn(true).when(mockStore).canSplit();
    stores.add(mockStore);
    // It should split
    assertTrue(policy.shouldSplit());
    // Now test that we increase our split size as online regions for a table
    // grows. With one region, split size should be flushsize.
    regions.add(mockRegion);
    Mockito.doReturn(flushSize).when(mockStore).getSize();
    // Should not split since store is flush size.
    assertFalse(policy.shouldSplit());
    // Set size of store to be > 2*flush size and we should split
    Mockito.doReturn(flushSize*2 + 1).when(mockStore).getSize();
    assertTrue(policy.shouldSplit());
    // Add another region to the 'online regions' on this server and we should
    // now be no longer be splittable since split size has gone up.
    regions.add(mockRegion);
    assertFalse(policy.shouldSplit());
    // make sure its just over; verify it'll split
    Mockito.doReturn((long)(maxSplitSize * 1.25 + 1)).when(mockStore).getSize();
    assertTrue(policy.shouldSplit());
    // Finally assert that even if loads of regions, we'll split at max size
    assertWithinJitter(maxSplitSize, policy.getSizeToCheck(1000));
    // Assert same is true if count of regions is zero.
    assertWithinJitter(maxSplitSize, policy.getSizeToCheck(0));
  }

  // The split size is jittered by up to +/-25%; assert the value stays in that band.
  private void assertWithinJitter(long maxSplitSize, long sizeToCheck) {
    assertTrue("Size greater than lower bound of jitter",
        (long)(maxSplitSize * 0.75) <= sizeToCheck);
    assertTrue("Size less than upper bound of jitter",
        (long)(maxSplitSize * 1.25) >= sizeToCheck);
  }

  @Test
  public void testCreateDefault() throws IOException {
    conf.setLong(HConstants.HREGION_MAX_FILESIZE, 1234L);
    // Using a default HTD, should pick up the file size from
    // configuration.
    ConstantSizeRegionSplitPolicy policy =
        (ConstantSizeRegionSplitPolicy)RegionSplitPolicy.create(
            mockRegion, conf);
    assertWithinJitter(1234L, policy.getDesiredMaxFileSize());
    // If specified in HTD, should use that
    htd.setMaxFileSize(9999L);
    policy = (ConstantSizeRegionSplitPolicy)RegionSplitPolicy.create(
        mockRegion, conf);
    assertWithinJitter(9999L, policy.getDesiredMaxFileSize());
  }

  /**
   * Test setting up a customized split policy
   */
  @Test
  public void testCustomPolicy() throws IOException {
    HTableDescriptor myHtd = new HTableDescriptor();
    myHtd.setValue(HTableDescriptor.SPLIT_POLICY,
        KeyPrefixRegionSplitPolicy.class.getName());
    myHtd.setValue(KeyPrefixRegionSplitPolicy.PREFIX_LENGTH_KEY, String.valueOf(2));

    HRegion myMockRegion = Mockito.mock(HRegion.class);
    Mockito.doReturn(myHtd).when(myMockRegion).getTableDesc();
    Mockito.doReturn(stores).when(myMockRegion).getStores();

    HStore mockStore = Mockito.mock(HStore.class);
    Mockito.doReturn(2000L).when(mockStore).getSize();
    Mockito.doReturn(true).when(mockStore).canSplit();
    Mockito.doReturn(Bytes.toBytes("abcd")).when(mockStore).getSplitPoint();
    stores.add(mockStore);

    // With a prefix length of 2 the split point is truncated to the first 2 bytes.
    KeyPrefixRegionSplitPolicy policy = (KeyPrefixRegionSplitPolicy) RegionSplitPolicy
        .create(myMockRegion, conf);

    assertEquals("ab", Bytes.toString(policy.getSplitPoint()));

    // A forced split with an explicit split point is truncated the same way.
    Mockito.doReturn(true).when(myMockRegion).shouldForceSplit();
    Mockito.doReturn(Bytes.toBytes("efgh")).when(myMockRegion)
        .getExplicitSplitPoint();

    policy = (KeyPrefixRegionSplitPolicy) RegionSplitPolicy
        .create(myMockRegion, conf);

    assertEquals("ef", Bytes.toString(policy.getSplitPoint()));
  }

  @Test
  public void testConstantSizePolicy() throws IOException {
    htd.setMaxFileSize(1024L);
    ConstantSizeRegionSplitPolicy policy =
      (ConstantSizeRegionSplitPolicy)RegionSplitPolicy.create(mockRegion, conf);
    doConstantSizePolicyTests(policy);
  }

  /**
   * Run through tests for a ConstantSizeRegionSplitPolicy
   * @param policy
   */
  private void doConstantSizePolicyTests(final ConstantSizeRegionSplitPolicy policy)
{ // For no stores, should not split assertFalse(policy.shouldSplit()); // Add a store above the requisite size. Should split. HStore mockStore = Mockito.mock(HStore.class); Mockito.doReturn(2000L).when(mockStore).getSize(); Mockito.doReturn(true).when(mockStore).canSplit(); stores.add(mockStore); assertTrue(policy.shouldSplit()); // Act as if there's a reference file or some other reason it can't split. // This should prevent splitting even though it's big enough. Mockito.doReturn(false).when(mockStore).canSplit(); assertFalse(policy.shouldSplit()); // Reset splittability after above Mockito.doReturn(true).when(mockStore).canSplit(); // Set to a small size but turn on forceSplit. Should result in a split. Mockito.doReturn(true).when(mockRegion).shouldForceSplit(); Mockito.doReturn(100L).when(mockStore).getSize(); assertTrue(policy.shouldSplit()); // Turn off forceSplit, should not split Mockito.doReturn(false).when(mockRegion).shouldForceSplit(); assertFalse(policy.shouldSplit()); // Clear families we added above stores.clear(); } @Test public void testGetSplitPoint() throws IOException { ConstantSizeRegionSplitPolicy policy = (ConstantSizeRegionSplitPolicy)RegionSplitPolicy.create(mockRegion, conf); // For no stores, should not split assertFalse(policy.shouldSplit()); assertNull(policy.getSplitPoint()); // Add a store above the requisite size. Should split. HStore mockStore = Mockito.mock(HStore.class); Mockito.doReturn(2000L).when(mockStore).getSize(); Mockito.doReturn(true).when(mockStore).canSplit(); Mockito.doReturn(Bytes.toBytes("store 1 split")) .when(mockStore).getSplitPoint(); stores.add(mockStore); assertEquals("store 1 split", Bytes.toString(policy.getSplitPoint())); // Add a bigger store. 
The split point should come from that one HStore mockStore2 = Mockito.mock(HStore.class); Mockito.doReturn(4000L).when(mockStore2).getSize(); Mockito.doReturn(true).when(mockStore2).canSplit(); Mockito.doReturn(Bytes.toBytes("store 2 split")) .when(mockStore2).getSplitPoint(); stores.add(mockStore2); assertEquals("store 2 split", Bytes.toString(policy.getSplitPoint())); } @Test public void testDelimitedKeyPrefixRegionSplitPolicy() throws IOException { HTableDescriptor myHtd = new HTableDescriptor(); myHtd.setValue(HTableDescriptor.SPLIT_POLICY, DelimitedKeyPrefixRegionSplitPolicy.class.getName()); myHtd.setValue(DelimitedKeyPrefixRegionSplitPolicy.DELIMITER_KEY, ","); HRegion myMockRegion = Mockito.mock(HRegion.class); Mockito.doReturn(myHtd).when(myMockRegion).getTableDesc(); Mockito.doReturn(stores).when(myMockRegion).getStores(); HStore mockStore = Mockito.mock(HStore.class); Mockito.doReturn(2000L).when(mockStore).getSize(); Mockito.doReturn(true).when(mockStore).canSplit(); Mockito.doReturn(Bytes.toBytes("ab,cd")).when(mockStore).getSplitPoint(); stores.add(mockStore); DelimitedKeyPrefixRegionSplitPolicy policy = (DelimitedKeyPrefixRegionSplitPolicy) RegionSplitPolicy .create(myMockRegion, conf); assertEquals("ab", Bytes.toString(policy.getSplitPoint())); Mockito.doReturn(true).when(myMockRegion).shouldForceSplit(); Mockito.doReturn(Bytes.toBytes("efg,h")).when(myMockRegion) .getExplicitSplitPoint(); policy = (DelimitedKeyPrefixRegionSplitPolicy) RegionSplitPolicy .create(myMockRegion, conf); assertEquals("efg", Bytes.toString(policy.getSplitPoint())); Mockito.doReturn(Bytes.toBytes("ijk")).when(myMockRegion) .getExplicitSplitPoint(); assertEquals("ijk", Bytes.toString(policy.getSplitPoint())); } }
/*
 * CPAchecker is a tool for configurable software verification.
 * This file is part of CPAchecker.
 *
 * Copyright (C) 2007-2013 Dirk Beyer
 * All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *
 * CPAchecker web page:
 * http://cpachecker.sosy-lab.org
 */
package org.sosy_lab.cpachecker.cpa.smg.SMGJoin;

import java.util.ArrayDeque;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.Map;

import org.sosy_lab.cpachecker.cpa.smg.CLangSMG;
import org.sosy_lab.cpachecker.cpa.smg.CLangStackFrame;
import org.sosy_lab.cpachecker.cpa.smg.SMGInconsistentException;
import org.sosy_lab.cpachecker.cpa.smg.objects.SMGObject;
import org.sosy_lab.cpachecker.cpa.smg.objects.SMGRegion;

/**
 * Joins two C-language SMGs (symbolic memory graphs) into a single joint SMG.
 *
 * The join proceeds in two phases over copies of the input SMGs:
 *  1. Build the joint object skeleton: for every global variable and every
 *     stack variable present in BOTH inputs, create a fresh region in the
 *     joint SMG and record it in the per-input node mappings.
 *  2. Join the sub-SMGs hanging off each paired object via SMGJoinSubSMGs,
 *     accumulating the overall join status.
 * If at any point the inputs are unjoinable (e.g. a variable exists in only
 * one input), the constructor returns early and {@link #isDefined()} stays
 * false.
 */
final public class SMGJoin {
  // Propagates the consistency-check switch to the sub-SMG join machinery.
  static public void performChecks(boolean pOn) {
    SMGJoinSubSMGs.performChecks(pOn);
  }

  // True only after the full join completed successfully.
  private boolean defined = false;
  // Running join status; refined by each sub-SMG join.
  private SMGJoinStatus status = SMGJoinStatus.EQUAL;
  // The resulting joint SMG (meaningful only when 'defined' is true).
  private final CLangSMG smg;

  public SMGJoin(CLangSMG pSMG1, CLangSMG pSMG2) throws SMGInconsistentException {
    // Work on copies so the join never mutates the caller's SMGs.
    CLangSMG opSMG1 = new CLangSMG(pSMG1);
    CLangSMG opSMG2 = new CLangSMG(pSMG2);
    smg = new CLangSMG(opSMG1.getMachineModel());

    SMGNodeMapping mapping1 = new SMGNodeMapping();
    SMGNodeMapping mapping2 = new SMGNodeMapping();

    Map<String, SMGRegion> globals_in_smg1 = opSMG1.getGlobalObjects();
    ArrayDeque<CLangStackFrame> stack_in_smg1 = opSMG1.getStackFrames();
    Map<String, SMGRegion> globals_in_smg2 = opSMG2.getGlobalObjects();
    ArrayDeque<CLangStackFrame> stack_in_smg2 = opSMG2.getStackFrames();

    // Phase 1a: pair up global variables from both inputs.
    Set<String> globalVars = new HashSet<>();
    globalVars.addAll(globals_in_smg1.keySet());
    globalVars.addAll(globals_in_smg2.keySet());

    for (String globalVar : globalVars) {
      SMGRegion globalInSMG1 = globals_in_smg1.get(globalVar);
      SMGRegion globalInSMG2 = globals_in_smg2.get(globalVar);
      if (globalInSMG1 == null || globalInSMG2 == null) {
        // This weird situation happens with function static variables, which are created
        // as globals when a declaration is met. So if one path goes through function and other
        // does not, then one SMG will have that global and the other one won't.
        // TODO: We could actually just add that object, as that should not influence the result of
        //       the join. For now, we will treat this situation as unjoinable.
        return;
      }
      SMGRegion finalObject = new SMGRegion(globalInSMG1);
      smg.addGlobalObject(finalObject);
      mapping1.map(globalInSMG1, finalObject);
      mapping2.map(globalInSMG2, finalObject);
    }

    // Phase 1b: pair up local variables, walking both stacks from the
    // outermost (oldest) frame inward so frames line up by call depth.
    Iterator<CLangStackFrame> smg1stackIterator = stack_in_smg1.descendingIterator();
    Iterator<CLangStackFrame> smg2stackIterator = stack_in_smg2.descendingIterator();

    while ( smg1stackIterator.hasNext() && smg2stackIterator.hasNext() ){
      CLangStackFrame frameInSMG1 = smg1stackIterator.next();
      CLangStackFrame frameInSMG2 = smg2stackIterator.next();

      smg.addStackFrame(frameInSMG1.getFunctionDeclaration());

      Set<String> localVars = new HashSet<>();
      localVars.addAll(frameInSMG1.getVariables().keySet());
      localVars.addAll(frameInSMG2.getVariables().keySet());

      for (String localVar : localVars) {
        // A local present in only one input makes the SMGs unjoinable.
        if ((!frameInSMG1.containsVariable(localVar)) || (!frameInSMG2.containsVariable(localVar))) {
          return;
        }
        SMGRegion localInSMG1 = frameInSMG1.getVariable(localVar);
        SMGRegion localInSMG2 = frameInSMG2.getVariable(localVar);
        SMGRegion finalObject = new SMGRegion(localInSMG1);
        smg.addStackObject(finalObject);
        mapping1.map(localInSMG1, finalObject);
        mapping2.map(localInSMG2, finalObject);
      }
    }

    // Phase 2a: join the sub-SMGs reachable from each paired global.
    for (String globalVar : globals_in_smg1.keySet()) {
      SMGObject globalInSMG1 = globals_in_smg1.get(globalVar);
      SMGObject globalInSMG2 = globals_in_smg2.get(globalVar);
      SMGObject destinationGlobal = mapping1.get(globalInSMG1);
      SMGJoinSubSMGs jss = new SMGJoinSubSMGs(status, opSMG1, opSMG2, smg, mapping1, mapping2, globalInSMG1, globalInSMG2, destinationGlobal);
      if (! jss.isDefined()) {
        return;
      }
      status = jss.getStatus();
    }

    // Phase 2b: join the sub-SMGs reachable from each paired local.
    smg1stackIterator = stack_in_smg1.iterator();
    smg2stackIterator = stack_in_smg2.iterator();

    while ( smg1stackIterator.hasNext() && smg2stackIterator.hasNext() ){
      CLangStackFrame frameInSMG1 = smg1stackIterator.next();
      CLangStackFrame frameInSMG2 = smg2stackIterator.next();

      for (String localVar : frameInSMG1.getVariables().keySet()) {
        SMGObject localInSMG1 = frameInSMG1.getVariable(localVar);
        SMGObject localInSMG2 = frameInSMG2.getVariable(localVar);
        SMGObject destinationLocal = mapping1.get(localInSMG1);
        SMGJoinSubSMGs jss = new SMGJoinSubSMGs(status, opSMG1, opSMG2, smg, mapping1, mapping2, localInSMG1, localInSMG2, destinationLocal);
        if (! jss.isDefined()) {
          return;
        }
        status = jss.getStatus();
      }
    }

    defined = true;
  }

  // True if the two input SMGs were successfully joined.
  public boolean isDefined() {
    return defined;
  }

  // The accumulated join status (EQUAL, or a refinement from sub-SMG joins).
  public SMGJoinStatus getStatus() {
    return status;
  }

  // The joint SMG; only valid when isDefined() returns true.
  public CLangSMG getJointSMG() {
    return smg;
  }
}

/**
 * Bidirectional bookkeeping for the join: maps objects and values of one
 * input SMG to their counterparts in the joint SMG. Value semantics
 * (equals/hashCode) are defined over both underlying maps.
 */
class SMGNodeMapping {
  final private Map<SMGObject, SMGObject> object_map = new HashMap<>();
  final private Map<Integer, Integer> value_map = new HashMap<>();

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + ((object_map == null) ? 0 : object_map.hashCode());
    result = prime * result + ((value_map == null) ? 0 : value_map.hashCode());
    return result;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    SMGNodeMapping other = (SMGNodeMapping) obj;
    if (object_map == null) {
      if (other.object_map != null) {
        return false;
      }
    } else if (!object_map.equals(other.object_map)) {
      return false;
    }
    if (value_map == null) {
      if (other.value_map != null) {
        return false;
      }
    } else if (!value_map.equals(other.value_map)) {
      return false;
    }
    return true;
  }

  public SMGNodeMapping() {}

  // Copy constructor: deep-copies the map entries (keys/values are shared).
  public SMGNodeMapping(SMGNodeMapping origin) {
    object_map.putAll(origin.object_map);
    value_map.putAll(origin.value_map);
  }

  public Integer get(Integer i) {
    return value_map.get(i);
  }

  public SMGObject get (SMGObject o) {
    return object_map.get(o);
  }

  public void map(SMGObject key, SMGObject value){
    object_map.put(key, value);
  }

  public void map(Integer key, Integer value) {
    value_map.put(key, value);
  }

  public boolean containsKey(Integer key) {
    return value_map.containsKey(key);
  }

  public boolean containsKey(SMGObject key) {
    return object_map.containsKey(key);
  }

  public boolean containsValue(SMGObject value) {
    return object_map.containsValue(value);
  }
}
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.apache.impala.rewrite; import java.util.ArrayList; import java.util.List; import org.apache.impala.analysis.Analyzer; import org.apache.impala.analysis.BinaryPredicate; import org.apache.impala.analysis.BoolLiteral; import org.apache.impala.analysis.CaseExpr; import org.apache.impala.analysis.CaseWhenClause; import org.apache.impala.analysis.CompoundPredicate; import org.apache.impala.analysis.Expr; import org.apache.impala.analysis.FunctionCallExpr; import org.apache.impala.analysis.FunctionName; import org.apache.impala.analysis.NullLiteral; import org.apache.impala.common.AnalysisException; import com.google.common.base.Preconditions; /*** * This rule simplifies conditional functions with constant conditions. It relies on * FoldConstantsRule to replace the constant conditions with a BoolLiteral or NullLiteral * first, and on NormalizeExprsRule to normalize CompoundPredicates. 
* * Examples: * if (true, 0, 1) -> 0 * id = 0 OR false -> id = 0 * false AND id = 1 -> false * case when false then 0 when true then 1 end -> 1 */ public class SimplifyConditionalsRule implements ExprRewriteRule { public static ExprRewriteRule INSTANCE = new SimplifyConditionalsRule(); @Override public Expr apply(Expr expr, Analyzer analyzer) throws AnalysisException { if (!expr.isAnalyzed()) return expr; if (expr instanceof FunctionCallExpr) { return simplifyFunctionCallExpr((FunctionCallExpr) expr); } else if (expr instanceof CompoundPredicate) { return simplifyCompoundPredicate((CompoundPredicate) expr); } else if (expr instanceof CaseExpr) { return simplifyCaseExpr((CaseExpr) expr, analyzer); } return expr; } /** * Simplifies IF by returning the corresponding child if the condition has a constant * TRUE, FALSE, or NULL (equivalent to FALSE) value. */ private Expr simplifyFunctionCallExpr(FunctionCallExpr expr) { FunctionName fnName = expr.getFnName(); // TODO: Add the other conditional functions, eg. ifnull, istrue, etc. if (fnName.getFunction().equals("if")) { Preconditions.checkState(expr.getChildren().size() == 3); if (expr.getChild(0) instanceof BoolLiteral) { if (((BoolLiteral) expr.getChild(0)).getValue()) { // IF(TRUE) return expr.getChild(1); } else { // IF(FALSE) return expr.getChild(2); } } else if (expr.getChild(0) instanceof NullLiteral) { // IF(NULL) return expr.getChild(2); } } return expr; } /** * Simplifies compound predicates with at least one BoolLiteral child, which * NormalizeExprsRule ensures will be the left child, according to the following rules: * true AND 'expr' -> 'expr' * false AND 'expr' -> false * true OR 'expr' -> true * false OR 'expr' -> 'expr' * * Unlike other rules here such as IF, we cannot in general simplify CompoundPredicates * with a NullLiteral child (unless the other child is a BoolLiteral), eg. null and * 'expr' is false if 'expr' is false but null if 'expr' is true. * * NOT is covered by FoldConstantRule. 
*/ private Expr simplifyCompoundPredicate(CompoundPredicate expr) { Expr leftChild = expr.getChild(0); if (!(leftChild instanceof BoolLiteral)) return expr; if (expr.getOp() == CompoundPredicate.Operator.AND) { if (((BoolLiteral) leftChild).getValue()) { // TRUE AND 'expr', so return 'expr'. return expr.getChild(1); } else { // FALSE AND 'expr', so return FALSE. return leftChild; } } else if (expr.getOp() == CompoundPredicate.Operator.OR) { if (((BoolLiteral) leftChild).getValue()) { // TRUE OR 'expr', so return TRUE. return leftChild; } else { // FALSE OR 'expr', so return 'expr'. return expr.getChild(1); } } return expr; } /** * Simpilfies CASE and DECODE. If any of the 'when's have constant FALSE/NULL values, * they are removed. If all of the 'when's are removed, just the ELSE is returned. If * any of the 'when's have constant TRUE values, the leftmost one becomes the ELSE * clause and all following cases are removed. */ private Expr simplifyCaseExpr(CaseExpr expr, Analyzer analyzer) throws AnalysisException { Expr caseExpr = expr.hasCaseExpr() ? expr.getChild(0) : null; if (expr.hasCaseExpr() && !caseExpr.isLiteral()) return expr; int numChildren = expr.getChildren().size(); int loopStart = expr.hasCaseExpr() ? 1 : 0; // Check and return early if there's nothing that can be simplified. boolean canSimplify = false; for (int i = loopStart; i < numChildren - 1; i += 2) { if (expr.getChild(i).isLiteral()) { canSimplify = true; break; } } if (!canSimplify) return expr; // Contains all 'when' clauses with non-constant conditions, used to construct the new // CASE expr while removing any FALSE or NULL cases. List<CaseWhenClause> newWhenClauses = new ArrayList<CaseWhenClause>(); // Set to THEN of first constant TRUE clause, if any. 
Expr elseExpr = null; for (int i = loopStart; i < numChildren - 1; i += 2) { Expr child = expr.getChild(i); if (child instanceof NullLiteral) continue; Expr whenExpr; if (expr.hasCaseExpr()) { if (child.isLiteral()) { BinaryPredicate pred = new BinaryPredicate( BinaryPredicate.Operator.EQ, caseExpr, expr.getChild(i)); pred.analyze(analyzer); whenExpr = analyzer.getConstantFolder().rewrite(pred, analyzer); } else { whenExpr = null; } } else { whenExpr = child; } if (whenExpr instanceof BoolLiteral) { if (((BoolLiteral) whenExpr).getValue()) { if (newWhenClauses.size() == 0) { // This WHEN is always TRUE, and any cases preceding it are constant // FALSE/NULL, so just return its THEN. return expr.getChild(i + 1).castTo(expr.getType()); } else { // This WHEN is always TRUE, so the cases after it can never be reached. elseExpr = expr.getChild(i + 1); break; } } else { // This WHEN is always FALSE, so it can be removed. } } else { newWhenClauses.add(new CaseWhenClause(child, expr.getChild(i + 1))); } } if (expr.hasElseExpr() && elseExpr == null) elseExpr = expr.getChild(numChildren - 1); if (newWhenClauses.size() == 0) { // All of the WHEN clauses were FALSE, return the ELSE. if (elseExpr == null) return NullLiteral.create(expr.getType()); return elseExpr; } return new CaseExpr(caseExpr, newWhenClauses, elseExpr); } }
/* * ja, a Java-bytecode translator toolkit. * Copyright (C) 1999- Shigeru Chiba. All Rights Reserved. * * The contents of this file are subject to the Mozilla Public License Version * 1.1 (the "License"); you may not use this file except in compliance with * the License. Alternatively, the contents of this file may be used under * the terms of the GNU Lesser General Public License Version 2.1 or later, * or the Apache License Version 2.0. * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. */ package ja.bytecode.analysis; /** * Represents the stack frame and local variable table at a particular point in * time. * * @author Jason T. Greene */ public class Frame { private Type[] locals; private Type[] stack; private int top; private boolean jsrMerged; private boolean retMerged; /** * Create a new frame with the specified local variable table size, and max * stack size * * @param locals * the number of local variable table entries * @param stack * the maximum stack size */ public Frame(int locals, int stack) { this.locals = new Type[locals]; this.stack = new Type[stack]; } /** * Returns the local varaible table entry at index. * * @param index * the position in the table * @return the type if one exists, or null if the position is empty */ public Type getLocal(int index) { return locals[index]; } /** * Sets the local variable table entry at index to a type. * * @param index * the position in the table * @param type * the type to set at the position */ public void setLocal(int index, Type type) { locals[index] = type; } /** * Returns the type on the stack at the specified index. 
* * @param index * the position on the stack * @return the type of the stack position */ public Type getStack(int index) { return stack[index]; } /** * Sets the type of the stack position * * @param index * the position on the stack * @param type * the type to set */ public void setStack(int index, Type type) { stack[index] = type; } /** * Empties the stack */ public void clearStack() { top = 0; } /** * Gets the index of the type sitting at the top of the stack. This is not * to be confused with a length operation which would return the number of * elements, not the position of the last element. * * @return the position of the element at the top of the stack */ public int getTopIndex() { return top - 1; } /** * Returns the number of local variable table entries, specified at * construction. * * @return the number of local variable table entries */ public int localsLength() { return locals.length; } /** * Gets the top of the stack without altering it * * @return the top of the stack */ public Type peek() { if (top < 1) throw new IndexOutOfBoundsException("Stack is empty"); return stack[top - 1]; } /** * Alters the stack to contain one less element and return it. * * @return the element popped from the stack */ public Type pop() { if (top < 1) throw new IndexOutOfBoundsException("Stack is empty"); return stack[--top]; } /** * Alters the stack by placing the passed type on the top * * @param type * the type to add to the top */ public void push(Type type) { stack[top++] = type; } /** * Makes a shallow copy of this frame, i.e. the type instances will remain * the same. * * @return the shallow copy */ public Frame copy() { Frame frame = new Frame(locals.length, stack.length); System.arraycopy(locals, 0, frame.locals, 0, locals.length); System.arraycopy(stack, 0, frame.stack, 0, stack.length); frame.top = top; return frame; } /** * Makes a shallow copy of the stack portion of this frame. The local * variable table size will be copied, but its contents will be empty. 
* * @return the shallow copy of the stack */ public Frame copyStack() { Frame frame = new Frame(locals.length, stack.length); System.arraycopy(stack, 0, frame.stack, 0, stack.length); frame.top = top; return frame; } /** * Merges all types on the stack of this frame instance with that of the * specified frame. The local variable table is left untouched. * * @param frame * the frame to merge the stack from * @return true if any changes where made */ public boolean mergeStack(Frame frame) { boolean changed = false; if (top != frame.top) throw new RuntimeException( "Operand stacks could not be merged, they are different sizes!"); for (int i = 0; i < top; i++) { if (stack[i] != null) { Type prev = stack[i]; Type merged = prev.merge(frame.stack[i]); if (merged == Type.BOGUS) throw new RuntimeException( "Operand stacks could not be merged due to differing primitive types: pos = " + i); stack[i] = merged; // always replace the instance in case a multi-interface type // changes to a normal Type if ((!merged.equals(prev)) || merged.popChanged()) { changed = true; } } } return changed; } /** * Merges all types on the stack and local variable table of this frame with * that of the specified type. 
* * @param frame * the frame to merge with * @return true if any changes to this frame where made by this merge */ public boolean merge(Frame frame) { boolean changed = false; // Local variable table for (int i = 0; i < locals.length; i++) { if (locals[i] != null) { Type prev = locals[i]; Type merged = prev.merge(frame.locals[i]); // always replace the instance in case a multi-interface type // changes to a normal Type locals[i] = merged; if (!merged.equals(prev) || merged.popChanged()) { changed = true; } } else if (frame.locals[i] != null) { locals[i] = frame.locals[i]; changed = true; } } changed |= mergeStack(frame); return changed; } public String toString() { StringBuffer buffer = new StringBuffer(); buffer.append("locals = ["); for (int i = 0; i < locals.length; i++) { buffer.append(locals[i] == null ? "empty" : locals[i].toString()); if (i < locals.length - 1) buffer.append(", "); } buffer.append("] stack = ["); for (int i = 0; i < top; i++) { buffer.append(stack[i]); if (i < top - 1) buffer.append(", "); } buffer.append("]"); return buffer.toString(); } /** * Whether or not state from the source JSR instruction has been merged * * @return true if JSR state has been merged */ boolean isJsrMerged() { return jsrMerged; } /** * Sets whether of not the state from the source JSR instruction has been * merged * * @param jsrMerged * true if merged, otherwise false */ void setJsrMerged(boolean jsrMerged) { this.jsrMerged = jsrMerged; } /** * Whether or not state from the RET instruction, of the subroutine that was * jumped to has been merged. * * @return true if RET state has been merged */ boolean isRetMerged() { return retMerged; } /** * Sets whether or not state from the RET instruction, of the subroutine * that was jumped to has been merged. * * @param retMerged * true if RET state has been merged */ void setRetMerged(boolean retMerged) { this.retMerged = retMerged; } }
/*--------------------------------------------------------
 * Copyright (c) 2011, The Dojo Foundation
 * This software is distributed under the "Simplified BSD license",
 * the text of which is available at http://www.winktoolkit.org/licence.txt
 * or see the "license.txt" file for more details.
 *--------------------------------------------------------*/

/**
 *
 */
package com.orange.wink.parse;

import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.util.Collection;
import java.util.List;

import org.mozilla.javascript.CompilerEnvirons;
import org.mozilla.javascript.ErrorReporter;
import org.mozilla.javascript.Parser;
import org.mozilla.javascript.ScriptOrFnNode;

import com.orange.wink.Constants;
import com.orange.wink.ast.Ast;
import com.orange.wink.ast.AstBuilder;
import com.orange.wink.exception.WinkAstException;
import com.orange.wink.exception.WinkParseException;
import com.orange.wink.model.FunctionObject;
import com.orange.wink.model.GlobalObject;
import com.orange.wink.model.LiteralObject;
import com.orange.wink.model.ScriptObject;
import com.orange.wink.util.Common;
import com.orange.wink.util.FileManager;
import com.orange.wink.util.FileObject;
import com.orange.wink.util.WinkJsFile;

/**
 * Parses a set of JavaScript files with the Rhino parser, builds a Wink AST
 * and scope model per file (chained into one global scope), and then
 * back-fills source text and line/char positions for every function and
 * literal found in the model.
 *
 * @author Sylvain Lalande
 */
public class WinkParser {
	// Rhino parser instance shared by all files.
	private final Parser parser;
	// Rhino compiler configuration used to create the parser.
	private final CompilerEnvirons compilerEnv;
	// Error reporter taken from the compiler environment.
	private final ErrorReporter errorReporter;
	// Parsed files; left empty when Constants.optimDontKeepJsFile is set.
	private final List<WinkJsFile> jsFiles;
	// Scope of the first parsed file; later file scopes are chained to it.
	private GlobalObject globalScope;

	/**
	 *
	 */
	public WinkParser() {
		compilerEnv = new CompilerEnvirons();
		errorReporter = compilerEnv.getErrorReporter();
		parser = new Parser(compilerEnv, errorReporter);
		jsFiles = Common.newArrayList(1);
	}

	/**
	 * Parses every file, then resolves source text for all functions and
	 * literals in the resulting model.
	 *
	 * @param files
	 * @throws WinkAstException
	 * @throws WinkParseException
	 */
	public void parse(final List<String> files) throws WinkAstException, WinkParseException {
		for (final String fname : files) {
			parse(fname);
		}
		if (getGlobalScope() != null) {
			populateFunctions();
			populateLiterals();
		}
	}

	/**
	 * Parses one file into an AST and registers its global scope.
	 *
	 * @param fileName
	 * @throws WinkAstException
	 * @throws WinkParseException
	 */
	private void parse(final String fileName) throws WinkAstException, WinkParseException {
		ScriptOrFnNode tree;
		Ast ast;
		final AstBuilder astBuilder = new AstBuilder();

		try {
			tree = getParsedAst(fileName);
			ast = astBuilder.build(tree);
			// System.out.println(ast);
		} catch (final IOException e) {
			throw new WinkParseException(e);
		}

		addJsFile(fileName, new GlobalObject(ast));
	}

	/**
	 * Runs the Rhino parser on a file.
	 *
	 * @param fileName
	 * @return
	 * @throws IOException
	 */
	private ScriptOrFnNode getParsedAst(final String fileName) throws IOException {
		final File f = new File(fileName);
		final Reader reader = new FileReader(f);
		String sourceURI;
		ScriptOrFnNode tree = null;
		sourceURI = f.getCanonicalPath();
		tree = parser.parse(reader, sourceURI, 1);
		reader.close();
		return tree;
	}

	/**
	 * Registers a file scope: the first file becomes the global scope, later
	 * scopes have it as parent. Interprets the scope immediately.
	 *
	 * @param filename
	 * @param scope
	 * @throws WinkParseException
	 */
	private void addJsFile(final String filename, final GlobalObject scope) throws WinkParseException {
		scope.setParent(getGlobalScope());
		if (globalScope == null) {
			globalScope = scope;
		}
		if (Constants.optimDontKeepJsFile) {
			// Sources are re-read on demand via FileManager instead.
		} else {
			jsFiles.add(new WinkJsFile(filename, scope));
		}
		scope.setSourceName(filename);
		scope.interpret();
	}

	/**
	 * Looks up a previously registered file by name, or null.
	 *
	 * @param filename
	 * @return
	 */
	private WinkJsFile getJsFile(final String filename) {
		for (final WinkJsFile jf : jsFiles) {
			if (jf.getFilename().equals(filename)) {
				return jf;
			}
		}
		return null;
	}

	/**
	 * @return
	 */
	public GlobalObject getGlobalScope() {
		// if (jsFiles.size() > 0) {
		// return jsFiles.get(0).getScope();
		// }
		return globalScope;
	}

	/**
	 * Recursively attaches source name and source text to every function in
	 * the scope, then recurses into nested functions and literals.
	 *
	 * @param scope
	 * @throws WinkAstException
	 * @throws WinkParseException
	 */
	private void populateFunctionR(final ScriptObject scope) throws WinkAstException, WinkParseException {
		final Collection<FunctionObject> fns = scope.getFunctions().values();
		for (final FunctionObject f : fns) {
			final String sourceName = f.getGlobalScope().getSourceName();
			f.setSourceName(sourceName);
			String fsource;
			try {
				if (Constants.optimDontKeepJsFile) {
					fsource = FileManager.getFileObject(sourceName).getLinesAsString(f.getLineStart(), f.getLineEnd());
				} else {
					fsource = getJsFile(sourceName).getLinesAsString(f.getLineStart(), f.getLineEnd());
				}
				ParserUtils.updateFunctionInfo(f, fsource);
			} catch (final IOException e) {
				throw new WinkParseException(e);
			}
			populateFunctionR(f);
		}
		final Collection<LiteralObject> lts = scope.getLiterals().values();
		for (final LiteralObject l : lts) {
			populateFunctionR(l);
		}
	}

	/**
	 * @throws WinkAstException
	 * @throws WinkParseException
	 */
	private void populateFunctions() throws WinkAstException, WinkParseException {
		populateFunctionR(getGlobalScope());
	}

	/**
	 * Recursively resolves line and character positions for every literal in
	 * the scope; virtual literals have no concrete source span and are skipped.
	 *
	 * @param scope
	 * @throws WinkAstException
	 * @throws WinkParseException
	 */
	private void populateLiteralsR(final ScriptObject scope) throws WinkAstException, WinkParseException {
		final Collection<LiteralObject> lts = scope.getLiterals().values();
		for (final LiteralObject lt : lts) {
			final String sourceName = lt.getGlobalScope().getSourceName();
			lt.setSourceName(sourceName);
			if (scope.getSourceName().equals(sourceName)) {
				if (lt.getLineStart() == -1) {
					// Inherit the enclosing scope's start when unknown.
					lt.setLineStart(scope.getLineStart());
				}
			}
			if (!lt.isVirtual()) {
				FileObject fo = null;
				WinkJsFile jf = null;
				String ltSource;
				int linesSize = -1;
				try {
					if (Constants.optimDontKeepJsFile) {
						fo = FileManager.getFileObject(sourceName);
						if ((lt.getLineEnd() == -1)) {
							linesSize = fo.getLines().size();
						}
					} else {
						jf = getJsFile(sourceName);
						if ((lt.getLineEnd() == -1)) {
							linesSize = jf.getLines().size();
						}
					}
					// Unknown bounds fall back to line 1 / end of file.
					final int lns = (lt.getLineStart() == -1) ? 1 : lt.getLineStart();
					final int lne = (lt.getLineEnd() == -1) ? linesSize : lt.getLineEnd();
					if (Constants.optimDontKeepJsFile) {
						ltSource = fo.getLinesAsString(lns, lne);
					} else {
						ltSource = jf.getLinesAsString(lns, lne);
					}
					ParserUtils.updateLiteralLines(lt, ltSource, lns);
					if (lt.getLineStart() == -1 || lt.getLineEnd() == -1) {
						throw new WinkParseException("Bad literal lines [" + lt.getNamespace() + "] identified (L:" + lt.getLineStart() + ", " + lt.getLineEnd() + ")");
					}
					String las;
					if (Constants.optimDontKeepJsFile) {
						las = fo.getLinesAsString(lt.getLineStart(), lt.getLineEnd());
					} else {
						las = jf.getLinesAsString(lt.getLineStart(), lt.getLineEnd());
					}
					ParserUtils.updateLiteralChars(lt, las);
				} catch (final IOException e) {
					throw new WinkParseException(e);
				}
			}
			populateLiteralsR(lt);
		}
		final Collection<FunctionObject> fns = scope.getFunctions().values();
		for (final FunctionObject f : fns) {
			populateLiteralsR(f);
		}
	}

	/**
	 * @throws WinkAstException
	 * @throws WinkParseException
	 */
	private void populateLiterals() throws WinkAstException, WinkParseException {
		populateLiteralsR(getGlobalScope());
	}

	/**
	 * Debug helper: prints the resolved source of a function or literal and
	 * recurses into its children.
	 *
	 * @param o
	 * @throws WinkParseException
	 */
	private void printSource(final ScriptObject o) throws WinkParseException {
		if (!(o instanceof FunctionObject || o instanceof LiteralObject)) {
			return;
		}
		if (!(o instanceof GlobalObject)) {
			WinkJsFile jf;
			FileObject fo;
			String lines, source;
			try {
				if (Constants.optimDontKeepJsFile) {
					fo = FileManager.getFileObject(o.getSourceName());
					lines = fo.getLinesAsString(o.getLineStart(), o.getLineEnd());
				} else {
					jf = getJsFile(o.getSourceName());
					lines = jf.getLinesAsString(o.getLineStart(), o.getLineEnd());
				}
				System.out.println("------------ " + o);
				if (!o.isVirtual()) {
					source = lines.substring(o.getCharStart(), o.getCharEnd());
					System.out.println(source);
				}
				System.out.println();
			} catch (final IOException e) {
				throw new WinkParseException(e);
			}
		}
		final Collection<FunctionObject> fns = o.getFunctions().values();
		for (final FunctionObject c : fns) {
			printSource(c);
		}
		final Collection<LiteralObject> lts = o.getLiterals().values();
		for (final LiteralObject c : lts) {
			printSource(c);
		}
	}

	/**
	 * Prints the AST and the scope model of the global scope to stdout.
	 *
	 * @throws WinkParseException
	 */
	public void print() throws WinkParseException {
		final GlobalObject scope = getGlobalScope();
		if (scope == null) {
			return;
		}
		System.out.println(scope.getAst().toStringInner());
		System.out.println(scope.getAst());

		// System.out.println("------------ FILES");
		// for (final WinkJsFile jsfile : jsFiles) {
		// System.out.println("- " + jsfile.getFilename());
		// }

		System.out.println("\n------------ MODEL");
		System.out.println(scope.toStringRecursive(null, 0));

		// System.out.println("\n------------ SOURCES");
		// printSource(scope);
	}
}
/*
 * JasperReports - Free Java Reporting Library.
 * Copyright (C) 2001 - 2014 TIBCO Software Inc. All rights reserved.
 * http://www.jaspersoft.com
 *
 * Unless you have purchased a commercial license agreement from Jaspersoft,
 * the following license terms apply:
 *
 * This program is part of JasperReports.
 *
 * JasperReports is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * JasperReports is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with JasperReports. If not, see <http://www.gnu.org/licenses/>.
 */
package net.sf.jasperreports.charts.design;

import java.awt.Color;

import net.sf.jasperreports.charts.base.JRBaseAreaPlot;
import net.sf.jasperreports.charts.util.JRAxisFormat;
import net.sf.jasperreports.engine.JRChart;
import net.sf.jasperreports.engine.JRChartPlot;
import net.sf.jasperreports.engine.JRConstants;
import net.sf.jasperreports.engine.JRExpression;
import net.sf.jasperreports.engine.JRFont;

/**
 * Design-time area chart plot. Each mutator stores the new value and fires a
 * JavaBeans property-change event through {@code getEventSupport()} so that
 * design tools can observe edits.
 *
 * @author Flavius Sana (flavius_sana@users.sourceforge.net)
 * @version $Id: JRDesignAreaPlot.java 7199 2014-08-27 13:58:10Z teodord $
 */
public class JRDesignAreaPlot extends JRBaseAreaPlot implements JRDesignCategoryPlot
{
    private static final long serialVersionUID = JRConstants.SERIAL_VERSION_UID;

    public static final String PROPERTY_CATEGORY_AXIS_LABEL_COLOR = "categoryAxisLabelColor";
    public static final String PROPERTY_CATEGORY_AXIS_LABEL_FONT = "categoryAxisLabelFont";
    public static final String PROPERTY_CATEGORY_AXIS_LINE_COLOR = "categoryAxisLineColor";
    public static final String PROPERTY_CATEGORY_AXIS_TICK_LABEL_COLOR = "categoryAxisTickLabelColor";
    public static final String PROPERTY_CATEGORY_AXIS_TICK_LABEL_FONT = "categoryAxisTickLabelFont";
    public static final String PROPERTY_CATEGORY_AXIS_TICK_LABEL_MASK = "categoryAxisTickLabelMask";
    public static final String PROPERTY_CATEGORY_AXIS_VERTICAL_TICK_LABELS = "categoryAxisVerticalTickLabels";
    public static final String PROPERTY_VALUE_AXIS_LABEL_COLOR = "valueAxisLabelColor";
    public static final String PROPERTY_VALUE_AXIS_LABEL_FONT = "valueAxisLabelFont";
    public static final String PROPERTY_VALUE_AXIS_LINE_COLOR = "valueAxisLineColor";
    public static final String PROPERTY_VALUE_AXIS_TICK_LABEL_COLOR = "valueAxisTickLabelColor";
    public static final String PROPERTY_VALUE_AXIS_TICK_LABEL_FONT = "valueAxisTickLabelFont";
    public static final String PROPERTY_VALUE_AXIS_TICK_LABEL_MASK = "valueAxisTickLabelMask";
    public static final String PROPERTY_VALUE_AXIS_VERTICAL_TICK_LABELS = "valueAxisVerticalTickLabels";

    /**
     * Creates a design-time area plot.
     *
     * @param plot the plot to copy base settings from
     * @param chart the chart this plot belongs to
     */
    public JRDesignAreaPlot(JRChartPlot plot, JRChart chart)
    {
        super(plot, chart);
    }

    /**
     * Sets the expression that produces the category axis label and
     * notifies listeners.
     *
     * @param categoryAxisLabelExpression the category axis label expression
     */
    public void setCategoryAxisLabelExpression(JRExpression categoryAxisLabelExpression)
    {
        final Object oldValue = this.categoryAxisLabelExpression;
        this.categoryAxisLabelExpression = categoryAxisLabelExpression;
        getEventSupport().firePropertyChange(PROPERTY_CATEGORY_AXIS_LABEL_EXPRESSION, oldValue, this.categoryAxisLabelExpression);
    }

    /**
     * Sets the font of the category axis label and notifies listeners.
     *
     * @param categoryAxisLabelFont the category axis label font
     */
    public void setCategoryAxisLabelFont(JRFont categoryAxisLabelFont)
    {
        final Object oldValue = this.categoryAxisLabelFont;
        this.categoryAxisLabelFont = categoryAxisLabelFont;
        getEventSupport().firePropertyChange(PROPERTY_CATEGORY_AXIS_LABEL_FONT, oldValue, this.categoryAxisLabelFont);
    }

    /**
     * Sets the color of the category axis label and notifies listeners.
     *
     * NOTE(review, from original FIXMECHART): the axis-format setters could be
     * moved into the interface and base class.
     *
     * @param categoryAxisLabelColor the category axis label color
     */
    public void setCategoryAxisLabelColor(Color categoryAxisLabelColor)
    {
        final Object oldValue = this.categoryAxisLabelColor;
        this.categoryAxisLabelColor = categoryAxisLabelColor;
        getEventSupport().firePropertyChange(PROPERTY_CATEGORY_AXIS_LABEL_COLOR, oldValue, this.categoryAxisLabelColor);
    }

    /**
     * Sets the font of the category axis tick labels and notifies listeners.
     *
     * @param categoryAxisTickLabelFont the category axis tick label font
     */
    public void setCategoryAxisTickLabelFont(JRFont categoryAxisTickLabelFont)
    {
        final Object oldValue = this.categoryAxisTickLabelFont;
        this.categoryAxisTickLabelFont = categoryAxisTickLabelFont;
        getEventSupport().firePropertyChange(PROPERTY_CATEGORY_AXIS_TICK_LABEL_FONT, oldValue, this.categoryAxisTickLabelFont);
    }

    /**
     * Sets the color of the category axis tick labels and notifies listeners.
     *
     * @param categoryAxisTickLabelColor the category axis tick label color
     */
    public void setCategoryAxisTickLabelColor(Color categoryAxisTickLabelColor)
    {
        final Object oldValue = this.categoryAxisTickLabelColor;
        this.categoryAxisTickLabelColor = categoryAxisTickLabelColor;
        getEventSupport().firePropertyChange(PROPERTY_CATEGORY_AXIS_TICK_LABEL_COLOR, oldValue, this.categoryAxisTickLabelColor);
    }

    /**
     * Sets the formatting mask for the category axis tick labels and
     * notifies listeners.
     *
     * @param categoryAxisTickLabelMask the category axis tick label mask
     */
    public void setCategoryAxisTickLabelMask(String categoryAxisTickLabelMask)
    {
        final Object oldValue = this.categoryAxisTickLabelMask;
        this.categoryAxisTickLabelMask = categoryAxisTickLabelMask;
        getEventSupport().firePropertyChange(PROPERTY_CATEGORY_AXIS_TICK_LABEL_MASK, oldValue, this.categoryAxisTickLabelMask);
    }

    /**
     * Sets whether the category axis tick labels are rendered vertically and
     * notifies listeners.
     *
     * @param categoryAxisVerticalTickLabels vertical tick labels flag; may be null
     */
    public void setCategoryAxisVerticalTickLabels(Boolean categoryAxisVerticalTickLabels)
    {
        final Object oldValue = this.categoryAxisVerticalTickLabels;
        this.categoryAxisVerticalTickLabels = categoryAxisVerticalTickLabels;
        getEventSupport().firePropertyChange(PROPERTY_CATEGORY_AXIS_VERTICAL_TICK_LABELS, oldValue, this.categoryAxisVerticalTickLabels);
    }

    /**
     * Sets the color of the category axis line and notifies listeners.
     *
     * @param categoryAxisLineColor the category axis line color
     */
    public void setCategoryAxisLineColor(Color categoryAxisLineColor)
    {
        final Object oldValue = this.categoryAxisLineColor;
        this.categoryAxisLineColor = categoryAxisLineColor;
        getEventSupport().firePropertyChange(PROPERTY_CATEGORY_AXIS_LINE_COLOR, oldValue, this.categoryAxisLineColor);
    }

    /**
     * Sets the expression that produces the value axis label and
     * notifies listeners.
     *
     * @param valueAxisLabelExpression the value axis label expression
     */
    public void setValueAxisLabelExpression(JRExpression valueAxisLabelExpression)
    {
        final Object oldValue = this.valueAxisLabelExpression;
        this.valueAxisLabelExpression = valueAxisLabelExpression;
        getEventSupport().firePropertyChange(PROPERTY_VALUE_AXIS_LABEL_EXPRESSION, oldValue, this.valueAxisLabelExpression);
    }

    /**
     * Sets the expression for the domain axis minimum value and
     * notifies listeners.
     *
     * @param domainAxisMinValueExpression the domain axis minimum value expression
     */
    public void setDomainAxisMinValueExpression(JRExpression domainAxisMinValueExpression)
    {
        final Object oldValue = this.domainAxisMinValueExpression;
        this.domainAxisMinValueExpression = domainAxisMinValueExpression;
        getEventSupport().firePropertyChange(PROPERTY_DOMAIN_AXIS_MINVALUE_EXPRESSION, oldValue, this.domainAxisMinValueExpression);
    }

    /**
     * Sets the expression for the domain axis maximum value and
     * notifies listeners.
     *
     * @param domainAxisMaxValueExpression the domain axis maximum value expression
     */
    public void setDomainAxisMaxValueExpression(JRExpression domainAxisMaxValueExpression)
    {
        final Object oldValue = this.domainAxisMaxValueExpression;
        this.domainAxisMaxValueExpression = domainAxisMaxValueExpression;
        getEventSupport().firePropertyChange(PROPERTY_DOMAIN_AXIS_MAXVALUE_EXPRESSION, oldValue, this.domainAxisMaxValueExpression);
    }

    /**
     * Sets the expression for the range axis minimum value and
     * notifies listeners.
     *
     * @param rangeAxisMinValueExpression the range axis minimum value expression
     */
    public void setRangeAxisMinValueExpression(JRExpression rangeAxisMinValueExpression)
    {
        final Object oldValue = this.rangeAxisMinValueExpression;
        this.rangeAxisMinValueExpression = rangeAxisMinValueExpression;
        getEventSupport().firePropertyChange(PROPERTY_RANGE_AXIS_MINVALUE_EXPRESSION, oldValue, this.rangeAxisMinValueExpression);
    }

    /**
     * Sets the expression for the range axis maximum value and
     * notifies listeners.
     *
     * @param rangeAxisMaxValueExpression the range axis maximum value expression
     */
    public void setRangeAxisMaxValueExpression(JRExpression rangeAxisMaxValueExpression)
    {
        final Object oldValue = this.rangeAxisMaxValueExpression;
        this.rangeAxisMaxValueExpression = rangeAxisMaxValueExpression;
        getEventSupport().firePropertyChange(PROPERTY_RANGE_AXIS_MAXVALUE_EXPRESSION, oldValue, this.rangeAxisMaxValueExpression);
    }

    /**
     * Sets the font of the value axis label and notifies listeners.
     *
     * @param valueAxisLabelFont the value axis label font
     */
    public void setValueAxisLabelFont(JRFont valueAxisLabelFont)
    {
        final Object oldValue = this.valueAxisLabelFont;
        this.valueAxisLabelFont = valueAxisLabelFont;
        getEventSupport().firePropertyChange(PROPERTY_VALUE_AXIS_LABEL_FONT, oldValue, this.valueAxisLabelFont);
    }

    /**
     * Sets the color of the value axis label and notifies listeners.
     *
     * @param valueAxisLabelColor the value axis label color
     */
    public void setValueAxisLabelColor(Color valueAxisLabelColor)
    {
        final Object oldValue = this.valueAxisLabelColor;
        this.valueAxisLabelColor = valueAxisLabelColor;
        getEventSupport().firePropertyChange(PROPERTY_VALUE_AXIS_LABEL_COLOR, oldValue, this.valueAxisLabelColor);
    }

    /**
     * Sets the font of the value axis tick labels and notifies listeners.
     *
     * @param valueAxisTickLabelFont the value axis tick label font
     */
    public void setValueAxisTickLabelFont(JRFont valueAxisTickLabelFont)
    {
        final Object oldValue = this.valueAxisTickLabelFont;
        this.valueAxisTickLabelFont = valueAxisTickLabelFont;
        getEventSupport().firePropertyChange(PROPERTY_VALUE_AXIS_TICK_LABEL_FONT, oldValue, this.valueAxisTickLabelFont);
    }

    /**
     * Sets the color of the value axis tick labels and notifies listeners.
     *
     * @param valueAxisTickLabelColor the value axis tick label color
     */
    public void setValueAxisTickLabelColor(Color valueAxisTickLabelColor)
    {
        final Object oldValue = this.valueAxisTickLabelColor;
        this.valueAxisTickLabelColor = valueAxisTickLabelColor;
        getEventSupport().firePropertyChange(PROPERTY_VALUE_AXIS_TICK_LABEL_COLOR, oldValue, this.valueAxisTickLabelColor);
    }

    /**
     * Sets the formatting mask for the value axis tick labels and
     * notifies listeners.
     *
     * @param valueAxisTickLabelMask the value axis tick label mask
     */
    public void setValueAxisTickLabelMask(String valueAxisTickLabelMask)
    {
        final Object oldValue = this.valueAxisTickLabelMask;
        this.valueAxisTickLabelMask = valueAxisTickLabelMask;
        getEventSupport().firePropertyChange(PROPERTY_VALUE_AXIS_TICK_LABEL_MASK, oldValue, this.valueAxisTickLabelMask);
    }

    /**
     * Sets whether the value axis tick labels are rendered vertically and
     * notifies listeners.
     *
     * @param valueAxisVerticalTickLabels vertical tick labels flag; may be null
     */
    public void setValueAxisVerticalTickLabels(Boolean valueAxisVerticalTickLabels)
    {
        final Object oldValue = this.valueAxisVerticalTickLabels;
        this.valueAxisVerticalTickLabels = valueAxisVerticalTickLabels;
        getEventSupport().firePropertyChange(PROPERTY_VALUE_AXIS_VERTICAL_TICK_LABELS, oldValue, this.valueAxisVerticalTickLabels);
    }

    /**
     * Sets the color of the value axis line and notifies listeners.
     *
     * @param valueAxisLineColor the value axis line color
     */
    public void setValueAxisLineColor(Color valueAxisLineColor)
    {
        final Object oldValue = this.valueAxisLineColor;
        this.valueAxisLineColor = valueAxisLineColor;
        getEventSupport().firePropertyChange(PROPERTY_VALUE_AXIS_LINE_COLOR, oldValue, this.valueAxisLineColor);
    }

    /**
     * Applies a complete axis format to the category axis by delegating to
     * the individual setters (each fires its own change event).
     *
     * @param axisFormat the format to apply to the category axis
     */
    public void setCategoryAxisFormat(JRAxisFormat axisFormat)
    {
        setCategoryAxisLabelFont(axisFormat.getLabelFont());
        setCategoryAxisLabelColor(axisFormat.getLabelColor());
        setCategoryAxisTickLabelFont(axisFormat.getTickLabelFont());
        setCategoryAxisTickLabelColor(axisFormat.getTickLabelColor());
        setCategoryAxisTickLabelMask(axisFormat.getTickLabelMask());
        setCategoryAxisVerticalTickLabels(axisFormat.getVerticalTickLabels());
        setCategoryAxisLineColor(axisFormat.getLineColor());
    }

    /**
     * Applies a complete axis format to the value axis by delegating to
     * the individual setters (each fires its own change event).
     *
     * @param axisFormat the format to apply to the value axis
     */
    public void setValueAxisFormat(JRAxisFormat axisFormat)
    {
        setValueAxisLabelFont(axisFormat.getLabelFont());
        setValueAxisLabelColor(axisFormat.getLabelColor());
        setValueAxisTickLabelFont(axisFormat.getTickLabelFont());
        setValueAxisTickLabelColor(axisFormat.getTickLabelColor());
        setValueAxisTickLabelMask(axisFormat.getTickLabelMask());
        setValueAxisVerticalTickLabels(axisFormat.getVerticalTickLabels());
        setValueAxisLineColor(axisFormat.getLineColor());
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* $Id$ */ package org.apache.fop.afp; import java.awt.Point; import java.io.IOException; import java.io.ObjectInputStream; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.xmlgraphics.java2d.color.ColorConverter; import org.apache.xmlgraphics.java2d.color.DefaultColorConverter; import org.apache.fop.afp.fonts.AFPPageFonts; import org.apache.fop.util.AbstractPaintingState; /** * This keeps information about the current painting state when writing to an * AFP datastream. 
 */
public class AFPPaintingState extends org.apache.fop.util.AbstractPaintingState {

    private static final long serialVersionUID = 8206711712452344473L;

    /** logger shared with the XML Graphics AFP code */
    private static Log log = LogFactory.getLog("org.apache.xmlgraphics.afp");

    /** the portrait rotation */
    private int portraitRotation;

    /** the landscape rotation */
    private int landscapeRotation = 270;

    /** color image support */
    private boolean colorImages;

    /** dithering quality setting (0.0f..1.0f) */
    private float ditheringQuality;

    /** image encoding quality setting (0.0f..1.0f) */
    private float bitmapEncodingQuality;

    /** color image handler; grayscale by default, replaced when color images are enabled */
    private transient ColorConverter colorConverter;

    /**
     * true if certain image formats may be embedded unchanged in their native
     * format.
     */
    private boolean nativeImagesSupported;

    /** true if JPEG images may be embedded unchanged in an AFP document */
    private boolean canEmbedJpeg;

    /**
     * true if CMYK images (requires IOCA FS45 suppport on the target platform)
     * may be generated
     */
    private boolean cmykImagesSupported;

    /** default value for image depth */
    private int bitsPerPixel = 8;

    /** the output resolution */
    private int resolution = 240; // 240 dpi

    /**
     * A configurable value to correct the line width so that the output matches the expected. Different
     * devices may need different values.
     */
    private float lineWidthCorrection = AFPConstants.LINE_WIDTH_CORRECTION;

    /** determines whether GOCA is enabled or disabled */
    private boolean gocaEnabled = true;

    /** determines whether to stroke text in GOCA mode or to use text operators where possible */
    private boolean strokeGocaText;

    /** use page segment with F11 and F45 images*/
    private boolean pSeg;

    /** wrap GOCA graphics in a page segment */
    private boolean gocaPSeg;

    /** use FS45 images*/
    private boolean fs45;

    /** the current page */
    private transient AFPPagePaintingState pagePaintingState;

    // /** reference orientation */
    // private int orientation = 0;

    /** a unit converter */
    private final transient AFPUnitConverter unitConv;

    /**
     * Default constructor: grayscale color conversion, fresh page state, and a
     * unit converter bound to this painting state.
     */
    public AFPPaintingState() {
        colorConverter = GrayScaleColorConverter.getInstance();
        pagePaintingState = new AFPPagePaintingState();
        unitConv = new AFPUnitConverter(this);
    }

    // NOTE(review): only performs default deserialization; the transient fields
    // (colorConverter, pagePaintingState, unitConv) are left null afterwards —
    // TODO confirm deserialized instances are never used for painting directly.
    private void readObject(ObjectInputStream ois) throws ClassNotFoundException, IOException {
        ois.defaultReadObject();
    }

    /**
     * Sets the rotation to be used for portrait pages, valid values are 0
     * (default), 90, 180, 270.
     *
     * @param rotation
     *            The rotation in degrees.
     * @throws IllegalArgumentException if the rotation is not a multiple of 90 in [0, 270]
     */
    public void setPortraitRotation(int rotation) {
        if (rotation == 0 || rotation == 90 || rotation == 180 || rotation == 270) {
            portraitRotation = rotation;
        } else {
            throw new IllegalArgumentException("The portrait rotation must be one"
                    + " of the values 0, 90, 180, 270");
        }
    }

    /**
     * Returns the rotation to be used for portrait pages
     *
     * @return the rotation to be used for portrait pages
     */
    protected int getPortraitRotation() {
        return this.portraitRotation;
    }

    /**
     * Sets the rotation to be used for landscape pages, valid values are 0, 90,
     * 180, 270 (default).
     *
     * @param rotation
     *            The rotation in degrees.
     * @throws IllegalArgumentException if the rotation is not a multiple of 90 in [0, 270]
     */
    public void setLandscapeRotation(int rotation) {
        if (rotation == 0 || rotation == 90 || rotation == 180 || rotation == 270) {
            landscapeRotation = rotation;
        } else {
            throw new IllegalArgumentException("The landscape rotation must be one"
                    + " of the values 0, 90, 180, 270");
        }
    }

    /**
     * Returns the landscape rotation
     *
     * @return the landscape rotation
     */
    protected int getLandscapeRotation() {
        return this.landscapeRotation;
    }

    /**
     * Sets the number of bits used per pixel. Invalid values are not rejected:
     * a warning is logged and the depth falls back to 8.
     *
     * @param bitsPerPixel
     *            number of bits per pixel (1, 4 or 8)
     */
    public void setBitsPerPixel(int bitsPerPixel) {
        switch (bitsPerPixel) {
        case 1:
        case 4:
        case 8:
            this.bitsPerPixel = bitsPerPixel;
            break;
        default:
            log.warn("Invalid bits_per_pixel value, must be 1, 4 or 8.");
            this.bitsPerPixel = 8;
            break;
        }
    }

    /**
     * Returns the number of bits per pixel
     *
     * @return the number of bits per pixel
     */
    public int getBitsPerPixel() {
        return this.bitsPerPixel;
    }

    /**
     * Sets whether images are color or not and instantiates a ColorHandler.
     * Note: switching back to false does not restore the grayscale converter.
     *
     * @param colorImages
     *            color image output
     */
    public void setColorImages(boolean colorImages) {
        this.colorImages = colorImages;
        if (colorImages) {
            this.colorConverter = DefaultColorConverter.getInstance();
        }
    }

    /**
     * Returns true if color images are to be used
     *
     * @return true if color images are to be used
     */
    public boolean isColorImages() {
        return this.colorImages;
    }

    /**
     * Used to convert color in respect of the colorImages flag
     *
     * @return the color converter
     */
    public ColorConverter getColorConverter() {
        return this.colorConverter;
    }

    /**
     * Sets whether images are natively supported or not in the AFP environment
     *
     * @param nativeImagesSupported
     *            true if images are natively supported in this AFP environment
     */
    public void setNativeImagesSupported(boolean nativeImagesSupported) {
        this.nativeImagesSupported = nativeImagesSupported;
    }

    /**
     * Returns true if images are supported natively in this AFP environment
     *
     * @return true if images are supported natively in this AFP environment
     */
    public boolean isNativeImagesSupported() {
        return this.nativeImagesSupported;
    }

    /**
     * Set whether or not JPEG images can be embedded within an AFP document.
     *
     * @param canEmbed true if the JPEG image can be embedded
     */
    public void setCanEmbedJpeg(boolean canEmbed) {
        canEmbedJpeg = canEmbed;
    }

    /**
     * Returns true if JPEGs can be embedded in an AFP document.
     *
     * @return true if JPEG embedding is allowed
     */
    public boolean canEmbedJpeg() {
        return canEmbedJpeg;
    }

    /**
     * Controls whether CMYK images (IOCA FS45) are enabled. By default, support
     * is disabled for wider compatibility. When disabled, any CMYK image is
     * converted to the selected color format.
     *
     * @param value
     *            true to enabled CMYK images
     */
    public void setCMYKImagesSupported(boolean value) {
        this.cmykImagesSupported = value;
    }

    /**
     * Indicates whether CMYK images (IOCA FS45) are enabled.
     *
     * @return true if IOCA FS45 is enabled
     */
    public boolean isCMYKImagesSupported() {
        return this.cmykImagesSupported;
    }

    /**
     * Gets the dithering quality setting to use when converting images to monochrome images.
     * @return the dithering quality (a value between 0.0f and 1.0f)
     */
    public float getDitheringQuality() {
        return this.ditheringQuality;
    }

    /**
     * Sets the dithering quality setting to use when converting images to monochrome images.
     * Out-of-range values are clamped to [0.0f, 1.0f].
     * @param quality Defines the desired quality level for the conversion.
     *          Valid values: a value between 0.0f (fastest) and 1.0f (best)
     */
    public void setDitheringQuality(float quality) {
        quality = Math.max(quality, 0.0f);
        quality = Math.min(quality, 1.0f);
        this.ditheringQuality = quality;
    }

    /**
     * Gets the image encoding quality setting to use when encoding bitmap images.
     * @return the encoding quality (a value between 0.0f and 1.0f, 1.0 meaning loss-less)
     */
    public float getBitmapEncodingQuality() {
        return this.bitmapEncodingQuality;
    }

    /**
     * Sets the image encoding quality setting to use when encoding bitmap images.
     * Out-of-range values are clamped to [0.0f, 1.0f].
     * @param quality Defines the desired quality level for the conversion.
     *          Valid values: a value between 0.0f (lowest) and 1.0f (best, loss-less)
     */
    public void setBitmapEncodingQuality(float quality) {
        quality = Math.max(quality, 0.0f);
        quality = Math.min(quality, 1.0f);
        this.bitmapEncodingQuality = quality;
    }

    /**
     * Sets the output/device resolution
     *
     * @param resolution
     *            the output resolution (dpi)
     */
    public void setResolution(int resolution) {
        if (log.isDebugEnabled()) {
            log.debug("renderer-resolution set to: " + resolution + "dpi");
        }
        this.resolution = resolution;
    }

    /**
     * Sets the line width correction
     *
     * @param correction the line width multiplying factor correction
     */
    public void setLineWidthCorrection(float correction) {
        if (log.isDebugEnabled()) {
            log.debug("line width correction set to: " + correction);
        }
        this.lineWidthCorrection = correction;
    }

    /**
     * Returns the output/device resolution.
     *
     * @return the resolution in dpi
     */
    public int getResolution() {
        return this.resolution;
    }

    /**
     * Returns the line width correction.
     * @return the correction
     */
    public float getLineWidthCorrection() {
        return this.lineWidthCorrection;
    }

    /**
     * Controls whether GOCA is enabled or disabled.
     * @param enabled true if GOCA is enabled, false if it is disabled
     */
    public void setGOCAEnabled(boolean enabled) {
        this.gocaEnabled = enabled;
    }

    /**
     * Indicates whether GOCA is enabled or disabled.
     * @return true if GOCA is enabled, false if GOCA is disabled
     */
    public boolean isGOCAEnabled() {
        return this.gocaEnabled;
    }

    /**
     * Controls whether to stroke text in GOCA mode or to use text operators where possible.
     * @param stroke true to stroke, false to paint with text operators where possible
     */
    public void setStrokeGOCAText(boolean stroke) {
        this.strokeGocaText = stroke;
    }

    /**
     * Indicates whether to stroke text in GOCA mode or to use text operators where possible.
     * @return true to stroke, false to paint with text operators where possible
     */
    public boolean isStrokeGOCAText() {
        return this.strokeGocaText;
    }

    /**
     * Whether FS11 and SF45 non-inline images should be wrapped in a page segment
     * @return true iff images should be wrapped
     */
    public boolean getWrapPSeg() {
        return pSeg;
    }

    /**
     * Sets whether FS11 and FS45 non-inline images should be wrapped in a page segment
     * @param pSeg true iff images should be wrapped
     */
    public void setWrapPSeg(boolean pSeg) {
        this.pSeg = pSeg;
    }

    /**
     * Whether GOCA graphics should be wrapped in a page segment.
     * @return true iff GOCA graphics should be wrapped
     */
    public boolean getWrapGocaPSeg() {
        return gocaPSeg;
    }

    /**
     * Sets whether GOCA graphics should be wrapped in a page segment.
     * @param pSeg true iff GOCA graphics should be wrapped
     */
    public void setWrapGocaPSeg(boolean pSeg) {
        this.gocaPSeg = pSeg;
    }

    /**
     * gets whether images should be FS45
     * @return true iff images should be FS45
     */
    public boolean getFS45() {
        return fs45;
    }

    /**
     * sets whether images should be FS45
     * @param fs45 true iff images should be FS45
     */
    public void setFS45(boolean fs45) {
        this.fs45 = fs45;
    }

    /** {@inheritDoc} */
    @Override
    protected AbstractData instantiateData() {
        return new AFPData();
    }

    /** {@inheritDoc} */
    @Override
    protected AbstractPaintingState instantiate() {
        return new AFPPaintingState();
    }

    /**
     * Returns the painting state of the current page
     *
     * @return the painting state of the current page
     */
    protected AFPPagePaintingState getPagePaintingState() {
        return this.pagePaintingState;
    }

    /**
     * Gets the current page fonts
     *
     * @return the current page fonts
     */
    public AFPPageFonts getPageFonts() {
        return pagePaintingState.getFonts();
    }

    /**
     * Sets the page width
     *
     * @param pageWidth
     *            the page width
     */
    public void setPageWidth(int pageWidth) {
        pagePaintingState.setWidth(pageWidth);
    }

    /**
     * Returns the page width
     *
     * @return the page width
     */
    public int getPageWidth() {
        return pagePaintingState.getWidth();
    }

    /**
     * Sets the page height
     *
     * @param pageHeight
     *            the page height
     */
    public void setPageHeight(int pageHeight) {
        pagePaintingState.setHeight(pageHeight);
    }

    /**
     * Returns the page height
     *
     * @return the page height
     */
    public int getPageHeight() {
        return pagePaintingState.getHeight();
    }

    /**
     * Returns the page rotation
     *
     * @return the page rotation
     */
    public int getPageRotation() {
        return pagePaintingState.getOrientation();
    }

    /**
     * Sets the uri of the current image
     *
     * @param uri
     *            the uri of the current image
     */
    public void setImageUri(String uri) {
        ((AFPData) getData()).imageUri = uri;
    }

    /**
     * Gets the uri of the current image
     *
     * @return the uri of the current image
     */
    public String getImageUri() {
        return ((AFPData) getData()).imageUri;
    }

    /**
     * Returns the currently derived rotation
     *
     * @return the currently derived rotation
     */
    public int getRotation() {
        return getData().getDerivedRotation();
    }

    /**
     * Returns the unit converter
     *
     * @return the unit converter
     */
    public AFPUnitConverter getUnitConverter() {
        return this.unitConv;
    }

    /**
     * Returns a point on the current page, taking the current painting state
     * into account: the (x, y) coordinate is mapped through the currently
     * derived rotation (0/90/180/270 degrees) using the page dimensions.
     *
     * @param x
     *            the X-coordinate
     * @param y
     *            the Y-coordinate
     * @return a point on the current page
     */
    public Point getPoint(int x, int y) {
        Point p = new Point();
        int rotation = getRotation();
        switch (rotation) {
        case 90:
            p.x = y;
            p.y = getPageWidth() - x;
            break;
        case 180:
            p.x = getPageWidth() - x;
            p.y = getPageHeight() - y;
            break;
        case 270:
            p.x = getPageHeight() - y;
            p.y = x;
            break;
        default:
            p.x = x;
            p.y = y;
            break;
        }
        return p;
    }

    /** {@inheritDoc} */
    @Override
    public Object clone() {
        AFPPaintingState paintingState = (AFPPaintingState) super.clone();
        // page state gets a deep copy; the explicit field assignments below
        // are presumably already covered by the shallow super.clone() —
        // NOTE(review): other fields (e.g. ditheringQuality, gocaEnabled) rely
        // on that shallow copy; TODO confirm super.clone() is field-copying.
        paintingState.pagePaintingState = (AFPPagePaintingState) this.pagePaintingState.clone();
        paintingState.portraitRotation = this.portraitRotation;
        paintingState.landscapeRotation = this.landscapeRotation;
        paintingState.bitsPerPixel = this.bitsPerPixel;
        paintingState.colorImages = this.colorImages;
        paintingState.colorConverter = this.colorConverter;
        paintingState.resolution = this.resolution;
        return paintingState;
    }

    /** {@inheritDoc} */
    @Override
    public String toString() {
        return "AFPPaintingState{" + "portraitRotation=" + portraitRotation
                + ", landscapeRotation=" + landscapeRotation
                + ", colorImages=" + colorImages
                + ", bitsPerPixel=" + bitsPerPixel
                + ", resolution=" + resolution
                + ", pageState=" + pagePaintingState
                + super.toString()
                + "}";
    }

    /**
     * Page level state data
     */
    private class AFPPagePaintingState implements Cloneable {

        /** page width */
        private int width;

        /** page height */
        private int height;

        /** page fonts */
        private AFPPageFonts fonts = new AFPPageFonts();

        /** page font count */
        private int fontCount;

        /** page orientation */
        private int orientation;

        /**
         * Returns the page width
         *
         * @return the page width
         */
        protected int getWidth() {
            return width;
        }

        /**
         * Sets the page width
         *
         * @param width
         *            the page width
         */
        protected void setWidth(int width) {
            this.width = width;
        }

        /**
         * Returns the page height
         *
         * @return the page height
         */
        protected int getHeight() {
            return height;
        }

        /**
         * Sets the page height
         *
         * @param height
         *            the page height
         */
        protected void setHeight(int height) {
            this.height = height;
        }

        /**
         * Returns the page fonts
         *
         * @return the page fonts
         */
        protected AFPPageFonts getFonts() {
            return fonts;
        }

        /**
         * Sets the current page fonts
         *
         * @param fonts
         *            the current page fonts
         */
        protected void setFonts(AFPPageFonts fonts) {
            this.fonts = fonts;
        }

        /**
         * Increments and returns the current page font count
         *
         * @return increment and return the current page font count
         */
        protected int incrementFontCount() {
            return ++fontCount;
        }

        /**
         * Returns the current page orientation
         *
         * @return the current page orientation
         */
        protected int getOrientation() {
            return orientation;
        }

        /**
         * Sets the current page orientation
         *
         * @param orientation
         *            the current page orientation
         */
        protected void setOrientation(int orientation) {
            this.orientation = orientation;
        }

        /** {@inheritDoc} */
        @Override
        public Object clone() {
            // manual copy (not super.clone()) so the fonts map is duplicated
            AFPPagePaintingState state = new AFPPagePaintingState();
            state.width = this.width;
            state.height = this.height;
            state.orientation = this.orientation;
            state.fonts = new AFPPageFonts(this.fonts);
            state.fontCount = this.fontCount;
            return state;
        }

        /** {@inheritDoc} */
        @Override
        public String toString() {
            return "AFPPagePaintingState{width=" + width
                    + ", height=" + height
                    + ", orientation=" + orientation
                    + ", fonts=" + fonts
                    + ", fontCount=" + fontCount
                    + "}";
        }
    }

    /**
     * Block level state data
     */
    // @SuppressFBWarnings("SE_INNER_CLASS")
    private class AFPData extends org.apache.fop.util.AbstractPaintingState.AbstractData {

        private static final long serialVersionUID = -1789481244175275686L;

        /** The current fill status */
        private boolean filled;

        /** uri of the image currently being painted; may be null */
        private String imageUri;

        /** {@inheritDoc} */
        @Override
        public Object clone() {
            AFPData obj = (AFPData) super.clone();
            obj.filled = this.filled;
            obj.imageUri = this.imageUri;
            return obj;
        }

        /** {@inheritDoc} */
        @Override
        public String toString() {
            return "AFPData{" + super.toString()
                    + ", filled=" + filled
                    + ", imageUri=" + imageUri
                    + "}";
        }

        /** {@inheritDoc} */
        @Override
        protected AbstractData instantiate() {
            return new AFPData();
        }
    }

}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.prestosql.plugin.raptor.legacy.storage;

import io.airlift.log.Logger;
import io.prestosql.plugin.raptor.legacy.util.Closer;
import io.prestosql.plugin.raptor.legacy.util.SyncingFileSystem;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.io.orc.OrcFile;
import org.apache.hadoop.hive.ql.io.orc.OrcFile.WriterOptions;
import org.apache.hadoop.hive.ql.io.orc.OrcStruct;
import org.apache.hadoop.hive.ql.io.orc.Reader;
import org.apache.hadoop.hive.ql.io.orc.RecordReader;
import org.apache.hadoop.hive.ql.io.orc.Writer;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.orc.NullMemoryManager;

import java.io.File;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.nio.ByteBuffer;
import java.util.BitSet;
import java.util.List;
import java.util.Map;

import static io.airlift.slice.SizeOf.SIZE_OF_BYTE;
import static io.airlift.slice.SizeOf.SIZE_OF_DOUBLE;
import static io.airlift.slice.SizeOf.SIZE_OF_LONG;
import static io.airlift.units.Duration.nanosSince;
import static io.prestosql.plugin.raptor.legacy.util.Closer.closer;
import static java.lang.Math.toIntExact;
import static org.apache.hadoop.hive.ql.io.orc.OrcFile.createReader;
import static org.apache.hadoop.hive.ql.io.orc.OrcFile.createWriter;
import static org.apache.hadoop.hive.ql.io.orc.OrcUtil.getFieldValue;

/**
 * Static utility that copies an ORC file while dropping the rows whose indexes
 * are set in a deletion vector ({@link BitSet}). The surviving rows keep their
 * original order; the output file reuses the input's compression kind, object
 * inspector (schema), and the {@link OrcFileMetadata#KEY} user-metadata entry
 * when present.
 */
public final class OrcFileRewriter
{
    private static final Logger log = Logger.get(OrcFileRewriter.class);
    // Shared Hadoop configuration used for both the reading and writing file systems.
    private static final Configuration CONFIGURATION = new Configuration();

    // Static utility class; not instantiable.
    private OrcFileRewriter() {}

    /**
     * Rewrites {@code input} into {@code output}, omitting every row whose index
     * is set in {@code rowsToDelete}.
     *
     * <p>Note: when every input row is deleted, this returns {@code OrcFileInfo(0, 0)}
     * without creating an output file at all (the writer is never opened).
     *
     * @param input existing ORC file to filter
     * @param output destination for the surviving rows
     * @param rowsToDelete set bits mark row indexes to drop; its length must not
     *        exceed the input's row count
     * @return row count and approximate uncompressed byte size of the rows written
     * @throws IOException if the deletion vector is longer than the file, the file
     *         has {@code >= Integer.MAX_VALUE} rows, or any read/write fails
     */
    public static OrcFileInfo rewrite(File input, File output, BitSet rowsToDelete)
            throws IOException
    {
        // SyncingFileSystem flushes data durably on close (see its implementation).
        try (FileSystem fileSystem = new SyncingFileSystem(CONFIGURATION)) {
            Reader reader = createReader(fileSystem, path(input));

            // A deletion vector longer than the file would reference nonexistent rows.
            if (reader.getNumberOfRows() < rowsToDelete.length()) {
                throw new IOException("File has fewer rows than deletion vector");
            }
            int deleteRowCount = rowsToDelete.cardinality();
            // Everything deleted: skip writing entirely and report an empty result.
            if (reader.getNumberOfRows() == deleteRowCount) {
                return new OrcFileInfo(0, 0);
            }
            // Row indexes are tracked as int below, so the count must fit in an int.
            if (reader.getNumberOfRows() >= Integer.MAX_VALUE) {
                throw new IOException("File has too many rows");
            }
            int inputRowCount = toIntExact(reader.getNumberOfRows());

            // Mirror the input file's physical layout in the output.
            WriterOptions writerOptions = OrcFile.writerOptions(CONFIGURATION)
                    .memory(new NullMemoryManager())
                    .fileSystem(fileSystem)
                    .compress(reader.getCompression())
                    .inspector(reader.getObjectInspector());

            long start = System.nanoTime();
            // Closer guarantees both resources are closed even if the copy throws.
            try (Closer<RecordReader, IOException> recordReader = closer(reader.rows(), RecordReader::close);
                    Closer<Writer, IOException> writer = closer(createWriter(path(output), writerOptions), Writer::close)) {
                // Carry over the Raptor metadata entry, if the input has one.
                if (reader.hasMetadataValue(OrcFileMetadata.KEY)) {
                    ByteBuffer orcFileMetadata = reader.getMetadataValue(OrcFileMetadata.KEY);
                    writer.get().addUserMetadata(OrcFileMetadata.KEY, orcFileMetadata);
                }
                OrcFileInfo fileInfo = rewrite(recordReader.get(), writer.get(), rowsToDelete, inputRowCount);
                log.debug("Rewrote file %s in %s (input rows: %s, output rows: %s)", input.getName(), nanosSince(start), inputRowCount, inputRowCount - deleteRowCount);
                return fileInfo;
            }
        }
    }

    /**
     * Copies every row not flagged in {@code rowsToDelete} from {@code reader} to
     * {@code writer}, accumulating the output row count and an approximate
     * uncompressed size (see {@link #uncompressedSize(Object)}).
     */
    private static OrcFileInfo rewrite(RecordReader reader, Writer writer, BitSet rowsToDelete, int inputRowCount)
            throws IOException
    {
        Object object = null;
        int row = 0;
        long rowCount = 0;
        long uncompressedSize = 0;

        // Position on the first row that survives deletion; one initial seek is
        // acceptable, subsequent gaps are skipped by reading (see below).
        row = rowsToDelete.nextClearBit(row);
        if (row < inputRowCount) {
            reader.seekToRow(row);
        }

        while (row < inputRowCount) {
            // Rewrites can be long-running; honor thread interruption promptly.
            if (Thread.currentThread().isInterrupted()) {
                throw new InterruptedIOException();
            }
            // seekToRow() is extremely expensive, so skip over deleted rows by
            // reading them one at a time until the reader catches up to 'row'.
            if (reader.getRowNumber() < row) {
                reader.next(object);
                continue;
            }
            // Reuse 'object' across iterations to avoid per-row allocation.
            object = reader.next(object);
            writer.addRow(object);
            rowCount++;
            uncompressedSize += uncompressedSize(object);
            row = rowsToDelete.nextClearBit(row + 1);
        }
        return new OrcFileInfo(rowCount, uncompressedSize);
    }

    // Converts a local file to a Hadoop Path via its URI.
    private static Path path(File input)
    {
        return new Path(input.toURI());
    }

    /**
     * Approximates the uncompressed size in bytes of one ORC value, recursing
     * into structs, lists and maps. Fixed-width types use the SIZE_OF_* byte
     * constants; variable-width types use their actual payload length.
     *
     * @throws IOException for a Writable type this method does not handle
     */
    private static int uncompressedSize(Object object)
            throws IOException
    {
        if (object instanceof OrcStruct) {
            OrcStruct struct = (OrcStruct) object;
            int size = 0;
            for (int i = 0; i < struct.getNumFields(); i++) {
                size += uncompressedSize(getFieldValue(struct, i));
            }
            return size;
        }
        // Null is counted as one byte, same as a boolean.
        if ((object == null) || (object instanceof BooleanWritable)) {
            return SIZE_OF_BYTE;
        }
        if (object instanceof LongWritable) {
            return SIZE_OF_LONG;
        }
        if (object instanceof DoubleWritable) {
            return SIZE_OF_DOUBLE;
        }
        // Decimals are approximated as a long — TODO confirm this matches the
        // sizing convention used elsewhere in Raptor.
        if (object instanceof HiveDecimalWritable) {
            return SIZE_OF_LONG;
        }
        if (object instanceof Text) {
            return ((Text) object).getLength();
        }
        if (object instanceof BytesWritable) {
            return ((BytesWritable) object).getLength();
        }
        if (object instanceof List<?>) {
            int size = 0;
            for (Object element : (Iterable<?>) object) {
                size += uncompressedSize(element);
            }
            return size;
        }
        if (object instanceof Map<?, ?>) {
            int size = 0;
            for (Map.Entry<?, ?> entry : ((Map<?, ?>) object).entrySet()) {
                size += uncompressedSize(entry.getKey());
                size += uncompressedSize(entry.getValue());
            }
            return size;
        }
        throw new IOException("Unhandled ORC object: " + object.getClass().getName());
    }

    /**
     * Immutable result of a rewrite: number of rows written and their
     * approximate uncompressed size in bytes.
     */
    public static class OrcFileInfo
    {
        private final long rowCount;
        private final long uncompressedSize;

        public OrcFileInfo(long rowCount, long uncompressedSize)
        {
            this.rowCount = rowCount;
            this.uncompressedSize = uncompressedSize;
        }

        public long getRowCount()
        {
            return rowCount;
        }

        public long getUncompressedSize()
        {
            return uncompressedSize;
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. package com.azure.resourcemanager.cosmos.implementation; import com.azure.core.http.rest.PagedFlux; import com.azure.core.util.CoreUtils; import com.azure.resourcemanager.cosmos.CosmosManager; import com.azure.resourcemanager.cosmos.fluent.models.DatabaseAccountGetResultsInner; import com.azure.resourcemanager.cosmos.fluent.models.PrivateEndpointConnectionInner; import com.azure.resourcemanager.cosmos.models.Capability; import com.azure.resourcemanager.cosmos.models.ConnectorOffer; import com.azure.resourcemanager.cosmos.models.ConsistencyPolicy; import com.azure.resourcemanager.cosmos.models.CosmosDBAccount; import com.azure.resourcemanager.cosmos.models.DatabaseAccountCreateUpdateParameters; import com.azure.resourcemanager.cosmos.models.DatabaseAccountKind; import com.azure.resourcemanager.cosmos.models.DatabaseAccountListConnectionStringsResult; import com.azure.resourcemanager.cosmos.models.DatabaseAccountListKeysResult; import com.azure.resourcemanager.cosmos.models.DatabaseAccountListReadOnlyKeysResult; import com.azure.resourcemanager.cosmos.models.DatabaseAccountOfferType; import com.azure.resourcemanager.cosmos.models.DatabaseAccountRegenerateKeyParameters; import com.azure.resourcemanager.cosmos.models.DatabaseAccountUpdateParameters; import com.azure.resourcemanager.cosmos.models.DefaultConsistencyLevel; import com.azure.resourcemanager.cosmos.models.FailoverPolicy; import com.azure.resourcemanager.cosmos.models.IpAddressOrRange; import com.azure.resourcemanager.cosmos.models.KeyKind; import com.azure.resourcemanager.cosmos.models.Location; import com.azure.resourcemanager.cosmos.models.PrivateEndpointConnection; import com.azure.resourcemanager.cosmos.models.PrivateLinkResource; import com.azure.resourcemanager.cosmos.models.PrivateLinkServiceConnectionStateProperty; import com.azure.resourcemanager.cosmos.models.RegionForOnlineOffline; import 
com.azure.resourcemanager.cosmos.models.SqlDatabase; import com.azure.resourcemanager.cosmos.models.VirtualNetworkRule; import com.azure.core.management.Region; import com.azure.resourcemanager.resources.fluentcore.arm.models.PrivateEndpointServiceConnectionStatus; import com.azure.resourcemanager.resources.fluentcore.arm.models.implementation.GroupableResourceImpl; import com.azure.resourcemanager.resources.fluentcore.utils.ResourceManagerUtils; import reactor.core.publisher.Mono; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; import com.azure.resourcemanager.resources.fluentcore.utils.PagedConverter; /** The implementation for CosmosDBAccount. */ class CosmosDBAccountImpl extends GroupableResourceImpl<CosmosDBAccount, DatabaseAccountGetResultsInner, CosmosDBAccountImpl, CosmosManager> implements CosmosDBAccount, CosmosDBAccount.Definition, CosmosDBAccount.Update { private List<FailoverPolicy> failoverPolicies; private boolean hasFailoverPolicyChanges; private static final int MAX_DELAY_DUE_TO_MISSING_FAILOVERS = 60 * 10; private Map<String, VirtualNetworkRule> virtualNetworkRulesMap; private PrivateEndpointConnectionsImpl privateEndpointConnections; CosmosDBAccountImpl(String name, DatabaseAccountGetResultsInner innerObject, CosmosManager manager) { super(fixDBName(name), innerObject, manager); this.failoverPolicies = new ArrayList<>(); this.privateEndpointConnections = new PrivateEndpointConnectionsImpl(this.manager().serviceClient().getPrivateEndpointConnections(), this); } @Override public DatabaseAccountKind kind() { return this.innerModel().kind(); } @Override public String documentEndpoint() { return this.innerModel().documentEndpoint(); } @Override public DatabaseAccountOfferType databaseAccountOfferType() { return 
this.innerModel().databaseAccountOfferType(); } @Override public String ipRangeFilter() { if (CoreUtils.isNullOrEmpty(ipRules())) { return null; } return this.ipRules().stream().map(IpAddressOrRange::ipAddressOrRange).collect(Collectors.joining(",")); } @Override public List<IpAddressOrRange> ipRules() { return Collections.unmodifiableList(this.innerModel().ipRules()); } @Override public ConsistencyPolicy consistencyPolicy() { return this.innerModel().consistencyPolicy(); } @Override public DefaultConsistencyLevel defaultConsistencyLevel() { if (this.innerModel().consistencyPolicy() == null) { throw new RuntimeException("Consistency policy is missing!"); } return this.innerModel().consistencyPolicy().defaultConsistencyLevel(); } @Override public List<Location> writableReplications() { return this.innerModel().writeLocations(); } @Override public List<Location> readableReplications() { return this.innerModel().readLocations(); } @Override public DatabaseAccountListKeysResult listKeys() { return this.listKeysAsync().block(); } @Override public Mono<DatabaseAccountListKeysResult> listKeysAsync() { return this .manager() .serviceClient() .getDatabaseAccounts() .listKeysAsync(this.resourceGroupName(), this.name()) .map( DatabaseAccountListKeysResultImpl::new); } @Override public DatabaseAccountListReadOnlyKeysResult listReadOnlyKeys() { return this.listReadOnlyKeysAsync().block(); } @Override public Mono<DatabaseAccountListReadOnlyKeysResult> listReadOnlyKeysAsync() { return this .manager() .serviceClient() .getDatabaseAccounts() .listReadOnlyKeysAsync(this.resourceGroupName(), this.name()) .map( DatabaseAccountListReadOnlyKeysResultImpl::new); } @Override public DatabaseAccountListConnectionStringsResult listConnectionStrings() { return this.listConnectionStringsAsync().block(); } @Override public Mono<DatabaseAccountListConnectionStringsResult> listConnectionStringsAsync() { return this .manager() .serviceClient() .getDatabaseAccounts() 
.listConnectionStringsAsync(this.resourceGroupName(), this.name()) .map( DatabaseAccountListConnectionStringsResultImpl::new); } @Override public List<SqlDatabase> listSqlDatabases() { return this.listSqlDatabasesAsync().collectList().block(); } @Override public PagedFlux<SqlDatabase> listSqlDatabasesAsync() { return PagedConverter.mapPage(this .manager() .serviceClient() .getSqlResources() .listSqlDatabasesAsync(this.resourceGroupName(), this.name()), SqlDatabaseImpl::new); } @Override public List<PrivateLinkResource> listPrivateLinkResources() { return this.listPrivateLinkResourcesAsync().collectList().block(); } @Override public PagedFlux<PrivateLinkResource> listPrivateLinkResourcesAsync() { return PagedConverter.mapPage(this .manager() .serviceClient() .getPrivateLinkResources() .listByDatabaseAccountAsync(this.resourceGroupName(), this.name()), PrivateLinkResourceImpl::new); } @Override public PrivateLinkResource getPrivateLinkResource(String groupName) { return this.getPrivateLinkResourceAsync(groupName).block(); } @Override public Mono<PrivateLinkResource> getPrivateLinkResourceAsync(String groupName) { return this .manager() .serviceClient() .getPrivateLinkResources() .getAsync(this.resourceGroupName(), this.name(), groupName) .map(PrivateLinkResourceImpl::new); } @Override public Map<String, PrivateEndpointConnection> listPrivateEndpointConnection() { return this.listPrivateEndpointConnectionAsync().block(); } @Override public Mono<Map<String, PrivateEndpointConnection>> listPrivateEndpointConnectionAsync() { return this.privateEndpointConnections.asMapAsync(); } @Override public PrivateEndpointConnection getPrivateEndpointConnection(String name) { return this.getPrivateEndpointConnectionAsync(name).block(); } @Override public Mono<PrivateEndpointConnection> getPrivateEndpointConnectionAsync(String name) { return this .privateEndpointConnections .getImplAsync(name) .map(privateEndpointConnection -> privateEndpointConnection); } @Override public boolean 
multipleWriteLocationsEnabled() { return this.innerModel().enableMultipleWriteLocations(); } @Override public boolean cassandraConnectorEnabled() { return this.innerModel().enableCassandraConnector(); } @Override public ConnectorOffer cassandraConnectorOffer() { return this.innerModel().connectorOffer(); } @Override public boolean keyBasedMetadataWriteAccessDisabled() { return this.innerModel().disableKeyBasedMetadataWriteAccess(); } @Override public List<Capability> capabilities() { List<Capability> capabilities = this.innerModel().capabilities(); if (capabilities == null) { capabilities = new ArrayList<>(); } return Collections.unmodifiableList(capabilities); } @Override public List<VirtualNetworkRule> virtualNetworkRules() { List<VirtualNetworkRule> result = (this.innerModel() != null && this.innerModel().virtualNetworkRules() != null) ? this.innerModel().virtualNetworkRules() : new ArrayList<VirtualNetworkRule>(); return Collections.unmodifiableList(result); } @Override public void offlineRegion(Region region) { this.manager().serviceClient().getDatabaseAccounts().offlineRegion(this.resourceGroupName(), this.name(), new RegionForOnlineOffline().withRegion(region.label())); } @Override public Mono<Void> offlineRegionAsync(Region region) { return this .manager() .serviceClient() .getDatabaseAccounts() .offlineRegionAsync(this.resourceGroupName(), this.name(), new RegionForOnlineOffline().withRegion(region.label())); } @Override public void onlineRegion(Region region) { this.manager().serviceClient().getDatabaseAccounts().onlineRegion(this.resourceGroupName(), this.name(), new RegionForOnlineOffline().withRegion(region.label())); } @Override public Mono<Void> onlineRegionAsync(Region region) { return this .manager() .serviceClient() .getDatabaseAccounts() .onlineRegionAsync(this.resourceGroupName(), this.name(), new RegionForOnlineOffline().withRegion(region.label())); } @Override public void regenerateKey(KeyKind keyKind) { 
this.manager().serviceClient().getDatabaseAccounts().regenerateKey(this.resourceGroupName(), this.name(), new DatabaseAccountRegenerateKeyParameters().withKeyKind(keyKind)); } @Override public Mono<Void> regenerateKeyAsync(KeyKind keyKind) { return this .manager() .serviceClient() .getDatabaseAccounts() .regenerateKeyAsync(this.resourceGroupName(), this.name(), new DatabaseAccountRegenerateKeyParameters().withKeyKind(keyKind)); } @Override public CosmosDBAccountImpl withKind(DatabaseAccountKind kind) { this.innerModel().withKind(kind); return this; } @Override public CosmosDBAccountImpl withKind(DatabaseAccountKind kind, Capability... capabilities) { this.innerModel().withKind(kind); this.innerModel().withCapabilities(Arrays.asList(capabilities)); return this; } @Override public CosmosDBAccountImpl withDataModelSql() { this.innerModel().withKind(DatabaseAccountKind.GLOBAL_DOCUMENT_DB); return this; } @Override public CosmosDBAccountImpl withDataModelMongoDB() { this.innerModel().withKind(DatabaseAccountKind.MONGO_DB); return this; } @Override public CosmosDBAccountImpl withDataModelCassandra() { this.innerModel().withKind(DatabaseAccountKind.GLOBAL_DOCUMENT_DB); List<Capability> capabilities = new ArrayList<Capability>(); capabilities.add(new Capability().withName("EnableCassandra")); this.innerModel().withCapabilities(capabilities); this.withTag("defaultExperience", "Cassandra"); return this; } @Override public CosmosDBAccountImpl withDataModelAzureTable() { this.innerModel().withKind(DatabaseAccountKind.GLOBAL_DOCUMENT_DB); List<Capability> capabilities = new ArrayList<Capability>(); capabilities.add(new Capability().withName("EnableTable")); this.innerModel().withCapabilities(capabilities); this.withTag("defaultExperience", "Table"); return this; } @Override public CosmosDBAccountImpl withDataModelGremlin() { this.innerModel().withKind(DatabaseAccountKind.GLOBAL_DOCUMENT_DB); List<Capability> capabilities = new ArrayList<Capability>(); capabilities.add(new 
Capability().withName("EnableGremlin")); this.innerModel().withCapabilities(capabilities); this.withTag("defaultExperience", "Graph"); return this; } @Override public CosmosDBAccountImpl withIpRangeFilter(String ipRangeFilter) { List<IpAddressOrRange> rules = new ArrayList<>(); if (!CoreUtils.isNullOrEmpty(ipRangeFilter)) { for (String ip : ipRangeFilter.split(",")) { rules.add(new IpAddressOrRange().withIpAddressOrRange(ip)); } } this.innerModel().withIpRules(rules); return this; } @Override public CosmosDBAccountImpl withIpRules(List<IpAddressOrRange> ipRules) { this.innerModel().withIpRules(ipRules); return this; } @Override protected Mono<DatabaseAccountGetResultsInner> getInnerAsync() { return this.manager().serviceClient().getDatabaseAccounts().getByResourceGroupAsync(this.resourceGroupName(), this.name()); } @Override public CosmosDBAccountImpl withWriteReplication(Region region) { FailoverPolicy failoverPolicyInner = new FailoverPolicy(); failoverPolicyInner.withLocationName(region.name()); this.hasFailoverPolicyChanges = true; this.failoverPolicies.add(failoverPolicyInner); return this; } @Override public CosmosDBAccountImpl withReadReplication(Region region) { this.ensureFailoverIsInitialized(); FailoverPolicy failoverPolicyInner = new FailoverPolicy(); failoverPolicyInner.withLocationName(region.name()); failoverPolicyInner.withFailoverPriority(this.failoverPolicies.size()); this.hasFailoverPolicyChanges = true; this.failoverPolicies.add(failoverPolicyInner); return this; } @Override public CosmosDBAccountImpl withoutReadReplication(Region region) { this.ensureFailoverIsInitialized(); for (int i = 1; i < this.failoverPolicies.size(); i++) { if (this.failoverPolicies.get(i).locationName() != null) { String locName = formatLocationName(this.failoverPolicies.get(i).locationName()); if (locName.equals(region.name())) { this.failoverPolicies.remove(i); } } } return this; } @Override public CosmosDBAccountImpl withEventualConsistency() { 
this.setConsistencyPolicy(DefaultConsistencyLevel.EVENTUAL, 0, 0); return this; } @Override public CosmosDBAccountImpl withSessionConsistency() { this.setConsistencyPolicy(DefaultConsistencyLevel.SESSION, 0, 0); return this; } @Override public CosmosDBAccountImpl withBoundedStalenessConsistency(long maxStalenessPrefix, int maxIntervalInSeconds) { this.setConsistencyPolicy(DefaultConsistencyLevel.BOUNDED_STALENESS, maxStalenessPrefix, maxIntervalInSeconds); return this; } @Override public CosmosDBAccountImpl withStrongConsistency() { this.setConsistencyPolicy(DefaultConsistencyLevel.STRONG, 0, 0); return this; } @Override public PrivateEndpointConnectionImpl defineNewPrivateEndpointConnection(String name) { return this.privateEndpointConnections.define(name); } @Override public PrivateEndpointConnectionImpl updatePrivateEndpointConnection(String name) { return this.privateEndpointConnections.update(name); } @Override public CosmosDBAccountImpl withoutPrivateEndpointConnection(String name) { this.privateEndpointConnections.remove(name); return this; } CosmosDBAccountImpl withPrivateEndpointConnection(PrivateEndpointConnectionImpl privateEndpointConnection) { this.privateEndpointConnections.addPrivateEndpointConnection(privateEndpointConnection); return this; } @Override public Mono<CosmosDBAccount> createResourceAsync() { return this.doDatabaseUpdateCreate(); } private DatabaseAccountCreateUpdateParameters createUpdateParametersInner(DatabaseAccountGetResultsInner inner) { this.ensureFailoverIsInitialized(); DatabaseAccountCreateUpdateParameters createUpdateParametersInner = new DatabaseAccountCreateUpdateParameters(); createUpdateParametersInner.withLocation(this.regionName().toLowerCase(Locale.ROOT)); createUpdateParametersInner.withConsistencyPolicy(inner.consistencyPolicy()); // createUpdateParametersInner.withDatabaseAccountOfferType( // DatabaseAccountOfferType.STANDARD.toString()); // Enum to Constant createUpdateParametersInner.withIpRules(inner.ipRules()); 
createUpdateParametersInner.withKind(inner.kind()); createUpdateParametersInner.withCapabilities(inner.capabilities()); createUpdateParametersInner.withTags(inner.tags()); createUpdateParametersInner.withEnableMultipleWriteLocations(inner.enableMultipleWriteLocations()); this .addLocationsForParameters( new CreateUpdateLocationParameters(createUpdateParametersInner), this.failoverPolicies); createUpdateParametersInner.withIsVirtualNetworkFilterEnabled(inner.isVirtualNetworkFilterEnabled()); createUpdateParametersInner.withEnableCassandraConnector(inner.enableCassandraConnector()); createUpdateParametersInner.withConnectorOffer(inner.connectorOffer()); createUpdateParametersInner.withEnableAutomaticFailover(inner.enableAutomaticFailover()); createUpdateParametersInner.withDisableKeyBasedMetadataWriteAccess(inner.disableKeyBasedMetadataWriteAccess()); if (this.virtualNetworkRulesMap != null) { createUpdateParametersInner .withVirtualNetworkRules(new ArrayList<VirtualNetworkRule>(this.virtualNetworkRulesMap.values())); this.virtualNetworkRulesMap = null; } return createUpdateParametersInner; } private DatabaseAccountUpdateParameters updateParametersInner(DatabaseAccountGetResultsInner inner) { this.ensureFailoverIsInitialized(); DatabaseAccountUpdateParameters updateParameters = new DatabaseAccountUpdateParameters(); updateParameters.withTags(inner.tags()); updateParameters.withLocation(this.regionName().toLowerCase(Locale.ROOT)); updateParameters.withConsistencyPolicy(inner.consistencyPolicy()); updateParameters.withIpRules(inner.ipRules()); updateParameters.withIsVirtualNetworkFilterEnabled(inner.isVirtualNetworkFilterEnabled()); updateParameters.withEnableAutomaticFailover(inner.enableAutomaticFailover()); updateParameters.withCapabilities(inner.capabilities()); updateParameters.withEnableMultipleWriteLocations(inner.enableMultipleWriteLocations()); updateParameters.withEnableCassandraConnector(inner.enableCassandraConnector()); 
updateParameters.withConnectorOffer(inner.connectorOffer()); updateParameters.withDisableKeyBasedMetadataWriteAccess(inner.disableKeyBasedMetadataWriteAccess()); if (virtualNetworkRulesMap != null) { updateParameters.withVirtualNetworkRules(new ArrayList<>(this.virtualNetworkRulesMap.values())); virtualNetworkRulesMap = null; } this.addLocationsForParameters(new UpdateLocationParameters(updateParameters), this.failoverPolicies); return updateParameters; } private static String fixDBName(String name) { return name.toLowerCase(Locale.ROOT); } private void setConsistencyPolicy( DefaultConsistencyLevel level, long maxStalenessPrefix, int maxIntervalInSeconds) { ConsistencyPolicy policy = new ConsistencyPolicy(); policy.withDefaultConsistencyLevel(level); if (level == DefaultConsistencyLevel.BOUNDED_STALENESS) { policy.withMaxStalenessPrefix(maxStalenessPrefix); policy.withMaxIntervalInSeconds(maxIntervalInSeconds); } this.innerModel().withConsistencyPolicy(policy); } private void addLocationsForParameters(HasLocations locationParameters, List<FailoverPolicy> failoverPolicies) { List<Location> locations = new ArrayList<Location>(); if (failoverPolicies.size() > 0) { for (int i = 0; i < failoverPolicies.size(); i++) { FailoverPolicy policyInner = failoverPolicies.get(i); Location location = new Location(); location.withFailoverPriority(i); location.withLocationName(policyInner.locationName()); locations.add(location); } } else { Location location = new Location(); location.withFailoverPriority(0); location.withLocationName(locationParameters.location()); locations.add(location); } locationParameters.withLocations(locations); } private static String formatLocationName(String locationName) { return locationName.replace(" ", "").toLowerCase(Locale.ROOT); } private Mono<CosmosDBAccount> doDatabaseUpdateCreate() { final CosmosDBAccountImpl self = this; final List<Integer> data = new ArrayList<Integer>(); data.add(0); Mono<DatabaseAccountGetResultsInner> request = null; 
HasLocations locationParameters = null; if (isInCreateMode()) { final DatabaseAccountCreateUpdateParameters createUpdateParametersInner = this.createUpdateParametersInner(this.innerModel()); request = this .manager() .serviceClient() .getDatabaseAccounts() .createOrUpdateAsync(resourceGroupName(), name(), createUpdateParametersInner); locationParameters = new CreateUpdateLocationParameters(createUpdateParametersInner); } else { final DatabaseAccountUpdateParameters updateParametersInner = this.updateParametersInner(this.innerModel()); request = this .manager() .serviceClient() .getDatabaseAccounts() .updateAsync(resourceGroupName(), name(), updateParametersInner); locationParameters = new UpdateLocationParameters(updateParametersInner); } Set<String> locations = locationParameters.locations().stream() .map(location -> formatLocationName(location.locationName())) .collect(Collectors.toSet()); return request .flatMap( databaseAccountInner -> { self.failoverPolicies.clear(); self.hasFailoverPolicyChanges = false; return manager() .databaseAccounts() .getByResourceGroupAsync(resourceGroupName(), name()) .flatMap( databaseAccount -> { if (MAX_DELAY_DUE_TO_MISSING_FAILOVERS > data.get(0) && (databaseAccount.id() == null || databaseAccount.id().length() == 0 || locations.size() != databaseAccount.innerModel().failoverPolicies().size())) { return Mono.empty(); } if (isAFinalProvisioningState(databaseAccount.innerModel().provisioningState())) { for (Location location : databaseAccount.readableReplications()) { if (!isAFinalProvisioningState(location.provisioningState())) { return Mono.empty(); } if (!locations.contains(formatLocationName(location.locationName()))) { return Mono.empty(); } } } else { return Mono.empty(); } self.setInner(databaseAccount.innerModel()); return Mono.just(databaseAccount); }) .repeatWhenEmpty( longFlux -> longFlux .flatMap( index -> { data.set(0, data.get(0) + 30); return Mono.delay(ResourceManagerUtils.InternalRuntimeContext.getDelayDuration( 
manager().serviceClient().getDefaultPollInterval())); })); }); } private void ensureFailoverIsInitialized() { if (this.isInCreateMode()) { return; } if (!this.hasFailoverPolicyChanges) { this.failoverPolicies.clear(); FailoverPolicy[] policyInners = new FailoverPolicy[this.innerModel().failoverPolicies().size()]; this.innerModel().failoverPolicies().toArray(policyInners); Arrays .sort( policyInners, Comparator.comparing(FailoverPolicy::failoverPriority)); for (int i = 0; i < policyInners.length; i++) { this.failoverPolicies.add(policyInners[i]); } this.hasFailoverPolicyChanges = true; } } private boolean isAFinalProvisioningState(String state) { switch (state.toLowerCase(Locale.ROOT)) { case "succeeded": case "canceled": case "failed": return true; default: return false; } } private Map<String, VirtualNetworkRule> ensureVirtualNetworkRules() { if (this.virtualNetworkRulesMap == null) { this.virtualNetworkRulesMap = new HashMap<>(); if (this.innerModel() != null && this.innerModel().virtualNetworkRules() != null) { for (VirtualNetworkRule virtualNetworkRule : this.innerModel().virtualNetworkRules()) { this.virtualNetworkRulesMap.put(virtualNetworkRule.id(), virtualNetworkRule); } } } return this.virtualNetworkRulesMap; } @Override public CosmosDBAccountImpl withVirtualNetwork(String virtualNetworkId, String subnetName) { this.innerModel().withIsVirtualNetworkFilterEnabled(true); String vnetId = virtualNetworkId + "/subnets/" + subnetName; ensureVirtualNetworkRules().put(vnetId, new VirtualNetworkRule().withId(vnetId)); return this; } @Override public CosmosDBAccountImpl withoutVirtualNetwork(String virtualNetworkId, String subnetName) { Map<String, VirtualNetworkRule> vnetRules = ensureVirtualNetworkRules(); vnetRules.remove(virtualNetworkId + "/subnets/" + subnetName); if (vnetRules.size() == 0) { this.innerModel().withIsVirtualNetworkFilterEnabled(false); } return this; } @Override public CosmosDBAccountImpl withVirtualNetworkRules(List<VirtualNetworkRule> 
virtualNetworkRules) { Map<String, VirtualNetworkRule> vnetRules = ensureVirtualNetworkRules(); if (virtualNetworkRules == null || virtualNetworkRules.isEmpty()) { vnetRules.clear(); this.innerModel().withIsVirtualNetworkFilterEnabled(false); return this; } this.innerModel().withIsVirtualNetworkFilterEnabled(true); for (VirtualNetworkRule vnetRule : virtualNetworkRules) { this.virtualNetworkRulesMap.put(vnetRule.id(), vnetRule); } return this; } @Override public CosmosDBAccountImpl withMultipleWriteLocationsEnabled(boolean enabled) { this.innerModel().withEnableMultipleWriteLocations(enabled); return this; } @Override public CosmosDBAccountImpl withCassandraConnector(ConnectorOffer connectorOffer) { this.innerModel().withEnableCassandraConnector(true); this.innerModel().withConnectorOffer(connectorOffer); return this; } @Override public CosmosDBAccountImpl withoutCassandraConnector() { this.innerModel().withEnableCassandraConnector(false); this.innerModel().withConnectorOffer(null); return this; } @Override public CosmosDBAccountImpl withDisableKeyBaseMetadataWriteAccess(boolean disabled) { this.innerModel().withDisableKeyBasedMetadataWriteAccess(disabled); return this; } @Override public void approvePrivateEndpointConnection(String privateEndpointConnectionName) { approvePrivateEndpointConnectionAsync(privateEndpointConnectionName).block(); } @Override public Mono<Void> approvePrivateEndpointConnectionAsync(String privateEndpointConnectionName) { return manager().serviceClient().getPrivateEndpointConnections().createOrUpdateAsync( resourceGroupName(), name(), privateEndpointConnectionName, new PrivateEndpointConnectionInner().withPrivateLinkServiceConnectionState( new PrivateLinkServiceConnectionStateProperty() .withStatus(PrivateEndpointServiceConnectionStatus.APPROVED.toString()))) .then(); } @Override public void rejectPrivateEndpointConnection(String privateEndpointConnectionName) { rejectPrivateEndpointConnectionAsync(privateEndpointConnectionName).block(); } 
@Override
public Mono<Void> rejectPrivateEndpointConnectionAsync(String privateEndpointConnectionName) {
    // Build a connection-state payload marking the connection REJECTED, then
    // upsert it through the service client.
    PrivateLinkServiceConnectionStateProperty rejectedState =
        new PrivateLinkServiceConnectionStateProperty()
            .withStatus(PrivateEndpointServiceConnectionStatus.REJECTED.toString());
    PrivateEndpointConnectionInner connection =
        new PrivateEndpointConnectionInner().withPrivateLinkServiceConnectionState(rejectedState);
    return manager()
        .serviceClient()
        .getPrivateEndpointConnections()
        .createOrUpdateAsync(resourceGroupName(), name(), privateEndpointConnectionName, connection)
        .then();
}

/**
 * Uniform accessor for the location-related fields shared by the create and
 * the update parameter payloads.
 */
interface HasLocations {
    String location();

    List<Location> locations();

    void withLocations(List<Location> locations);
}

/** {@link HasLocations} view backed by {@link DatabaseAccountCreateUpdateParameters}. */
static class CreateUpdateLocationParameters implements HasLocations {
    private final DatabaseAccountCreateUpdateParameters delegate;

    CreateUpdateLocationParameters(DatabaseAccountCreateUpdateParameters wrapped) {
        this.delegate = wrapped;
    }

    @Override
    public String location() {
        return delegate.location();
    }

    @Override
    public List<Location> locations() {
        return delegate.locations();
    }

    @Override
    public void withLocations(List<Location> locations) {
        delegate.withLocations(locations);
    }
}

/** {@link HasLocations} view backed by {@link DatabaseAccountUpdateParameters}. */
static class UpdateLocationParameters implements HasLocations {
    private final DatabaseAccountUpdateParameters delegate;

    UpdateLocationParameters(DatabaseAccountUpdateParameters wrapped) {
        this.delegate = wrapped;
    }

    @Override
    public String location() {
        return delegate.location();
    }

    @Override
    public List<Location> locations() {
        return delegate.locations();
    }

    @Override
    public void withLocations(List<Location> locations) {
        delegate.withLocations(locations);
    }
}
}
/* * Copyright 2013-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.event.listener; import com.facebook.buck.artifact_cache.ArtifactCacheConnectEvent; import com.facebook.buck.artifact_cache.ArtifactCacheEvent; import com.facebook.buck.cli.CommandEvent; import com.facebook.buck.util.perf.PerfStatsTracking; import com.facebook.buck.event.ActionGraphEvent; import com.facebook.buck.event.ArtifactCompressionEvent; import com.facebook.buck.event.BuckEvent; import com.facebook.buck.event.BuckEventListener; import com.facebook.buck.event.ChromeTraceEvent; import com.facebook.buck.event.CompilerPluginDurationEvent; import com.facebook.buck.event.InstallEvent; import com.facebook.buck.event.SimplePerfEvent; import com.facebook.buck.event.StartActivityEvent; import com.facebook.buck.event.TraceEvent; import com.facebook.buck.event.UninstallEvent; import com.facebook.buck.io.PathListing; import com.facebook.buck.io.ProjectFilesystem; import com.facebook.buck.json.ParseBuckFileEvent; import com.facebook.buck.jvm.java.AnnotationProcessingEvent; import com.facebook.buck.jvm.java.tracing.JavacPhaseEvent; import com.facebook.buck.log.CommandThreadFactory; import com.facebook.buck.log.InvocationInfo; import com.facebook.buck.log.Logger; import com.facebook.buck.model.BuildId; import com.facebook.buck.parser.ParseEvent; import com.facebook.buck.rules.BuildEvent; import com.facebook.buck.rules.BuildRule; import 
com.facebook.buck.rules.BuildRuleEvent; import com.facebook.buck.rules.TestSummaryEvent; import com.facebook.buck.step.StepEvent; import com.facebook.buck.timing.Clock; import com.facebook.buck.util.BestCompressionGZIPOutputStream; import com.facebook.buck.util.HumanReadableException; import com.facebook.buck.util.Optionals; import com.facebook.buck.util.concurrent.MostExecutors; import com.facebook.buck.util.unit.SizeUnit; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.CaseFormat; import com.google.common.base.Functions; import com.google.common.base.Joiner; import com.google.common.base.Optional; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.common.eventbus.Subscribe; import java.io.IOException; import java.io.OutputStream; import java.nio.file.Path; import java.text.SimpleDateFormat; import java.util.Date; import java.util.Locale; import java.util.TimeZone; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; /** * Logs events to a json file formatted to be viewed in Chrome Trace View (chrome://tracing). 
*/
public class ChromeTraceBuildListener implements BuckEventListener {

  // Maps UpperCamelCase perf-event ids to lower_underscore trace names.
  // weakValues() lets unused conversions be collected; intern() keeps a
  // single copy of each converted name.
  private static final LoadingCache<String, String> CONVERTED_EVENT_ID_CACHE =
      CacheBuilder
          .newBuilder()
          .weakValues()
          .build(new CacheLoader<String, String>() {
            @Override
            public String load(String key) throws Exception {
              return CaseFormat
                  .UPPER_CAMEL
                  .converterTo(CaseFormat.LOWER_UNDERSCORE)
                  .convert(key)
                  .intern();
            }
          });

  private static final Logger LOG = Logger.get(ChromeTraceBuildListener.class);

  // Seconds to wait for the async writer thread to drain during outputTrace().
  private static final int TIMEOUT_SECONDS = 30;

  private final ProjectFilesystem projectFilesystem;
  private final Clock clock;
  private final int tracesToKeep;
  private final boolean compressTraces;
  private final ObjectMapper mapper;
  // SimpleDateFormat is not thread-safe, hence one instance per thread.
  private final ThreadLocal<SimpleDateFormat> dateFormat;
  private final Path tracePath;
  private final OutputStream traceStream;
  private final JsonGenerator jsonGenerator;
  private final InvocationInfo invocationInfo;
  // Single-threaded executor: serializes all trace-event writes in order.
  private final ExecutorService outputExecutor;

  /**
   * Creates a listener using the US locale and the default time zone.
   */
  public ChromeTraceBuildListener(
      ProjectFilesystem projectFilesystem,
      InvocationInfo invocationInfo,
      Clock clock,
      ObjectMapper objectMapper,
      int tracesToKeep,
      boolean compressTraces) throws IOException {
    this(
        projectFilesystem,
        invocationInfo,
        clock,
        objectMapper,
        Locale.US,
        TimeZone.getDefault(),
        tracesToKeep,
        compressTraces);
  }

  /**
   * Full constructor: opens the trace file, writes the opening JSON array
   * bracket and emits the process-metadata event.
   */
  @VisibleForTesting
  ChromeTraceBuildListener(
      ProjectFilesystem projectFilesystem,
      InvocationInfo invocationInfo,
      Clock clock,
      ObjectMapper objectMapper,
      final Locale locale,
      final TimeZone timeZone,
      int tracesToKeep,
      boolean compressTraces) throws IOException {
    this.invocationInfo = invocationInfo;
    this.projectFilesystem = projectFilesystem;
    this.clock = clock;
    this.mapper = objectMapper;
    this.dateFormat = new ThreadLocal<SimpleDateFormat>() {
      @Override
      protected SimpleDateFormat initialValue() {
        SimpleDateFormat dateFormat =
            new SimpleDateFormat("yyyy-MM-dd.HH-mm-ss", locale);
        dateFormat.setTimeZone(timeZone);
        return dateFormat;
      }
    };
    this.tracesToKeep = tracesToKeep;
    this.compressTraces = compressTraces;
    this.outputExecutor = MostExecutors.newSingleThreadExecutor(
        new CommandThreadFactory(getClass().getName()));
    TracePathAndStream tracePathAndStream = createPathAndStream(invocationInfo);
    this.tracePath = tracePathAndStream.getPath();
    this.traceStream = tracePathAndStream.getStream();
    this.jsonGenerator = objectMapper.getFactory().createGenerator(this.traceStream);
    this.jsonGenerator.writeStartArray();
    addProcessMetadataEvent();
  }

  @VisibleForTesting
  Path getTracePath() {
    return tracePath;
  }

  // Emits the Chrome-trace metadata record naming this process "buck".
  private void addProcessMetadataEvent() {
    submitTraceEvent(
        new ChromeTraceEvent(
            "buck",
            "process_name",
            ChromeTraceEvent.Phase.METADATA,
            /* processId */ 0,
            /* threadId */ 0,
            /* microTime */ 0,
            /* microThreadUserTime */ 0,
            ImmutableMap.of("name", "buck")));
  }

  /**
   * Deletes all but the newest {@code tracesToKeep} "build.*.trace" files from
   * the log directory. Failures are logged, not rethrown.
   */
  @VisibleForTesting
  void deleteOldTraces() {
    if (!projectFilesystem.exists(invocationInfo.getLogDirectoryPath())) {
      return;
    }
    Path traceDirectory = projectFilesystem.getPathForRelativePath(
        invocationInfo.getLogDirectoryPath());
    try {
      for (Path path : PathListing.listMatchingPathsWithFilters(
          traceDirectory,
          "build.*.trace",
          PathListing.GET_PATH_MODIFIED_TIME,
          PathListing.FilterMode.EXCLUDE,
          Optional.of(tracesToKeep),
          Optional.<Long>absent())) {
        projectFilesystem.deleteFileAtPath(path);
      }
    } catch (IOException e) {
      LOG.error(e, "Couldn't list paths in trace directory %s", traceDirectory);
    }
  }

  // Builds "build.<timestamp>.<buildId>.trace[.gz]" under the log directory
  // and opens it, wrapping in gzip when compression is enabled.
  private TracePathAndStream createPathAndStream(InvocationInfo invocationInfo) {
    String filenameTime = dateFormat.get().format(new Date(clock.currentTimeMillis()));
    String traceName =
        String.format("build.%s.%s.trace", filenameTime, invocationInfo.getBuildId());
    if (compressTraces) {
      traceName = traceName + ".gz";
    }
    Path tracePath = invocationInfo.getLogDirectoryPath().resolve(traceName);
    try {
      projectFilesystem.createParentDirs(tracePath);
      OutputStream stream = projectFilesystem.newFileOutputStream(tracePath);
      if (compressTraces) {
        stream = new BestCompressionGZIPOutputStream(stream, true);
      }
      return new
TracePathAndStream(tracePath, stream);
    } catch (IOException e) {
      throw new HumanReadableException(e, "Unable to write trace file: " + e);
    }
  }

  /**
   * Flushes and closes the trace, points the build.trace[.gz] symlink at it
   * and prunes old traces. Called once at the end of the command.
   */
  @Override
  public void outputTrace(BuildId buildId) {
    try {
      LOG.debug("Writing Chrome trace to %s", tracePath);
      // Drain the single-threaded writer before closing the JSON stream.
      outputExecutor.shutdown();
      try {
        if (!outputExecutor.awaitTermination(TIMEOUT_SECONDS, TimeUnit.SECONDS)) {
          LOG.warn("Failed to log buck trace %s. Trace might be corrupt", tracePath);
        }
      } catch (InterruptedException e) {
        // Restore the interrupt flag and continue closing the trace.
        Thread.currentThread().interrupt();
      }
      jsonGenerator.writeEndArray();
      jsonGenerator.close();
      traceStream.close();
      String symlinkName = compressTraces ? "build.trace.gz" : "build.trace";
      Path symlinkPath = projectFilesystem.getBuckPaths().getLogDir().resolve(symlinkName);
      projectFilesystem.createSymLink(
          projectFilesystem.resolve(symlinkPath),
          projectFilesystem.resolve(tracePath),
          true);
      deleteOldTraces();
    } catch (IOException e) {
      throw new HumanReadableException(e, "Unable to write trace file: " + e);
    }
  }

  @Subscribe
  public void commandStarted(CommandEvent.Started started) {
    writeChromeTraceEvent("buck",
        started.getCommandName(),
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.of(
            "command_args", Joiner.on(' ').join(started.getArgs())
        ),
        started);
  }

  @Subscribe
  public void commandFinished(CommandEvent.Finished finished) {
    writeChromeTraceEvent("buck",
        finished.getCommandName(),
        ChromeTraceEvent.Phase.END,
        ImmutableMap.of(
            "command_args", Joiner.on(' ').join(finished.getArgs()),
            "daemon", Boolean.toString(finished.isDaemon())),
        finished);
  }

  @Subscribe
  public void buildStarted(BuildEvent.Started started) {
    writeChromeTraceEvent("buck",
        "build",
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.<String, String>of(),
        started);
  }

  // NOTE(review): synchronized, unlike its sibling handlers -- presumably to
  // serialize concurrent finish notifications; confirm before changing.
  @Subscribe
  public synchronized void buildFinished(BuildEvent.Finished finished) {
    writeChromeTraceEvent("buck",
        "build",
        ChromeTraceEvent.Phase.END,
        ImmutableMap.<String, String>of(),
        finished);
  }

  @Subscribe
  public void ruleStarted(BuildRuleEvent.Started started) {
    BuildRule buildRule = started.getBuildRule();
    writeChromeTraceEvent("buck",
        buildRule.getFullyQualifiedName(),
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.<String, String>of(),
        started);
  }

  @Subscribe
  public void ruleFinished(BuildRuleEvent.Finished finished) {
    writeChromeTraceEvent("buck",
        finished.getBuildRule().getFullyQualifiedName(),
        ChromeTraceEvent.Phase.END,
        ImmutableMap.of(
            "cache_result", finished.getCacheResult().toString().toLowerCase(),
            "success_type",
            finished.getSuccessType().transform(Functions.toStringFunction()).or("failed")
        ),
        finished);
  }

  @Subscribe
  public void ruleResumed(BuildRuleEvent.Resumed resumed) {
    BuildRule buildRule = resumed.getBuildRule();
    writeChromeTraceEvent(
        "buck",
        buildRule.getFullyQualifiedName(),
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.of("rule_key", resumed.getRuleKey()),
        resumed);
  }

  @Subscribe
  public void ruleSuspended(BuildRuleEvent.Suspended suspended) {
    BuildRule buildRule = suspended.getBuildRule();
    writeChromeTraceEvent("buck",
        buildRule.getFullyQualifiedName(),
        ChromeTraceEvent.Phase.END,
        ImmutableMap.of("rule_key", suspended.getRuleKey()),
        suspended);
  }

  @Subscribe
  public void stepStarted(StepEvent.Started started) {
    writeChromeTraceEvent("buck",
        started.getShortStepName(),
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.<String, String>of(),
        started);
  }

  @Subscribe
  public void stepFinished(StepEvent.Finished finished) {
    writeChromeTraceEvent("buck",
        finished.getShortStepName(),
        ChromeTraceEvent.Phase.END,
        ImmutableMap.of(
            "description", finished.getDescription(),
            "exit_code", Integer.toString(finished.getExitCode())),
        finished);
  }

  @Subscribe
  public void parseStarted(ParseEvent.Started started) {
    writeChromeTraceEvent("buck",
        "parse",
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.<String, String>of(),
        started);
  }

  @Subscribe
  public void parseFinished(ParseEvent.Finished finished) {
    writeChromeTraceEvent("buck",
        "parse",
        ChromeTraceEvent.Phase.END,
        ImmutableMap.of(
            "targets",
            Joiner.on(",").join(finished.getBuildTargets())),
        finished);
  }

  @Subscribe
public void simplePerfEvent(SimplePerfEvent perfEvent) {
    // Map the perf-event lifecycle onto Chrome trace phases; UPDATED becomes a
    // zero-duration IMMEDIATE sample.
    ChromeTraceEvent.Phase phase = null;
    switch (perfEvent.getEventType()) {
      case STARTED:
        phase = ChromeTraceEvent.Phase.BEGIN;
        break;
      case FINISHED:
        phase = ChromeTraceEvent.Phase.END;
        break;
      case UPDATED:
        phase = ChromeTraceEvent.Phase.IMMEDIATE;
        break;
    }
    if (phase == null) {
      throw new IllegalStateException(
          "Unsupported perf event type: " + perfEvent.getEventType());
    }
    try {
      writeChromeTraceEvent(
          "buck",
          CONVERTED_EVENT_ID_CACHE.get(perfEvent.getEventId().getValue().intern()),
          phase,
          ImmutableMap.copyOf(
              Maps.transformValues(perfEvent.getEventInfo(), Functions.toStringFunction())),
          perfEvent);
    } catch (ExecutionException e) {
      LOG.warn("Unable to log perf event " + perfEvent, e);
    }
  }

  @Subscribe
  public void parseBuckFileStarted(ParseBuckFileEvent.Started started) {
    writeChromeTraceEvent(
        "buck",
        "parse_file",
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.of(
            "path", started.getBuckFilePath().toString()),
        started);
  }

  @Subscribe
  public void parseBuckFileFinished(ParseBuckFileEvent.Finished finished) {
    writeChromeTraceEvent(
        "buck",
        "parse_file",
        ChromeTraceEvent.Phase.END,
        ImmutableMap.of(
            "path", finished.getBuckFilePath().toString(),
            "num_rules", Integer.toString(finished.getNumRules()),
            "python_profile", finished.getProfile()),
        finished);
  }

  @Subscribe
  public void actionGraphStarted(ActionGraphEvent.Started started) {
    writeChromeTraceEvent(
        "buck",
        "action_graph",
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.<String, String>of(),
        started);
  }

  @Subscribe
  public void actionGraphFinished(ActionGraphEvent.Finished finished) {
    writeChromeTraceEvent(
        "buck",
        "action_graph",
        ChromeTraceEvent.Phase.END,
        ImmutableMap.<String, String>of(),
        finished);
  }

  @Subscribe
  public void installStarted(InstallEvent.Started started) {
    writeChromeTraceEvent("buck",
        "install",
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.<String, String>of(),
        started);
  }

  @Subscribe
  public void installFinished(InstallEvent.Finished finished) {
    writeChromeTraceEvent("buck",
        "install",
        ChromeTraceEvent.Phase.END,
        ImmutableMap.of(
            "target", finished.getBuildTarget().getFullyQualifiedName(),
            "success", Boolean.toString(finished.isSuccess())),
        finished);
  }

  @Subscribe
  public void startActivityStarted(StartActivityEvent.Started started) {
    writeChromeTraceEvent("buck",
        "start_activity",
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.<String, String>of(),
        started);
  }

  @Subscribe
  public void startActivityFinished(StartActivityEvent.Finished finished) {
    writeChromeTraceEvent("buck",
        "start_activity",
        ChromeTraceEvent.Phase.END,
        ImmutableMap.of(
            "target", finished.getBuildTarget().getFullyQualifiedName(),
            "activity_name", finished.getActivityName(),
            "success", Boolean.toString(finished.isSuccess())),
        finished);
  }

  @Subscribe
  public void uninstallStarted(UninstallEvent.Started started) {
    writeChromeTraceEvent("buck",
        "uninstall",
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.<String, String>of(),
        started);
  }

  @Subscribe
  public void uninstallFinished(UninstallEvent.Finished finished) {
    writeChromeTraceEvent("buck",
        "uninstall",
        ChromeTraceEvent.Phase.END,
        ImmutableMap.of(
            "package_name", finished.getPackageName(),
            "success", Boolean.toString(finished.isSuccess())),
        finished);
  }

  @Subscribe
  public void artifactCacheEventStarted(ArtifactCacheEvent.Started started) {
    writeChromeTraceEvent(
        "buck",
        started.getCategory(),
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.of("rule_key", Joiner.on(", ").join(started.getRuleKeys())),
        started);
  }

  @Subscribe
  public void artifactCacheEventFinished(ArtifactCacheEvent.Finished finished) {
    // cache_result is only present when the event carries one.
    ImmutableMap.Builder<String, String> argumentsBuilder =
        ImmutableMap.<String, String>builder()
            .put("success", Boolean.toString(finished.isSuccess()))
            .put("rule_key", Joiner.on(", ").join(finished.getRuleKeys()));
    Optionals.putIfPresent(finished.getCacheResult().transform(Functions.toStringFunction()),
        "cache_result",
        argumentsBuilder);
    writeChromeTraceEvent("buck",
        finished.getCategory(),
ChromeTraceEvent.Phase.END,
        argumentsBuilder.build(),
        finished);
  }

  @Subscribe
  public void artifactCompressionStarted(ArtifactCompressionEvent.Started started) {
    writeArtifactCompressionEvent(started, ChromeTraceEvent.Phase.BEGIN);
  }

  @Subscribe
  public void artifactCompressionFinished(ArtifactCompressionEvent.Finished finished) {
    writeArtifactCompressionEvent(finished, ChromeTraceEvent.Phase.END);
  }

  // Shared BEGIN/END emitter for artifact compression events.
  public void writeArtifactCompressionEvent(
      ArtifactCompressionEvent event,
      ChromeTraceEvent.Phase phase) {
    writeChromeTraceEvent(
        "buck",
        event.getCategory(),
        phase,
        ImmutableMap.of("rule_key", Joiner.on(", ").join(event.getRuleKeys())),
        event);
  }

  @Subscribe
  public void artifactConnectStarted(ArtifactCacheConnectEvent.Started started) {
    writeChromeTraceEvent("buck",
        "artifact_connect",
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.<String, String>of(),
        started);
  }

  @Subscribe
  public void artifactConnectFinished(ArtifactCacheConnectEvent.Finished finished) {
    writeChromeTraceEvent("buck",
        "artifact_connect",
        ChromeTraceEvent.Phase.END,
        ImmutableMap.<String, String>of(),
        finished);
  }

  @Subscribe
  public void javacPhaseStarted(JavacPhaseEvent.Started started) {
    writeChromeTraceEvent(
        "javac",
        started.getPhase().toString(),
        ChromeTraceEvent.Phase.BEGIN,
        started.getArgs(),
        started);
  }

  @Subscribe
  public void javacPhaseFinished(JavacPhaseEvent.Finished finished) {
    writeChromeTraceEvent(
        "javac",
        finished.getPhase().toString(),
        ChromeTraceEvent.Phase.END,
        finished.getArgs(),
        finished);
  }

  // Annotation-processing events use the processor name as the trace category.
  @Subscribe
  public void annotationProcessingStarted(AnnotationProcessingEvent.Started started) {
    writeChromeTraceEvent(
        started.getAnnotationProcessorName(),
        started.getCategory(),
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.<String, String>of(),
        started);
  }

  @Subscribe
  public void annotationProcessingFinished(AnnotationProcessingEvent.Finished finished) {
    writeChromeTraceEvent(
        finished.getAnnotationProcessorName(),
        finished.getCategory(),
        ChromeTraceEvent.Phase.END,
        ImmutableMap.<String, String>of(),
        finished);
  }

  @Subscribe
  public void compilerPluginDurationEventStarted(CompilerPluginDurationEvent.Started started) {
    writeChromeTraceEvent(
        started.getPluginName(),
        started.getDurationName(),
        ChromeTraceEvent.Phase.BEGIN,
        started.getArgs(),
        started);
  }

  @Subscribe
  public void compilerPluginDurationEventFinished(CompilerPluginDurationEvent.Finished finished) {
    writeChromeTraceEvent(
        finished.getPluginName(),
        finished.getDurationName(),
        ChromeTraceEvent.Phase.END,
        finished.getArgs(),
        finished);
  }

  // Periodic COUNTER sample of JVM memory use, reported in megabytes.
  @Subscribe
  public void memoryPerfStats(PerfStatsTracking.MemoryPerfStatsEvent memory) {
    writeChromeTraceEvent(
        "perf",
        "memory",
        ChromeTraceEvent.Phase.COUNTER,
        ImmutableMap.of(
            "used_memory_mb",
            Long.toString(
                SizeUnit.BYTES.toMegabytes(
                    memory.getTotalMemoryBytes() - memory.getFreeMemoryBytes())),
            "free_memory_mb",
            Long.toString(
                SizeUnit.BYTES.toMegabytes(memory.getFreeMemoryBytes())),
            "total_memory_mb",
            Long.toString(
                SizeUnit.BYTES.toMegabytes(memory.getTotalMemoryBytes()))
        ),
        memory);
  }

  // Passthrough for pre-built trace events.
  @Subscribe
  public void traceEvent(TraceEvent event) {
    writeChromeTraceEvent("buck",
        event.getEventName(),
        event.getPhase(),
        event.getProperties(),
        event);
  }

  @Subscribe
  public void testStartedEvent(TestSummaryEvent.Started started) {
    writeChromeTraceEvent("buck",
        "test",
        ChromeTraceEvent.Phase.BEGIN,
        ImmutableMap.of(
            "test_case_name", started.getTestCaseName(),
            "test_name", started.getTestName()),
        started);
  }

  @Subscribe
  public void testFinishedEvent(TestSummaryEvent.Finished finished) {
    writeChromeTraceEvent("buck",
        "test",
        ChromeTraceEvent.Phase.END,
        ImmutableMap.of(
            "test_case_name", finished.getTestCaseName(),
            "test_name", finished.getTestName()),
        finished);
  }

  // Converts a Buck event into a ChromeTraceEvent (times in microseconds) and
  // queues it for asynchronous output.
  private void writeChromeTraceEvent(String category,
      String name,
      ChromeTraceEvent.Phase phase,
      ImmutableMap<String, String> arguments,
      final BuckEvent event) {
    final ChromeTraceEvent chromeTraceEvent = new ChromeTraceEvent(category,
        name,
        phase,
        0,
        event.getThreadId(),
        TimeUnit.NANOSECONDS.toMicros(event.getNanoTime()),
TimeUnit.NANOSECONDS.toMicros(event.getThreadUserNanoTime()),
        arguments);
    submitTraceEvent(chromeTraceEvent);
  }

  /**
   * Queues a single trace event for serialization on the single-threaded
   * output executor, preserving event order. Write failures are deliberately
   * swallowed: a broken trace must never fail the build.
   */
  @SuppressWarnings("PMD.EmptyCatchBlock")
  private void submitTraceEvent(final ChromeTraceEvent chromeTraceEvent) {
    outputExecutor.submit(new Callable<Void>() {
      @Override
      public Void call() throws Exception {
        try {
          mapper.writeValue(jsonGenerator, chromeTraceEvent);
        } catch (IOException e) {
          // Swallow any failures to write.
        }
        return null;
      }
    });
  }

  /**
   * Simple pair of the trace file's path and its opened output stream.
   *
   * <p>Declared {@code static}: the class reads no state from the enclosing
   * listener, so a non-static inner class would only pin a useless reference
   * to the outer instance.
   */
  private static class TracePathAndStream {
    private final Path path;
    private final OutputStream stream;

    public TracePathAndStream(Path path, OutputStream stream) {
      this.path = path;
      this.stream = stream;
    }

    public Path getPath() {
      return path;
    }

    public OutputStream getStream() {
      return stream;
    }
  }
}
/* * Licensed to The Apereo Foundation under one or more contributor license * agreements. See the NOTICE file distributed with this work for * additional information regarding copyright ownership. * * The Apereo Foundation licenses this file to you under the Educational * Community License, Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of the * License at: * * http://opensource.org/licenses/ecl2.txt * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sakaiproject.signup.tool.jsf; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.TimeZone; import javax.faces.context.FacesContext; import javax.servlet.ServletOutputStream; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.sakaiproject.signup.logic.SakaiFacade; import org.sakaiproject.signup.logic.SignupCalendarHelper; import org.sakaiproject.signup.logic.SignupMeetingService; import org.sakaiproject.signup.logic.SignupMessageTypes; import org.sakaiproject.signup.logic.SignupUserActionException; import org.sakaiproject.signup.model.MeetingTypes; import org.sakaiproject.signup.model.SignupAttachment; import org.sakaiproject.signup.model.SignupAttendee; import 
org.sakaiproject.signup.model.SignupMeeting;
import org.sakaiproject.signup.model.SignupTimeslot;
import org.sakaiproject.signup.tool.jsf.attachment.AttachmentHandler;
import org.sakaiproject.signup.tool.util.SignupBeanConstants;
import org.sakaiproject.signup.tool.util.Utilities;
import org.sakaiproject.time.api.TimeService;
import org.sakaiproject.user.api.User;

import lombok.Getter;
import lombok.Setter;

/**
 * <p>
 * This is an abstract base class for JSF Signup tool UIBeans. It provides
 * must-have or commonly used methods such as getMeetingWrapper(), sakaiFacade
 * etc.
 * </P>
 */
abstract public class SignupUIBaseBean implements SignupBeanConstants, SignupMessageTypes, MeetingTypes {

	protected SakaiFacade sakaiFacade;

	protected SignupMeetingService signupMeetingService;

	@Getter @Setter
	protected SignupCalendarHelper calendarHelper;

	private AttachmentHandler attachmentHandler;

	protected SignupMeetingWrapper meetingWrapper;

	protected List<TimeslotWrapper> timeslotWrappers;

	protected TimeslotWrapper timeslotWrapper;

	// Set by updateTimeSlotWrappers() once the current user holds enough sign-ups.
	protected boolean currentUserSignedup;

	// Defaults come from signup tool configuration; both default to "true".
	protected static boolean DEFAULT_SEND_EMAIL = "true".equalsIgnoreCase(Utilities.getSignupConfigParamVal(
			"signup.default.email.notification", "true")) ? true : false;

	protected static boolean DEFAULT_EXPORT_TO_CALENDAR_TOOL = "true".equalsIgnoreCase(Utilities.getSignupConfigParamVal(
			"signup.default.export.to.calendar.setting", "true")) ? true : false;

	protected boolean publishToCalendar = DEFAULT_EXPORT_TO_CALENDAR_TOOL;

	protected boolean sendEmail = DEFAULT_SEND_EMAIL;

	protected Logger logger = LoggerFactory.getLogger(SignupUIBaseBean.class);

	protected Boolean publishedSite;

	//protected boolean sendEmailAttendeeOnly = false;
	protected String sendEmailToSelectedPeopleOnly = SEND_EMAIL_ALL_PARTICIPANTS;

	private int maxSlots;

	private int maxAttendeesPerSlot;

	protected String customLocation;

	protected String customCategory;

	protected static final String ICS_MIME_TYPE="text/calendar";

	/**
	 * This method will get the most updated event/meeting data and handle all
	 * the wrapping process for UI. For efficiency, the data is only
	 * guaranteed fresh at a 10 minute interval (defined by
	 * <b>dataRefreshInterval</b> value).
	 *
	 * @return a SignupMeetingWrapper object.
	 */
	public SignupMeetingWrapper getMeetingWrapper() {
		if (meetingWrapper != null && meetingWrapper.isRefresh()) {
			try {
				SignupMeeting meeting = signupMeetingService.loadSignupMeeting(meetingWrapper.getMeeting().getId(),
						sakaiFacade.getCurrentUserId(), sakaiFacade.getCurrentLocationId());
				meetingWrapper.setMeeting(meeting);
				updateTimeSlotWrappers(meetingWrapper);
			} catch (Exception e) {
				// Reload failed: surface a UI error but keep the stale wrapper.
				Utilities.addErrorMessage(Utilities.rb.getString("db.error_or_event.notExisted"));
				logger.error(Utilities.rb.getString("db.error_or_event.notExisted") + " - " + e.getMessage());
			}
		}
		return meetingWrapper;
	}

	/** process new data into Timeslot wrapper for UI purpose */
	protected void updateTimeSlotWrappers(SignupMeetingWrapper meetingWrapper) {
		// NOTE(review): reads this.meetingWrapper rather than the parameter -- the
		// two are presumably the same instance at every call site; confirm.
		SignupMeeting meeting = this.meetingWrapper.getMeeting();
		if (meeting == null)
			return;
		List timeslots = meeting.getSignupTimeSlots();
		if (timeslots == null)
			return;
		List<TimeslotWrapper> timeslotWrapperList = new ArrayList<TimeslotWrapper>();
		setCurrentUserSignedup(false);// reset and make sure to capture new changes
		int i = 0;
		int totalSignedupSlots=0;
		for (Iterator iter = timeslots.iterator(); iter.hasNext();) {
SignupTimeslot elm = (SignupTimeslot) iter.next();
			TimeslotWrapper tsw = new TimeslotWrapper(elm, sakaiFacade.getCurrentUserId());
			List<AttendeeWrapper> attendeeWrp = new ArrayList<AttendeeWrapper>();
			int posIndex = 0;
			// clean the list
			List<SignupAttendee> cleanedList = getValidAttendees(elm.getAttendees());
			for (SignupAttendee attendee : cleanedList) {
				AttendeeWrapper attWrp = new AttendeeWrapper(attendee, sakaiFacade.getUserDisplayLastFirstName(attendee
						.getAttendeeUserId()));
				attWrp.setPositionIndex(posIndex++);
				attendeeWrp.add(attWrp);
				/* current user is already signed up in one of the timeslot */
				if (attendee.getAttendeeUserId().equals(sakaiFacade.getCurrentUserId()))
					//setCurrentUserSignedup(true);
					totalSignedupSlots++;
			}
			// sorting by displayname -- JIRA: Signup-204; re-number positions after sort
			posIndex = 0;
			Collections.sort(attendeeWrp);
			for (AttendeeWrapper attWrp : attendeeWrp) {
				attWrp.setPositionIndex(posIndex++);
			}
			tsw.setAttendeeWrappers(attendeeWrp);
			tsw.setWaitingList(wrapWaiters(elm.getWaitingList()));
			tsw.setPositionInTSlist(i++);
			timeslotWrapperList.add(tsw);
		}
		// User counts as "signed up" only once they occupy the max number of slots.
		int preferredSlot = meeting.getMaxNumOfSlots();
		if (totalSignedupSlots >= preferredSlot){
			setCurrentUserSignedup(true);
		}
		setTimeslotWrappers(timeslotWrapperList);
	}

	/** process the new data into Meeting wrapper for UI purpose */
	protected String updateMeetingwrapper(SignupMeeting meeting, String destinationUrl) {
		/* if null, reload due to exception */
		try {
			if (meeting == null)
				meeting = signupMeetingService.loadSignupMeeting(this.meetingWrapper.getMeeting().getId(), sakaiFacade
						.getCurrentUserId(), sakaiFacade.getCurrentLocationId());
			getMeetingWrapper().setMeeting(meeting);
			getMeetingWrapper().resetAvailableStatus();// re-process avail. status
			updateTimeSlotWrappers(getMeetingWrapper());
			return destinationUrl;
		} catch (Exception e) {
			// On failure, warn and fall back to the main events list page.
			Utilities.addErrorMessage(Utilities.rb.getString("db.error_or_event.notExisted"));
			logger.warn(Utilities.rb.getString("db.error_or_event.notExisted") + " - " + e.getMessage());
			Utilities.resetMeetingList();
			return MAIN_EVENTS_LIST_PAGE_URL;
		}
	}

	// Intentionally empty; subclasses may override to refresh attachments.
	protected void updateSignupAttachmentWrapper(SignupMeeting meeting){

	}

	/**
	 * setup the event/meeting's signup begin and deadline time and validate it
	 * too
	 */
	protected void setSignupBeginDeadlineData(SignupMeeting meeting, int signupBegin, String signupBeginType,
			int signupDeadline, String signupDeadlineType) throws Exception {
		Date sBegin = Utilities.subTractTimeToDate(meeting.getStartTime(), signupBegin, signupBeginType);
		Date sDeadline = Utilities.subTractTimeToDate(meeting.getEndTime(), signupDeadline, signupDeadlineType);
		if (!START_NOW.equals(signupBeginType) && sBegin.before(new Date())) {
			// a warning for user
			Utilities.addErrorMessage(Utilities.rb.getString("warning.your.event.singup.begin.time.passed.today.time"));
		}
		meeting.setSignupBegins(sBegin);
		if (sBegin.after(sDeadline))
			throw new SignupUserActionException(Utilities.rb.getString("signup.deadline.is.before.signup.begin"));
		meeting.setSignupDeadline(sDeadline);
	}

	// Returns true when the meeting duration exceeds the repeat period.
	// NOTE(review): compares 24*days (hours) against duration in hours via
	// integer division -- sub-hour remainders are truncated; confirm intended.
	public boolean isMeetingOverRepeatPeriod(Date startTime, Date endTime, int repeatPeriodInDays){
		long duration= endTime.getTime()- startTime.getTime();
		if( 24*repeatPeriodInDays - duration /(MINUTE_IN_MILLISEC * Hour_In_MINUTES) >= 0 )
			return false;

		return true;
	}

	/** convert SignupAttendee to AttendeeWrapper object */
	/*private List<AttendeeWrapper> wrapAttendees(List<SignupAttendee> attendees) {
		List<AttendeeWrapper> attendeeWrp = new ArrayList<AttendeeWrapper>();
		int posIndex = 0;
		//clean the list
		List<SignupAttendee> cleanedList = getValidAttendees(attendees);
		for (SignupAttendee attendee : cleanedList) {
			AttendeeWrapper attWrp = new AttendeeWrapper(attendee,
sakaiFacade.getUserDisplayName(attendee.getAttendeeUserId()));
        attWrp.setPositionIndex(posIndex++);
        attendeeWrp.add(attWrp);
        // current user is already signed up in one of the timeslot
        if (attendee.getAttendeeUserId().equals(sakaiFacade.getCurrentUserId())) {
            setCurrentUserSignedup(true);
        }
    }
    return attendeeWrp;
}*/

/**
 * Convert waiting-list SignupAttendee entries to AttendeeWrapper objects
 * (display name resolved as "Last, First" via the Sakai facade).
 */
private List<AttendeeWrapper> wrapWaiters(List<SignupAttendee> attendees) {
    List<AttendeeWrapper> attendeeWrp = new ArrayList<AttendeeWrapper>();
    for (SignupAttendee attendee : attendees) {
        attendeeWrp.add(new AttendeeWrapper(attendee, sakaiFacade.getUserDisplayLastFirstName(attendee.getAttendeeUserId())));
    }

    return attendeeWrp;
}

/**
 * This is a setter.
 *
 * @param meetingWrapper
 *            a SignupMeetingWrapper object.
 */
public void setMeetingWrapper(SignupMeetingWrapper meetingWrapper) {
    this.meetingWrapper = meetingWrapper;
}

/**
 * Get a SakaiFacade object.
 *
 * @return a SakaiFacade object.
 */
public SakaiFacade getSakaiFacade() {
    return sakaiFacade;
}

/**
 * @return the maxSlots limit, read from the "signup.maxSlots" config
 *         parameter; falls back to 500 if missing or unparseable.
 */
public int getMaxSlots() {
    String maxSlotsStringVal = Utilities.getSignupConfigParamVal("signup.maxSlots", "500");
    try{
        maxSlots = Integer.parseInt(maxSlotsStringVal);
    }
    catch (Exception e){
        // bad config value; use the hard-coded default
        maxSlots = 500;
    }
    return maxSlots;
}

/**
 * @param maxSlots the maxSlots to set
 */
public void setMaxSlots(int maxSlots) {
    this.maxSlots = maxSlots;
}

/**
 * @return the maxAttendeesPerSlot limit, read from the
 *         "signup.maxAttendeesPerSlot" config parameter; falls back to 500.
 */
public int getMaxAttendeesPerSlot() {
    String maxAttendeesStringVal = Utilities.getSignupConfigParamVal("signup.maxAttendeesPerSlot", "500");
    try{
        maxAttendeesPerSlot = Integer.parseInt(maxAttendeesStringVal);
    }
    catch(Exception e){
        // bad config value; use the hard-coded default
        maxAttendeesPerSlot=500;
    }
    return maxAttendeesPerSlot;
}

/**
 * @param maxAttendeesPerSlot the maxAttendeesPerSlot to set
 */
public void setMaxAttendeesPerSlot(int maxAttendeesPerSlot) {
    this.maxAttendeesPerSlot = maxAttendeesPerSlot;
}

/**
 * This is a setter.
 *
 * @param sakaiFacade
 *            a SakaiFacade object.
 */
public void setSakaiFacade(SakaiFacade sakaiFacade) {
    this.sakaiFacade = sakaiFacade;
}

/**
 * Get a SignupMeetingService object.
 *
 * @return a SignupMeetingService object.
 */
public SignupMeetingService getSignupMeetingService() {
    return signupMeetingService;
}

/**
 * This is a setter.
 *
 * @param signupMeetingService
 *            a SignupMeetingService object.
 */
public void setSignupMeetingService(SignupMeetingService signupMeetingService) {
    this.signupMeetingService = signupMeetingService;
}

/**
 * This is for UI purpose to see if current user has signed up in the
 * event/meeting.
 */
public boolean isCurrentUserSignedup() {
    return currentUserSignedup;
}

/**
 * This is a setter.
 *
 * @param currentUserSignedup
 *            a boolean value.
 */
public void setCurrentUserSignedup(boolean currentUserSignedup) {
    this.currentUserSignedup = currentUserSignedup;
}

/**
 * Get a list of TimeslotWrapper objects.
 *
 * @return a list of TimeslotWrapper objects.
 */
public List<TimeslotWrapper> getTimeslotWrappers() {
    return timeslotWrappers;
}

/**
 * This is a setter.
 *
 * @param timeslotWrappers
 *            a list of TimeslotWrapper objects.
 */
public void setTimeslotWrappers(List<TimeslotWrapper> timeslotWrappers) {
    this.timeslotWrappers = timeslotWrappers;
}

/**
 * This is only for UI purpose to check if the event/meeting is an open
 * session style and signup is not required.
 */
public boolean getAnnouncementType() {
    boolean anoun = false;
    // Guard against missing wrapper/meeting before reading the meeting type.
    if (meetingWrapper !=null && meetingWrapper.getMeeting() !=null
            && ANNOUNCEMENT.equals(meetingWrapper.getMeeting().getMeetingType()))
        anoun= true;

    return anoun;
}

/**
 * This is only for UI purpose to check if the event/meeting is an
 * individual style (many time slots) and it requires signup.
 * NOTE(review): unlike getAnnouncementType(), no null guard here — confirm
 * the wrapper/meeting are always present when this is called.
 */
public boolean getIndividualType() {
    return INDIVIDUAL.equals(meetingWrapper.getMeeting().getMeetingType());
}

/**
 * This is only for UI purpose to check if the event/meeting is a group
 * style (only one time slot) and it requires signup.
 */
public boolean getGroupType() {
    return GROUP.equals(meetingWrapper.getMeeting().getMeetingType());
}

/**
 * This is only for UI purpose to check if the event/meeting uses
 * custom-defined time slots and it requires signup.
 */
public boolean getCustomTsType() {
    return CUSTOM_TIMESLOTS.equals(meetingWrapper.getMeeting().getMeetingType());
}

/**
 * Get a TimeslotWrapper object for UI.
 *
 * @return an TimeslotWrapper object.
 */
public TimeslotWrapper getTimeslotWrapper() {
    return timeslotWrapper;
}

/**
 * This is a setter.
 *
 * @param timeslotWrapper
 *            a TimeslotWrapper object.
 */
public void setTimeslotWrapper(TimeslotWrapper timeslotWrapper) {
    this.timeslotWrapper = timeslotWrapper;
}

/**
 * Check if email should be sent away. This is used by organizer of an
 * event/meeting.
 * NOTE(review): this getter mutates the sendEmail field (forces it false
 * when the site is unpublished) — a side-effecting getter; confirm intended.
 *
 * @return true if email should be sent away.
 */
public boolean isSendEmail() {
    if (!getPublishedSite())
        sendEmail = false;

    return sendEmail;
}

/**
 * This is a setter.
 *
 * @param sendEmail
 *            a boolean value.
 */
public void setSendEmail(boolean sendEmail) {
    this.sendEmail = sendEmail;
}

/**
 * This is a getter method for UI.
 *
 * @return a constant string.
 */
public String getIndividual() {
    return INDIVIDUAL;
}

/**
 * This is a getter method for UI.
 *
 * @return a constant string.
 */
public String getGroup() {
    return GROUP;
}

/**
 * This is a getter method for UI.
 *
 * @return a constant string.
 */
public String getAnnouncement() {
    return ANNOUNCEMENT;
}

/**
 * This is a getter method for UI
 *
 * @return true if the site is published.
*/
public Boolean getPublishedSite() {
    if (this.publishedSite == null) {
        try {
            boolean status = sakaiFacade.getSiteService().getSite(sakaiFacade.getCurrentLocationId()).isPublished();
            // NOTE(review): new Boolean(...) is deprecated; Boolean.valueOf(status)
            // would avoid allocation — left untouched in this documentation pass.
            this.publishedSite = new Boolean(status);

        } catch (Exception e) {
            logger.warn(e.getMessage());
            // On lookup failure, assume the site is unpublished.
            this.publishedSite = new Boolean(false);
        }
    }
    return publishedSite.booleanValue();
}

/**
 * Remove the given attachment copies from the content host and clear the list.
 * Safe to call with a null list (no-op).
 */
public void cleanUpUnusedAttachmentCopies(List<SignupAttachment> attachList){
    if(attachList !=null){
        for (SignupAttachment attach : attachList) {
            getAttachmentHandler().removeAttachmentInContentHost(attach);
        }
        attachList.clear();
    }
}

// True when the meeting has no main-event attachment (UI visibility flag).
public boolean getSignupAttachmentEmpty(){
    return this.meetingWrapper.getEmptyEventMainAttachment();
}

public AttachmentHandler getAttachmentHandler() {
    return attachmentHandler;
}

public void setAttachmentHandler(AttachmentHandler attachmentHandler) {
    this.attachmentHandler = attachmentHandler;
}

/**
 * Assign each timeslot wrapper a sequential marker (0-based) in list order.
 * NOTE(review): parameter name TimeSlotWrpList breaks lowerCamelCase convention.
 */
protected void markerTimeslots(List<TimeslotWrapper> TimeSlotWrpList){
    int i=0;
    if(TimeSlotWrpList !=null){
        for (TimeslotWrapper tsWrp : TimeSlotWrpList) {
            tsWrp.setTsMarker(i);
            i++;
        }
    }
}

public boolean isPublishToCalendar() {
    return publishToCalendar;
}

public void setPublishToCalendar(boolean publishToCalendar) {
    this.publishToCalendar = publishToCalendar;
}

/*public boolean getSendEmailAttendeeOnly() {
    return sendEmailAttendeeOnly;
}

public void setSendEmailAttendeeOnly(boolean sendEmailAttendeeOnly) {
    this.sendEmailAttendeeOnly = sendEmailAttendeeOnly;
}*/

public String getSendEmailToSelectedPeopleOnly() {
    return sendEmailToSelectedPeopleOnly;
}

public void setSendEmailToSelectedPeopleOnly(
        String sendEmailToSelectedPeopleOnly) {
    this.sendEmailToSelectedPeopleOnly = sendEmailToSelectedPeopleOnly;
}

/**
 * Clean the list of attendees by checking that each user is valid
 * @param attendees List of attendees to be cleaned
 * @return the cleaned list
 */
public List<SignupAttendee> getValidAttendees(List<SignupAttendee> attendees) {
    List<SignupAttendee> cleanedList = new ArrayList<SignupAttendee>();
    for(SignupAttendee attendee: attendees){
        if(sakaiFacade.checkForUser(attendee.getAttendeeUserId())) {
            cleanedList.add(attendee);
        }
    }
    return cleanedList;
}

/**
 * Gets the userId for a user, given an eid or an email address.
 * We check if it matches the eid first, then if it matches an email address.
 * If nothing, return null.
 *
 * @param value the string to lookup, could be an eid or an email address
 * @return the userId or null if User cannot be found
 */
public String getUserIdForEidOrEmail(String value) {
    User u = sakaiFacade.getUserByEid(value);
    if(u==null) {
        u=sakaiFacade.getUserByEmail(value);
    }

    if(u!=null) {
        return u.getId();
    }

    return null;
}

/**
 * Get the eids associated with an email address, ie there may be two or more users with the same email address.
 * We need to be able to handle this in the UI.
 *
 * @param email
 * @return List<String> of eids.
 */
public List<String> getEidsForEmail(String email) {
    List<User> users = sakaiFacade.getUsersByEmail(email);

    List<String> eids = new ArrayList<String>();
    for(User u:users) {
        eids.add(u.getEid());
    }

    return eids;
}

/**
 * Generate a group title of the form
 * "<meetingTitle>-<yyyyMMddHHmm start>-<yyyyMMddHHmm end>".
 * SimpleDateFormat is created per call, so thread-safety is not an issue here.
 */
public String generateGroupTitle(String meetingTitle, SignupTimeslot timeslot) {
    final char SEPARATOR = '-';

    SimpleDateFormat df = new SimpleDateFormat("yyyyMMddHHmm");

    StringBuilder sb = new StringBuilder();
    sb.append(meetingTitle);
    sb.append(SEPARATOR);
    sb.append(df.format(timeslot.getStartTime()));
    sb.append(SEPARATOR);
    sb.append(df.format(timeslot.getEndTime()));

    return sb.toString();
}

// Generate a group description (currently a fixed, localised default string).
public String generateGroupDescription(String meetingTitle, SignupTimeslot timeslot) {
    return Utilities.rb.getString("group.description.default");
}

// Convert a list of SignupAttendees to a list of userIds.
public List<String> convertAttendeesToUuids(List<SignupAttendee> attendees) {
    List<String> uuids = new ArrayList<String>();

    for(SignupAttendee a: attendees) {
        uuids.add(a.getAttendeeUserId());
    }

    return uuids;
}

// Convert a list of AttendeeWrappers to a list of userIds.
public List<String> convertAttendeeWrappersToUuids(List<AttendeeWrapper> attendees) {
    List<String> uuids = new ArrayList<String>();

    for(AttendeeWrapper a: attendees) {
        uuids.add(a.getSignupAttendee().getAttendeeUserId());
    }

    return uuids;
}

/**
 * Helper to get a formatted string of all attendee email addresses for all timeslots
 * so we can use them in a mailto link. Duplicates are removed via a Set.
 * @return String of all email addresses, semicolon-separated
 */
public String getAllAttendeesEmailAddressesFormatted() {

    Set<String> emails = new HashSet<String>();
    StringBuilder sb = new StringBuilder();

    for (TimeslotWrapper tsWrapper : timeslotWrappers) {
        for(AttendeeWrapper atWrapper : tsWrapper.getAttendeeWrappers()) {
            String email = atWrapper.getAttendeeEmail();
            if(StringUtils.isNotBlank(email)){
                emails.add(email);
            }
        }
    }

    for(String e: emails) {
        sb.append(e);
        //for compatibility with Outlook, this should be a semicolon not a comma as per the RFC.
        //Also tested in Thunderbird, Yahoo and GMail.
        sb.append(';');
    }

    //trim off last separator and return
    return StringUtils.removeEnd(sb.toString(), ";");
}

/**
 * Generate and send for download an ICS file for the meeting. Contains no timeslots, just the meeting itself.
 * This method is in this particular bean because 1. We have access to the meeting here, and 2. it is used in more than one sub-bean.
 */
// NOTE(review): this field appears unused in the visible code — confirm before removing.
private UserTimeZone userTimeZone;

public void downloadICSForMeeting() {
    String filePath;
    SignupMeeting meeting = meetingWrapper.getMeeting();
    // Remember the server-side times so they can be restored (the meeting object is cached).
    Date defaultEndTime = meeting.getEndTime();
    Date dfaultStartTime = meeting.getStartTime();
    //pass user preference time in and need to reset back since the object is cached.
meeting.setEndTime(getUserTimezonePreferenceDate(defaultEndTime));
    meeting.setStartTime(getUserTimezonePreferenceDate(dfaultStartTime));
    try{
        filePath = calendarHelper.createCalendarFile(Collections.singletonList(calendarHelper.generateVEventForMeeting(meeting)));
        // Restore the cached meeting's original (server-side) times.
        meeting.setEndTime(defaultEndTime);
        meeting.setStartTime(dfaultStartTime);
    }catch(NullPointerException ne){
        // Restore times before bailing out with a user-facing warning file.
        meeting.setEndTime(defaultEndTime);
        meeting.setStartTime(dfaultStartTime);
        handleICSDownloadWarningToUser();
        return;
    }

    if(StringUtils.isNotBlank(filePath)) {
        logger.debug("filepath: " + filePath);
        sendDownload(filePath, ICS_MIME_TYPE);
    } else {
        logger.error("Could not generate file for download");
        //TODO this could set an error and return perhaps.
    }
}

/**
 * Generate and send for download an ICS file for a single timeslot of the meeting.
 * Like downloadICSForMeeting(), times are temporarily shifted to the user's
 * timezone preference and restored afterwards because the objects are cached.
 */
public void downloadICSForMeetingTimeSlot(TimeslotWrapper timeslotWrapper) {
    String filePath;
    SignupMeeting meeting = meetingWrapper.getMeeting();
    SignupTimeslot currentTimeslot = timeslotWrapper.getTimeSlot();
    // NOTE(review): currentTimeslot is dereferenced on the next two lines, yet the
    // code below null-checks it — the guards can never matter if it is null here.
    Date timeslotEndTime = currentTimeslot.getEndTime();
    Date timeslotStartTime = currentTimeslot.getStartTime();
    Date defaultEndTime = meeting.getEndTime();
    Date dfaultStartTime = meeting.getStartTime();
    //pass user preference time in and need to reset back since the object is cached.
    meeting.setEndTime(getUserTimezonePreferenceDate(defaultEndTime));
    meeting.setStartTime(getUserTimezonePreferenceDate(dfaultStartTime));
    if(currentTimeslot !=null){
        currentTimeslot.setStartTime(getUserTimezonePreferenceDate(timeslotStartTime));
        currentTimeslot.setEndTime(getUserTimezonePreferenceDate(timeslotEndTime));
    }
    try{
        filePath = calendarHelper.createCalendarFile(Collections.singletonList(calendarHelper.generateVEventForTimeslot(meeting, currentTimeslot)));
        //reset timezone back on serverside
        if(currentTimeslot !=null){
            currentTimeslot.setStartTime(timeslotStartTime);
            currentTimeslot.setEndTime(timeslotEndTime);
        }
        meeting.setEndTime(defaultEndTime);
        meeting.setStartTime(dfaultStartTime);
    }catch(NullPointerException ne){
        // Restore all cached times before bailing out with a warning file.
        if(currentTimeslot !=null){
            currentTimeslot.setStartTime(timeslotStartTime);
            currentTimeslot.setEndTime(timeslotEndTime);
        }
        meeting.setEndTime(defaultEndTime);
        meeting.setStartTime(dfaultStartTime);
        handleICSDownloadWarningToUser();
        return;
    }

    if(StringUtils.isNotBlank(filePath)) {
        logger.debug("filepath: " + filePath);
        sendDownload(filePath, ICS_MIME_TYPE);
    } else {
        logger.error("Could not generate file for download");
        //TODO this could set an error and return perhaps.
    }
}

private Date getUserTimezonePreferenceDate(Date dateBasedOnServerTimezone){
    /*
     * Since the external-calendar-service doesn't honor the timezone information (in
     * the iCal file), which is passed by Date object, we just convert it manually here.
     * Once the external-calendar-service can do the job, this method can be removed.
     */
    TimeService timeService = sakaiFacade.getTimeService();
    TimeZone currentUserTimeZone = timeService.getLocalTimeZone();
    Calendar cal = Calendar.getInstance();
    cal.setTime(dateBasedOnServerTimezone);
    cal.setTimeZone(currentUserTimeZone);
    //get user Pref display hour, day, month and year
    // NOTE(review): Calendar constants are accessed via the instance (cal.MINUTE
    // etc.) rather than the class (Calendar.MINUTE) — legal but unconventional.
    int userPrefMinute = cal.get(cal.MINUTE);
    int userPrefHour = cal.get(cal.HOUR_OF_DAY);
    int userPrefDay = cal.get(cal.DAY_OF_MONTH);
    int userPrefMonth = cal.get(cal.MONTH);
    int userPreYear = cal.get(cal.YEAR);

    // Rebuild a server-timezone Calendar carrying the user-preference wall-clock fields.
    Calendar calNew = Calendar.getInstance();
    calNew.setTime(dateBasedOnServerTimezone);
    calNew.set(cal.MINUTE,userPrefMinute);
    calNew.set(cal.HOUR_OF_DAY, userPrefHour);
    calNew.set(cal.DAY_OF_MONTH, userPrefDay);
    calNew.set(cal.MONTH, userPrefMonth);
    calNew.set(cal.YEAR, userPreYear);
    return calNew.getTime();
}

// Log the ICS failure and stream a plain-text warning file to the user instead.
private void handleICSDownloadWarningToUser(){
    logger.error("The site calendar could not be retrieved when using the Signup tool");
    String warningFileName = Utilities.rb.getString("ics_file_name_for_failure_warning");
    String warningMsg = Utilities.rb.getString("ics_message_for_failure_warning");
    sendDownloadWarning(warningFileName,warningMsg);
}

/**
 * Send a file for download.
*
 * @param filePath
 *
 */
protected void sendDownload(String filePath, String mimeType) {
    FacesContext fc = FacesContext.getCurrentInstance();
    ServletOutputStream out = null;
    FileInputStream in = null;

    // Only the last path segment is exposed as the download filename.
    String filename = StringUtils.substringAfterLast(filePath, File.separator);

    try {
        HttpServletResponse response = (HttpServletResponse) fc.getExternalContext().getResponse();
        response.reset();
        // Headers force the browser to treat this as a fresh, non-cached attachment.
        response.setHeader("Pragma", "public");
        response.setHeader("Cache-Control","public, must-revalidate, post-check=0, pre-check=0, max-age=0");
        response.setContentType(mimeType);
        response.setHeader("Content-disposition", "attachment; filename=" + filename);

        in = FileUtils.openInputStream(new File(filePath));
        out = response.getOutputStream();
        IOUtils.copy(in, out);
        out.flush();

    } catch (IOException ex) {
        logger.warn("Error generating file for download:" + ex.getMessage());
    } finally {
        IOUtils.closeQuietly(in);
        IOUtils.closeQuietly(out);
    }
    // Tell JSF the response has been fully written; skip normal rendering.
    fc.responseComplete();
}

/**
 * Send a warning message to user about failed ICS file generation
 * @param fileName
 * @param warningMsg
 */
protected void sendDownloadWarning(String fileName, String warningMsg) {
    FacesContext fc = FacesContext.getCurrentInstance();
    ServletOutputStream out = null;
    try {
        HttpServletResponse response = (HttpServletResponse) fc.getExternalContext().getResponse();
        response.reset();
        response.setHeader("Pragma", "public");
        response.setHeader("Cache-Control","public, must-revalidate, post-check=0, pre-check=0, max-age=0");
        response.setContentType("text/plain");
        response.setHeader("Content-disposition", "attachment; filename=" + fileName);

        out = response.getOutputStream();
        warningMsg= warningMsg!=null? warningMsg:"Missing Scheduler tool on site";
        out.print(warningMsg);
        out.flush();

    } catch (IOException ex) {
        logger.warn("Error generating file for download:" + ex.getMessage());
    } finally {
        // NOTE(review): if getOutputStream() threw, out is still null here and
        // out.close() raises an NPE that the empty catch silently swallows —
        // IOUtils.closeQuietly(out) (as in sendDownload above) would be cleaner.
        try{
            out.close();
        }catch (Exception e){
            //do nothing;
        }
    }
    fc.responseComplete();
}

/**
 * Is ICS calendar generation enabled in the external calendaring service?
 * @return true/false
 */
public boolean isIcsEnabled() {
    return calendarHelper.isIcsEnabled();
}

private String iframeId = "";

/**
 * This is a getter method which provide current Iframe id for refresh
 * IFrame purpose.
 * NOTE(review): the iframeId field above is never read here — the value
 * always comes from the request attribute "sakai.tool.placement.id".
 *
 * @return a String
 */
public String getIframeId() {
    HttpServletRequest request = (HttpServletRequest) FacesContext.getCurrentInstance().getExternalContext()
            .getRequest();
    String iFrameId = (String) request.getAttribute("sakai.tool.placement.id");

    return iFrameId;
}

public void setIframeId(String iframeId) {
    this.iframeId = iframeId;
}

public String getCustomLocation() {
    return customLocation;
}

public void setCustomLocation(String customLocation) {
    this.customLocation = customLocation;
}

public String getCustomCategory() {
    return customCategory;
}

public void setCustomCategory(String customCategory) {
    this.customCategory = customCategory;
}

}
/** * Copyright (C) 2009 GIP RECIA http://www.recia.fr * @Author (C) 2009 GIP RECIA <contact@recia.fr> * @Contributor (C) 2009 SOPRA http://www.sopragroup.com/ * @Contributor (C) 2011 Pierre Legay <pierre.legay@recia.fr> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * ESUP-Portail Commons - Copyright (c) 2006-2009 ESUP-Portail consortium. */ package org.esupportail.commons.services.ldap; import java.io.Serializable; import java.util.ArrayList; import java.util.List; import java.util.Locale; import net.sf.ehcache.CacheManager; import org.esupportail.commons.exceptions.ObjectNotFoundException; import org.esupportail.commons.exceptions.UserNotFoundException; import org.esupportail.commons.services.i18n.I18nService; import org.esupportail.commons.services.logging.Logger; import org.esupportail.commons.services.logging.LoggerImpl; import org.esupportail.commons.utils.Assert; import org.springframework.beans.factory.InitializingBean; import org.springframework.ldap.core.LdapTemplate; import org.springframework.ldap.filter.OrFilter; import org.springframework.ldap.filter.WhitespaceWildcardsFilter; import org.springframework.util.StringUtils; /** * An implementation of LdapUserService that delegates to a CachingLdapEntityServiceImpl. */ public class SearchableLdapUserServiceImpl implements LdapUserService, InitializingBean, Serializable { /** * The serialization id. */ private static final long serialVersionUID = 2538032574940842579L; /** * The default unique attribute. 
*/ private static final String DEFAULT_ID_ATTRIBUTE = "uid"; /** * The default object class. */ private static final String DEFAULT_OBJECT_CLASS = "Person"; /** * A logger. */ private final Logger logger = new LoggerImpl(getClass()); /** * The real LDAP entity service to delegate. */ private CachingLdapEntityServiceImpl service; /** * The attribute used by method getLdapUsersFromToken(). */ private String searchAttribute; /** * The attributes that will be shown when searching for a user. */ private List<String> searchDisplayedAttributes; /** * Bean constructor. */ public SearchableLdapUserServiceImpl() { super(); service = new CachingLdapEntityServiceImpl(); service.setIdAttribute(DEFAULT_ID_ATTRIBUTE); service.setObjectClass(DEFAULT_OBJECT_CLASS); } /** * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() */ public void afterPropertiesSet() { if (searchAttribute == null) { logger.info("property searchAttribute is not set, method getLdapUsersFromToken() will fail"); } else { Assert.notEmpty(searchDisplayedAttributes, "property searchDisplayedAttribute is not set"); } service.afterPropertiesSet(); } /** * @see java.lang.Object#toString() */ @Override public String toString() { return getClass().getSimpleName() + "#" + hashCode() + "[" + "searchDisplayedAttributes=[" + getSearchDisplayedAttributes() + "], " + "searchAttribute=[" + searchAttribute + "], " + "service=" + service + "]"; } /** * @see org.esupportail.commons.services.ldap.LdapUserService#getLdapUser(java.lang.String) */ public LdapUser getLdapUser(final String id) throws LdapException, UserNotFoundException { try { return LdapUserImpl.createLdapUser(service.getLdapEntity(id)); } catch (ObjectNotFoundException e) { throw new UserNotFoundException(e); } } /** * @see org.esupportail.commons.services.ldap.LdapUserService#getLdapUsersFromFilter(java.lang.String) */ public List<LdapUser> getLdapUsersFromFilter(final String filterExpr) throws LdapException { return 
LdapUserImpl.createLdapUsers(service.getLdapEntitiesFromFilter(filterExpr)); } /** * @see org.esupportail.commons.services.ldap.LdapUserService#getLdapUsersFromToken(java.lang.String) */ public List<LdapUser> getLdapUsersFromToken(final String token) throws LdapException { OrFilter filter = new OrFilter(); filter.or(new WhitespaceWildcardsFilter(searchAttribute, token)); filter.or(new WhitespaceWildcardsFilter(service.getIdAttribute(), token)); return getLdapUsersFromFilter(filter.encode()); } /** * @see org.esupportail.commons.services.ldap.LdapUserService#userMatchesFilter( * java.lang.String, java.lang.String) */ public boolean userMatchesFilter(final String id, final String filter) throws LdapException { return service.entityMatchesFilter(id, filter); } /** * @see org.esupportail.commons.services.ldap.BasicLdapService#getStatistics(java.util.Locale) */ public List<String> getStatistics(final Locale locale) { return service.getStatistics(locale); } /** * @see org.esupportail.commons.services.ldap.BasicLdapService#resetStatistics() */ public void resetStatistics() { service.resetStatistics(); } /** * Set the cache manager. * @param cacheManager */ public void setCacheManager(final CacheManager cacheManager) { service.setCacheManager(cacheManager); } /** * Set the cache name. * @param cacheName */ public void setCacheName(final String cacheName) { service.setCacheName(cacheName); } /** * Set the dnSubPath. * @param dnSubPath */ public void setDnSubPath(final String dnSubPath) { service.setDnSubPath(dnSubPath); } /** * Set the i18nService. * @param i18nService */ public void setI18nService(final I18nService i18nService) { service.setI18nService(i18nService); } /** * Set the idAttribute. * @param idAttribute */ public void setIdAttribute(final String idAttribute) { service.setIdAttribute(idAttribute); } /** * Set the attributes. 
* @param attributes */ public void setAttributes(final List<String> attributes) { service.setAttributes(attributes); } /** * Set the attributes. * @param attributes */ public void setAttributesAsString(final String attributes) { List<String> list = new ArrayList<String>(); for (String attribute : attributes.split(",")) { if (StringUtils.hasText(attribute)) { if (!list.contains(attribute)) { list.add(attribute); } } } setAttributes(list); } /** * Set the ldapTemplate. * @param ldapTemplate */ public void setLdapTemplate(final LdapTemplate ldapTemplate) { service.setLdapTemplate(ldapTemplate); } /** * Set the objectClass. * @param objectClass */ public void setObjectClass(final String objectClass) { service.setObjectClass(objectClass); } /** * Set the testFilter. * @param testFilter */ public void setTestFilter(final String testFilter) { service.setTestFilter(testFilter); } /** * @see org.esupportail.commons.services.ldap.BasicLdapService#supportStatistics() */ public boolean supportStatistics() { return service.supportStatistics(); } /** * @see org.esupportail.commons.services.ldap.BasicLdapService#supportsTest() */ public boolean supportsTest() { return service.supportsTest(); } /** * @see org.esupportail.commons.services.ldap.BasicLdapService#test() */ public void test() { service.test(); } /** * @see org.esupportail.commons.services.ldap.BasicLdapService#testLdapFilter(java.lang.String) */ public String testLdapFilter(final String filterExpr) throws LdapException { return service.testLdapFilter(filterExpr); } /** * @see org.esupportail.commons.services.ldap.LdapUserService#getSearchDisplayedAttributes() */ public List<String> getSearchDisplayedAttributes() { return searchDisplayedAttributes; } /** * @param searchDisplayedAttributes the searchDisplayedAttributes to set */ public void setSearchDisplayedAttributes(final List<String> searchDisplayedAttributes) { this.searchDisplayedAttributes = searchDisplayedAttributes; } /** * @param searchDisplayedAttributes the 
searchDisplayedAttributes to set */ public void setSearchDisplayedAttributesAsString(final String searchDisplayedAttributes) { List<String> list = new ArrayList<String>(); for (String attribute : searchDisplayedAttributes.split(",")) { if (StringUtils.hasText(attribute)) { if (!list.contains(attribute)) { list.add(attribute); } } } setSearchDisplayedAttributes(list); } /** * @param searchAttribute the searchAttribute to set */ public void setSearchAttribute(final String searchAttribute) { this.searchAttribute = searchAttribute; } /** * @return the unique id attribute * @see org.esupportail.commons.services.ldap.SimpleLdapEntityServiceImpl#getIdAttribute() */ public String getIdAttribute() { return service.getIdAttribute(); } }
//$Id: SQLLoaderTest.java 11383 2007-04-02 15:34:02Z steve.ebersole@jboss.com $ package org.hibernate.test.legacy; import java.sql.SQLException; import java.util.ArrayList; import java.util.Date; import java.util.List; import org.junit.Test; import org.hibernate.HibernateException; import org.hibernate.Query; import org.hibernate.SQLQuery; import org.hibernate.Session; import org.hibernate.Transaction; import org.hibernate.dialect.HSQLDialect; import org.hibernate.dialect.MySQLDialect; import org.hibernate.dialect.PostgreSQL81Dialect; import org.hibernate.dialect.PostgreSQLDialect; import org.hibernate.dialect.TimesTenDialect; import org.hibernate.testing.FailureExpected; import org.hibernate.testing.SkipForDialect; import org.hibernate.testing.TestForIssue; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; public class SQLLoaderTest extends LegacyTestCase { static int nextInt = 1; static long nextLong = 1; @Override public String[] getMappings() { return new String[] { "legacy/ABC.hbm.xml", "legacy/Category.hbm.xml", "legacy/Simple.hbm.xml", "legacy/Fo.hbm.xml", "legacy/SingleSeveral.hbm.xml", "legacy/Componentizable.hbm.xml", "legacy/CompositeIdId.hbm.xml" }; } @Test public void testTS() throws Exception { Session session = openSession(); Transaction txn = session.beginTransaction(); Simple sim = new Simple( Long.valueOf(1) ); sim.setDate( new Date() ); session.save( sim ); Query q = session.createSQLQuery( "select {sim.*} from Simple {sim} where {sim}.date_ = ?" 
).addEntity( "sim", Simple.class ); q.setTimestamp( 0, sim.getDate() ); assertTrue ( q.list().size()==1 ); session.delete(sim); txn.commit(); session.close(); } @Test public void testFindBySQLStar() throws HibernateException, SQLException { Session session = openSession(); session.beginTransaction(); for ( Object entity : session.createQuery( "from Assignable" ).list() ) { session.delete( entity ); } for ( Object entity : session.createQuery( "from Category" ).list() ) { session.delete( entity ); } for ( Object entity : session.createQuery( "from Simple" ).list() ) { session.delete( entity ); } for ( Object entity : session.createQuery( "from A" ).list() ) { session.delete( entity ); } Category s = new Category(); s.setName(String.valueOf(nextLong++)); session.save(s); Simple simple = new Simple( Long.valueOf(nextLong++) ); simple.init(); session.save( simple ); A a = new A(); session.save(a); B b = new B(); session.save(b); session.flush(); session.createSQLQuery( "select {category.*} from category {category}" ).addEntity( "category", Category.class ).list(); session.createSQLQuery( "select {simple.*} from Simple {simple}" ).addEntity( "simple", Simple.class ).list(); session.createSQLQuery( "select {a.*} from TA {a}" ).addEntity( "a", A.class ).list(); session.getTransaction().commit(); session.close(); } @Test public void testFindBySQLProperties() throws HibernateException, SQLException { Session session = openSession(); session.beginTransaction(); for ( Object entity : session.createQuery( "from Category" ).list() ) { session.delete( entity ); } Category s = new Category(); s.setName(String.valueOf(nextLong++)); session.save(s); s = new Category(); s.setName("WannaBeFound"); session.flush(); Query query = session.createSQLQuery( "select {category.*} from category {category} where {category}.name = :name" ) .addEntity( "category", Category.class ); query.setProperties(s); //query.setParameter("name", s.getName()); query.list(); query = session.createSQLQuery( 
"select {category.*} from category {category} where {category}.name in (:names)" ) .addEntity( "category", Category.class ); String[] str = new String[] { "WannaBeFound", "NotThere" }; query.setParameterList("names", str); query.uniqueResult(); query = session.createSQLQuery( "select {category.*} from category {category} where {category}.name in :names" ) .addEntity( "category", Category.class ); query.setParameterList("names", str); query.uniqueResult(); query = session.createSQLQuery( "select {category.*} from category {category} where {category}.name in (:names)" ) .addEntity( "category", Category.class ); str = new String[] { "WannaBeFound" }; query.setParameterList("names", str); query.uniqueResult(); query = session.createSQLQuery( "select {category.*} from category {category} where {category}.name in :names" ) .addEntity( "category", Category.class ); query.setParameterList("names", str); query.uniqueResult(); session.getTransaction().commit(); session.close(); } @Test public void testFindBySQLAssociatedObjects() throws HibernateException, SQLException { Session s = openSession(); s.beginTransaction(); for ( Object entity : s.createQuery( "from Assignable" ).list() ) { s.delete( entity ); } for ( Object entity : s.createQuery( "from Category" ).list() ) { s.delete( entity ); } Category c = new Category(); c.setName("NAME"); Assignable assn = new Assignable(); assn.setId("i.d."); List l = new ArrayList(); l.add(c); assn.setCategories(l); c.setAssignable(assn); s.save(assn); s.getTransaction().commit(); s.close(); s = openSession(); s.beginTransaction(); List list = s.createSQLQuery( "select {category.*} from category {category}" ).addEntity( "category", Category.class ).list(); list.get(0); s.getTransaction().commit(); s.close(); if ( getDialect() instanceof MySQLDialect ) { return; } s = openSession(); s.beginTransaction(); Query query = s.getNamedQuery("namedsql"); assertNotNull(query); list = query.list(); assertNotNull(list); Object[] values = (Object[]) 
list.get(0); assertNotNull(values[0]); assertNotNull(values[1]); assertTrue("wrong type: " + values[0].getClass(), values[0] instanceof Category); assertTrue("wrong type: " + values[1].getClass(), values[1] instanceof Assignable); s.getTransaction().commit(); s.close(); } @Test @SkipForDialect( MySQLDialect.class ) public void testPropertyResultSQL() throws HibernateException, SQLException { Session s = openSession(); s.beginTransaction(); for ( Object entity : s.createQuery( "from Assignable" ).list() ) { s.delete( entity ); } for ( Object entity : s.createQuery( "from Category" ).list() ) { s.delete( entity ); } Category c = new Category(); c.setName("NAME"); Assignable assn = new Assignable(); assn.setId("i.d."); List l = new ArrayList(); l.add(c); assn.setCategories(l); c.setAssignable(assn); s.save(assn); s.getTransaction().commit(); s.close(); s = openSession(); s.beginTransaction(); Query query = s.getNamedQuery("nonaliasedsql"); assertNotNull(query); List list = query.list(); assertNotNull(list); assertTrue(list.get(0) instanceof Category); s.getTransaction().commit(); s.close(); } @Test public void testFindBySQLMultipleObject() throws HibernateException, SQLException { Session s = openSession(); s.beginTransaction(); for ( Object entity : s.createQuery( "from Assignable" ).list() ) { s.delete( entity ); } for ( Object entity : s.createQuery( "from Category" ).list() ) { s.delete( entity ); } s.getTransaction().commit(); s.close(); s = openSession(); s.beginTransaction(); Category c = new Category(); c.setName("NAME"); Assignable assn = new Assignable(); assn.setId("i.d."); List l = new ArrayList(); l.add(c); assn.setCategories(l); c.setAssignable(assn); s.save(assn); s.flush(); c = new Category(); c.setName("NAME2"); assn = new Assignable(); assn.setId("i.d.2"); l = new ArrayList(); l.add(c); assn.setCategories(l); c.setAssignable(assn); s.save(assn); s.flush(); assn = new Assignable(); assn.setId("i.d.3"); s.save(assn); s.getTransaction().commit(); 
s.close(); if ( getDialect() instanceof MySQLDialect ) { return; } s = openSession(); s.beginTransaction(); String sql = "select {category.*}, {assignable.*} from category {category}, \"assign-able\" {assignable}"; List list = s.createSQLQuery( sql ).addEntity( "category", Category.class ).addEntity( "assignable", Assignable.class ).list(); assertTrue(list.size() == 6); // crossproduct of 2 categories x 3 assignables assertTrue(list.get(0) instanceof Object[]); s.getTransaction().commit(); s.close(); } @Test public void testFindBySQLParameters() throws HibernateException, SQLException { Session s = openSession(); s.beginTransaction(); for ( Object entity : s.createQuery( "from Assignable" ).list() ) { s.delete( entity ); } for ( Object entity : s.createQuery( "from Category" ).list() ) { s.delete( entity ); } s.getTransaction().commit(); s.close(); s = openSession(); s.beginTransaction(); Category c = new Category(); c.setName("Good"); Assignable assn = new Assignable(); assn.setId("i.d."); List l = new ArrayList(); l.add(c); assn.setCategories(l); c.setAssignable(assn); s.save(assn); s.flush(); c = new Category(); c.setName("Best"); assn = new Assignable(); assn.setId("i.d.2"); l = new ArrayList(); l.add(c); assn.setCategories(l); c.setAssignable(assn); s.save(assn); s.flush(); c = new Category(); c.setName("Better"); assn = new Assignable(); assn.setId("i.d.7"); l = new ArrayList(); l.add(c); assn.setCategories(l); c.setAssignable(assn); s.save(assn); s.flush(); assn = new Assignable(); assn.setId("i.d.3"); s.save(assn); s.getTransaction().commit(); s.close(); s = openSession(); s.beginTransaction(); Query basicParam = s.createSQLQuery( "select {category.*} from category {category} where {category}.name = 'Best'" ) .addEntity( "category", Category.class ); List list = basicParam.list(); assertEquals(1, list.size()); Query unnamedParam = s.createSQLQuery( "select {category.*} from category {category} where {category}.name = ? or {category}.name = ?" 
) .addEntity( "category", Category.class ); unnamedParam.setString(0, "Good"); unnamedParam.setString(1, "Best"); list = unnamedParam.list(); assertEquals(2, list.size()); Query namedParam = s.createSQLQuery( "select {category.*} from category {category} where ({category}.name=:firstCat or {category}.name=:secondCat)" ) .addEntity( "category", Category.class); namedParam.setString("firstCat", "Better"); namedParam.setString("secondCat", "Best"); list = namedParam.list(); assertEquals(2, list.size()); s.getTransaction().commit(); s.close(); } @Test @SkipForDialect( { HSQLDialect.class, PostgreSQL81Dialect.class, PostgreSQLDialect.class } ) public void testEscapedJDBC() throws HibernateException, SQLException { Session session = openSession(); session.beginTransaction(); for ( Object entity : session.createQuery( "from A" ).list() ) { session.delete( entity ); } A savedA = new A(); savedA.setName("Max"); session.save(savedA); B savedB = new B(); session.save(savedB); session.flush(); int count = session.createQuery("from A").list().size(); session.getTransaction().commit(); session.close(); session = openSession(); session.beginTransaction(); Query query; if( getDialect() instanceof TimesTenDialect) { // TimesTen does not permit general expressions (like UPPER) in the second part of a LIKE expression, // so we execute a similar test query = session.createSQLQuery("select identifier_column as {a.id}, clazz_discriminata as {a.class}, count_ as {a.count}, name as {a.name} from TA where {fn ucase(name)} like 'MAX'" ) .addEntity( "a", A.class ); } else { query = session.createSQLQuery( "select identifier_column as {a.id}, clazz_discriminata as {a.class}, count_ as {a.count}, name as {a.name} from TA where {fn ucase(name)} like {fn ucase('max')}" ) .addEntity( "a", A.class ); } List list = query.list(); assertNotNull(list); assertEquals(1, list.size()); session.getTransaction().commit(); session.close(); } @Test public void testDoubleAliasing() throws HibernateException, 
SQLException { Session session = openSession(); session.beginTransaction(); for ( Object entity : session.createQuery( "from A" ).list() ) { session.delete( entity ); } A savedA = new A(); savedA.setName("Max"); session.save(savedA); B savedB = new B(); session.save(savedB); session.flush(); int count = session.createQuery("from A").list().size(); session.getTransaction().commit(); session.close(); session = openSession(); session.beginTransaction(); String sql = "select a.identifier_column as {a1.id}, " + " a.clazz_discriminata as {a1.class}, " + " a.count_ as {a1.count}, " + " a.name as {a1.name}, " + " b.identifier_column as {a2.id}, " + " b.clazz_discriminata as {a2.class}, " + " b.count_ as {a2.count}, " + " b.name as {a2.name} " + "from TA a, TA b " + "where a.identifier_column = b.identifier_column"; Query query = session.createSQLQuery( sql ).addEntity( "a1", A.class ).addEntity( "a2", A.class ); List list = query.list(); assertNotNull(list); assertEquals(2, list.size()); session.getTransaction().commit(); session.close(); } @Test public void testEmbeddedCompositeProperties() throws HibernateException, SQLException { Session session = openSession(); session.beginTransaction(); Single s = new Single(); s.setId("my id"); s.setString("string 1"); session.save(s); session.getTransaction().commit(); session = openSession(); session.beginTransaction(); SQLQuery query = session.createSQLQuery( "select {sing.*} from Single {sing}" ).addEntity( "sing", Single.class ); List list = query.list(); assertTrue(list.size()==1); session.clear(); query = session.createSQLQuery( "select {sing.*} from Single {sing} where sing.id = ?" ).addEntity( "sing", Single.class ); query.setString(0, "my id"); list = query.list(); assertTrue(list.size()==1); session.clear(); query = session.createSQLQuery( "select s.id as {sing.id}, s.string_ as {sing.string}, s.prop as {sing.prop} from Single s where s.id = ?" 
) .addEntity( "sing", Single.class ); query.setString(0, "my id"); list = query.list(); assertTrue(list.size()==1); session.clear(); query = session.createSQLQuery( "select s.id as {sing.id}, s.string_ as {sing.string}, s.prop as {sing.prop} from Single s where s.id = ?" ) .addEntity( "sing", Single.class ); query.setString(0, "my id"); list = query.list(); assertTrue(list.size()==1); session.getTransaction().commit(); session.close(); } @Test @FailureExpected( jiraKey = "unknown" ) public void testReturnPropertyComponentRename() throws HibernateException, SQLException { // failure expected because this was a regression introduced previously which needs to get tracked down. Componentizable componentizable = setupComponentData(); Session session = openSession(); session.beginTransaction(); Query namedQuery = session.getNamedQuery("queryComponentWithOtherColumn"); List list = namedQuery.list(); assertEquals(1, list.size()); assertEquals( "flakky comp", ( (Componentizable) list.get(0) ).getComponent().getName() ); session.clear(); session.delete(componentizable); session.getTransaction().commit(); session.close(); } @Test public void testComponentStar() throws HibernateException, SQLException { componentTest("select {comp.*} from Componentizable comp"); } @Test public void testComponentNoStar() throws HibernateException, SQLException { componentTest("select comp.id as {comp.id}, comp.nickName as {comp.nickName}, comp.name as {comp.component.name}, comp.subName as {comp.component.subComponent.subName}, comp.subName1 as {comp.component.subComponent.subName1} from Componentizable comp"); } private void componentTest(String sql) throws SQLException { Componentizable c = setupComponentData(); Session session = openSession(); session.beginTransaction(); SQLQuery q = session.createSQLQuery( sql ).addEntity( "comp", Componentizable.class ); List list = q.list(); assertEquals(list.size(),1); Componentizable co = (Componentizable) list.get(0); assertEquals(c.getNickName(), 
co.getNickName()); assertEquals(c.getComponent().getName(), co.getComponent().getName()); assertEquals(c.getComponent().getSubComponent().getSubName(), co.getComponent().getSubComponent().getSubName()); session.delete( co ); session.getTransaction().commit(); session.close(); } private Componentizable setupComponentData() throws SQLException { Session session = sessionFactory().openSession(); session.beginTransaction(); Componentizable c = new Componentizable(); c.setNickName("Flacky"); Component component = new Component(); component.setName("flakky comp"); SubComponent subComponent = new SubComponent(); subComponent.setSubName("subway"); component.setSubComponent(subComponent); c.setComponent(component); session.save(c); session.getTransaction().commit(); session.clear(); return c; } @Test @SkipForDialect( MySQLDialect.class ) public void testFindSimpleBySQL() throws Exception { Session session = openSession(); session.beginTransaction(); Category s = new Category(); s.setName(String.valueOf(nextLong++)); session.save(s); session.flush(); Query query = session.createSQLQuery( "select s.category_key_col as {category.id}, s.name as {category.name}, s.\"assign-able-id\" as {category.assignable} from {category} s" ) .addEntity( "category", Category.class ); List list = query.list(); assertNotNull(list); assertTrue(list.size() > 0); assertTrue(list.get(0) instanceof Category); session.getTransaction().commit(); session.close(); // How do we handle objects with composite id's ? 
(such as Single) } @Test public void testFindBySQLSimpleByDiffSessions() throws Exception { Session session = openSession(); session.beginTransaction(); Category s = new Category(); s.setName(String.valueOf(nextLong++)); session.save(s); session.getTransaction().commit(); session.close(); if ( getDialect() instanceof MySQLDialect ) { return; } session = openSession(); session.beginTransaction(); Query query = session.createSQLQuery( "select s.category_key_col as {category.id}, s.name as {category.name}, s.\"assign-able-id\" as {category.assignable} from {category} s" ) .addEntity( "category", Category.class ); List list = query.list(); assertNotNull(list); assertTrue(list.size() > 0); assertTrue(list.get(0) instanceof Category); // How do we handle objects that does not have id property (such as Simple ?) // How do we handle objects with composite id's ? (such as Single) session.getTransaction().commit(); session.close(); } @Test public void testFindBySQLDiscriminatedSameSession() throws Exception { Session session = openSession(); session.beginTransaction(); for ( Object entity : session.createQuery( "from A" ).list() ) { session.delete( entity ); } A savedA = new A(); session.save(savedA); B savedB = new B(); session.save(savedB); session.flush(); Query query = session.createSQLQuery( "select identifier_column as {a.id}, clazz_discriminata as {a.class}, name as {a.name}, count_ as {a.count} from TA {a}" ) .addEntity( "a", A.class ); List list = query.list(); assertNotNull(list); assertEquals(2, list.size()); A a1 = (A) list.get(0); A a2 = (A) list.get(1); assertTrue((a2 instanceof B) || (a1 instanceof B)); assertFalse(a1 instanceof B && a2 instanceof B); if (a1 instanceof B) { assertSame(a1, savedB); assertSame(a2, savedA); } else { assertSame(a2, savedB); assertSame(a1, savedA); } session.clear(); List list2 = session.getNamedQuery("propertyResultDiscriminator").list(); assertEquals(2, list2.size()); session.getTransaction().commit(); session.close(); } @Test 
public void testFindBySQLDiscriminatedDiffSession() throws Exception { Session session = openSession(); session.beginTransaction(); for ( Object entity : session.createQuery( "from A" ).list() ) { session.delete( entity ); } A savedA = new A(); session.save(savedA); B savedB = new B(); session.save(savedB); session.getTransaction().commit(); int count = session.createQuery("from A").list().size(); session.close(); session = openSession(); session.beginTransaction(); Query query = session.createSQLQuery( "select identifier_column as {a.id}, clazz_discriminata as {a.class}, count_ as {a.count}, name as {a.name} from TA" ) .addEntity( "a", A.class ); List list = query.list(); assertNotNull(list); assertEquals(count, list.size()); session.getTransaction().commit(); session.close(); } @Test @TestForIssue( jiraKey = "HHH-21" ) public void testCompositeIdId() throws HibernateException, SQLException { Session s = openSession(); s.beginTransaction(); CompositeIdId id = new CompositeIdId(); id.setName("Max"); id.setSystem("c64"); id.setId("games"); s.save(id); s.getTransaction().commit(); s.close(); s = openSession(); s.beginTransaction(); // having a composite id with one property named id works since the map used by sqlloader to map names to properties handles it. String sql = "select system as {c.system}, id as {c.id}, name as {c.name}, foo as {c.composite.foo}, bar as {c.composite.bar} from CompositeIdId where system=? and id=?"; SQLQuery query = s.createSQLQuery( sql ).addEntity( "c", CompositeIdId.class ); query.setString(0, "c64"); query.setString(1, "games"); CompositeIdId id2 = (CompositeIdId) query.uniqueResult(); check(id, id2); s.getTransaction().commit(); s.close(); s = openSession(); s.beginTransaction(); CompositeIdId useForGet = new CompositeIdId(); useForGet.setSystem("c64"); useForGet.setId("games"); // this doesn't work since the verification does not take column span into respect! 
CompositeIdId getted = (CompositeIdId) s.get(CompositeIdId.class, useForGet); check(id,getted); s.getTransaction().commit(); s.close(); } private void check(CompositeIdId id, CompositeIdId id2) { assertEquals(id,id2); assertEquals(id.getName(), id2.getName()); assertEquals(id.getId(), id2.getId()); assertEquals(id.getSystem(), id2.getSystem()); } }
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.plugins.groovy.lang.resolve.ast; import com.intellij.lang.java.JavaLanguage; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.*; import com.intellij.psi.impl.PsiSuperMethodImplUtil; import com.intellij.psi.impl.light.LightMethodBuilder; import com.intellij.psi.impl.light.LightParameter; import com.intellij.psi.impl.source.tree.java.PsiCompositeModifierList; import com.intellij.psi.util.MethodSignature; import com.intellij.psi.util.MethodSignatureUtil; import com.intellij.psi.util.PsiUtil; import com.intellij.psi.util.TypeConversionUtil; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.hash.HashSet; import gnu.trove.THashMap; import icons.JetgroovyIcons; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.plugins.groovy.GroovyLanguage; import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrField; import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrExtendsClause; import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrImplementsClause; import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.GrTypeDefinition; import org.jetbrains.plugins.groovy.lang.psi.impl.GrAnnotationUtil; import org.jetbrains.plugins.groovy.lang.psi.impl.PsiImplUtil; import 
org.jetbrains.plugins.groovy.lang.psi.impl.statements.expressions.TypesUtil; import org.jetbrains.plugins.groovy.lang.psi.util.GrClassImplUtil; import org.jetbrains.plugins.groovy.lang.psi.util.GroovyCommonClassNames; import java.util.*; /** * @author Max Medvedev */ public class DelegatedMethodsContributor extends AstTransformContributor { @NotNull @Override public Members collect(@NotNull GrTypeDefinition clazz) { final Members result = Members.create(); doCollectMethods(clazz, result.getMethods()); doCollectImplementsTypes(clazz, result.getImplementsTypes()); return result; } private static void doCollectMethods(@NotNull final GrTypeDefinition clazz, @NotNull Collection<PsiMethod> collector) { Set<PsiClass> processed = new HashSet<PsiClass>(); if (!checkForDelegate(clazz)) return; Map<MethodSignature, PsiMethod> signatures = new THashMap<MethodSignature, PsiMethod>(MethodSignatureUtil.METHOD_PARAMETERS_ERASURE_EQUALITY); initializeSignatures(clazz, PsiSubstitutor.EMPTY, signatures, processed); List<PsiMethod> methods = new ArrayList<PsiMethod>(); process(clazz, PsiSubstitutor.EMPTY, true, new HashSet<PsiClass>(), processed, methods, clazz, false); final Set<PsiMethod> result = new LinkedHashSet<PsiMethod>(); for (PsiMethod method : methods) { addMethodChecked(signatures, method, PsiSubstitutor.EMPTY, result); } collector.addAll(result); } private static boolean checkForDelegate(GrTypeDefinition clazz) { for (GrField field : clazz.getFields()) { if (PsiImplUtil.getAnnotation(field, GroovyCommonClassNames.GROOVY_LANG_DELEGATE) != null) return true; } return false; } /** * Adds 'method' to 'signatures' if it doesn't yet contain any method with the same signature or replaces abstract methods */ private static void addMethodChecked(Map<MethodSignature, PsiMethod> signatures, PsiMethod method, PsiSubstitutor substitutor, @Nullable Set<PsiMethod> resultSet) { if (method.isConstructor()) return; if (method.hasModifierProperty(PsiModifier.STATIC)) return; final 
MethodSignature signature = method.getSignature(substitutor); final PsiMethod old = signatures.get(signature); if (old != null) { //if (method.hasModifierProperty(PsiModifier.ABSTRACT)) return; if (!old.hasModifierProperty(PsiModifier.ABSTRACT)) return; if (resultSet != null) resultSet.remove(old); } signatures.put(signature, method); if (resultSet != null) resultSet.add(method); } /** * Adds all code methods of clazz add its super classes to signatures. Doesn't walk into interfaces because all methods from them will be overloaded in any case. * Besides Some of interfaces came from delegates and they should be visited during the following processing. * * @param clazz current class * @param substitutor super class substitutor of clazz * @param signatures map to initialize * @param classes already visited classes */ private static void initializeSignatures(PsiClass clazz, PsiSubstitutor substitutor, Map<MethodSignature, PsiMethod> signatures, Set<PsiClass> classes) { if (clazz.isInterface()) return; if (classes.add(clazz)) { final List<PsiMethod> methods; if (clazz instanceof GrTypeDefinition) { methods = new ArrayList<PsiMethod>(); GrClassImplUtil.collectMethodsFromBody((GrTypeDefinition)clazz, methods); } else { methods = Arrays.asList(clazz.getMethods()); } for (PsiMethod method : methods) { addMethodChecked(signatures, method, substitutor, null); } for (PsiClassType type : getSuperTypes(clazz)) { final PsiClassType.ClassResolveResult result = type.resolveGenerics(); final PsiClass superClass = result.getElement(); if (superClass == null) continue; final PsiSubstitutor superClassSubstitutor = TypeConversionUtil.getSuperClassSubstitutor(superClass, clazz, substitutor); initializeSignatures(superClass, superClassSubstitutor, signatures, classes); } } } /** * The key method of contributor. 
It collects all delegating methods of clazz * * @param clazz class to process * @param processedWithoutDeprecated already visited classes which deprecated methods were not processsed * @param processedAll already visited classes which all methods were processed * @param collector result collection */ private static void process(PsiClass clazz, PsiSubstitutor superClassSubstitutor, boolean shouldProcessDeprecated, Set<PsiClass> processedWithoutDeprecated, Set<PsiClass> processedAll, List<PsiMethod> collector, GrTypeDefinition classToDelegateTo, boolean keepParameterAnnotations) { final List<PsiMethod> result = new ArrayList<PsiMethod>(); //process super methods before delegated methods for (PsiClassType superType : getSuperTypes(clazz)) { processClassInner(superType, superClassSubstitutor, shouldProcessDeprecated, result, classToDelegateTo, processedWithoutDeprecated, processedAll, keepParameterAnnotations); } if (clazz instanceof GrTypeDefinition) { //search for @Delegate fields and collect methods from them for (GrField field : ((GrTypeDefinition)clazz).getFields()) { final PsiAnnotation delegate = PsiImplUtil.getAnnotation(field, GroovyCommonClassNames.GROOVY_LANG_DELEGATE); if (delegate == null) continue; final PsiType type = field.getDeclaredType(); if (!(type instanceof PsiClassType)) continue; processClassInner((PsiClassType)type, superClassSubstitutor, shouldDelegateDeprecated(delegate), result, classToDelegateTo, processedWithoutDeprecated, processedAll, shouldKeepParameterAnnotations(delegate)); } } collector.addAll(result); } private static List<PsiClassType> getSuperTypes(PsiClass clazz) { if (clazz instanceof GrTypeDefinition) { final GrExtendsClause elist = ((GrTypeDefinition)clazz).getExtendsClause(); final GrImplementsClause ilist = ((GrTypeDefinition)clazz).getImplementsClause(); if (elist == null && ilist == null) return ContainerUtil.emptyList(); final ArrayList<PsiClassType> types = new ArrayList<PsiClassType>(); if (elist != null) 
    // NOTE(review): tail of a method whose header lies above this chunk — appears to be the
    // else-branch of a supertype collector reading extends/implements reference lists.
    ContainerUtil.addAll(types, elist.getReferencedTypes());
    if (ilist != null) ContainerUtil.addAll(types, ilist.getReferencedTypes());
    return types;
  }
  else {
    final PsiReferenceList elist = clazz.getExtendsList();
    final PsiReferenceList ilist = clazz.getImplementsList();
    if (elist == null && ilist == null) return ContainerUtil.emptyList();
    final ArrayList<PsiClassType> types = new ArrayList<PsiClassType>();
    if (elist != null) ContainerUtil.addAll(types, elist.getReferencedTypes());
    if (ilist != null) ContainerUtil.addAll(types, ilist.getReferencedTypes());
    return types;
  }
}

/**
 * Resolves one supertype reference and, unless it is one of the "root" types
 * (java.lang.Object, GroovyObject, GroovyObjectSupport), collects delegate methods
 * from it and recurses into its own supertypes via {@code process}.
 *
 * Visited classes are tracked in two sets so a class first seen without deprecated
 * members can be revisited later when deprecated members are requested, but never
 * the other way around.
 */
private static void processClassInner(PsiClassType type,
                                      PsiSubstitutor superClassSubstitutor,
                                      boolean shouldProcessDeprecated,
                                      List<PsiMethod> result,
                                      GrTypeDefinition classToDelegateTo,
                                      Set<PsiClass> processedWithoutDeprecated,
                                      Set<PsiClass> processedAll,
                                      boolean keepParameterAnnotationsNew) {
  final PsiClassType.ClassResolveResult resolveResult = type.resolveGenerics();
  final PsiClass psiClass = resolveResult.getElement();
  if (psiClass == null) return;

  final String qname = psiClass.getQualifiedName();
  // Methods inherited from Object / GroovyObject are never delegated.
  if (CommonClassNames.JAVA_LANG_OBJECT.equals(qname)) return;
  if (GroovyCommonClassNames.GROOVY_OBJECT.equals(qname)) return;
  if (GroovyCommonClassNames.GROOVY_OBJECT_SUPPORT.equals(qname)) return;

  // Compose this type's own substitution with the one accumulated from the superclass chain.
  final PsiSubstitutor substitutor = TypesUtil.composeSubstitutors(resolveResult.getSubstitutor(), superClassSubstitutor);

  if (processedAll.contains(psiClass)) return;
  if (!shouldProcessDeprecated && processedWithoutDeprecated.contains(psiClass)) return;

  if (shouldProcessDeprecated) {
    processedAll.add(psiClass);
  }
  else {
    processedWithoutDeprecated.add(psiClass);
  }

  collectMethods(psiClass, substitutor, shouldProcessDeprecated, classToDelegateTo, result, keepParameterAnnotationsNew);
  process(psiClass, substitutor, shouldProcessDeprecated, processedWithoutDeprecated, processedAll, result, classToDelegateTo, keepParameterAnnotationsNew);
}

/**
 * Adds a delegate method to {@code collector} for every instance method of
 * {@code currentClass} that is eligible for delegation. Skipped: constructors,
 * statics, overrides of Object/GroovyObject methods, and (optionally) methods
 * annotated {@code @Deprecated}.
 */
private static void collectMethods(PsiClass currentClass,
                                   PsiSubstitutor currentClassSubstitutor,
                                   boolean shouldProcessDeprecated,
                                   GrTypeDefinition classToDelegateTo,
                                   Collection<PsiMethod> collector,
                                   boolean keepParameterAnnotations) {
  final List<PsiMethod> methods;
  if (currentClass instanceof GrTypeDefinition) {
    // For Groovy classes read only the methods declared in the body (avoids synthetic members).
    methods = new ArrayList<PsiMethod>();
    GrClassImplUtil.collectMethodsFromBody((GrTypeDefinition)currentClass, methods);
  }
  else {
    methods = Arrays.asList(currentClass.getMethods());
  }
  for (PsiMethod method : methods) {
    if (method.isConstructor() || method.hasModifierProperty(PsiModifier.STATIC)) continue;
    if (overridesObjectOrGroovyObject(method)) continue;
    if (!shouldProcessDeprecated && PsiImplUtil.getAnnotation(method, CommonClassNames.JAVA_LANG_DEPRECATED) != null) continue;
    collector.add(generateDelegateMethod(method, classToDelegateTo, currentClassSubstitutor, keepParameterAnnotations));
  }
}

/**
 * Returns true when {@code method} is (by name and deepest super method) an override
 * of a java.lang.Object or groovy.lang.GroovyObject method — such methods are not delegated.
 */
private static boolean overridesObjectOrGroovyObject(PsiMethod method) {
  final String name = method.getName();
  // Cheap name check first; only then resolve the super-method hierarchy.
  if (!OBJECT_METHODS.contains(name) && !GROOVY_OBJECT_METHODS.contains(name)) return false;
  final PsiMethod superMethod = PsiSuperMethodImplUtil.findDeepestSuperMethod(method);
  if (superMethod == null) return false;
  final PsiClass superClass = superMethod.getContainingClass();
  if (superClass == null) return false;
  final String qname = superClass.getQualifiedName();
  return CommonClassNames.JAVA_LANG_OBJECT.equals(qname) || GroovyCommonClassNames.GROOVY_OBJECT.equals(qname);
}

/** Reads the @Delegate "deprecated" attribute; defaults to false when absent. */
private static boolean shouldDelegateDeprecated(PsiAnnotation delegate) {
  final Boolean result = GrAnnotationUtil.inferBooleanAttribute(delegate, "deprecated");
  return result != null && result.booleanValue();
}

/** Reads the @Delegate "parameterAnnotations" attribute; defaults to false when absent. */
private static boolean shouldKeepParameterAnnotations(PsiAnnotation delegate) {
  final Boolean keepParameterAnnotations = GrAnnotationUtil.inferBooleanAttribute(delegate, "parameterAnnotations");
  return keepParameterAnnotations != null && keepParameterAnnotations.booleanValue();
}

/**
 * Builds a synthetic (light) public method on {@code superClass} that mirrors
 * {@code method} with {@code substitutor} applied to its return and parameter types.
 *
 * When the substitutor is raw for the declaring class, types are erased and the
 * method's own type parameters are not copied; otherwise type parameters are kept.
 * Parameter annotations are carried over only when {@code keepParameterAnnotations}.
 */
private static PsiMethod generateDelegateMethod(PsiMethod method, PsiClass superClass, PsiSubstitutor substitutor, boolean keepParameterAnnotations) {
  final LightMethodBuilder builder = new LightMethodBuilder(superClass.getManager(), GroovyLanguage.INSTANCE, method.getName());
  builder.setContainingClass(superClass);
  builder.setMethodReturnType(substitutor.substitute(method.getReturnType()));
  builder.setNavigationElement(method);
  builder.addModifier(PsiModifier.PUBLIC);
  final PsiTypeParameter[] typeParameters = method.getTypeParameters();
  final PsiClass containingClass = method.getContainingClass();
  boolean isRaw = containingClass != null && PsiUtil.isRawSubstitutor(containingClass, substitutor);
  if (isRaw) {
    // Raw usage of the declaring class: also treat the method's own type parameters as raw.
    substitutor = JavaPsiFacade.getInstance(method.getProject()).getElementFactory().createRawSubstitutor(substitutor, typeParameters);
  }
  if (!isRaw) {
    for (PsiTypeParameter typeParameter : typeParameters) {
      builder.addTypeParameter(typeParameter);
    }
  }
  final PsiParameter[] originalParameters = method.getParameterList().getParameters();
  for (int i = 0; i < originalParameters.length; i++) {
    PsiParameter originalParameter = originalParameters[i];
    PsiType type;
    if (isRaw) {
      type = TypeConversionUtil.erasure(substitutor.substitute(originalParameter.getType()));
    }
    else {
      type = substitutor.substitute(originalParameter.getType());
    }
    if (type == null) {
      // Fall back to java.lang.Object when substitution yields no type.
      type = TypesUtil.getJavaLangObject(superClass);
    }
    // Unnamed parameters get positional names p0, p1, ...
    final LightParameter lightParameter = new LightParameter(StringUtil.notNullize(originalParameter.getName(), "p" + i), type, builder, JavaLanguage.INSTANCE);
    if (keepParameterAnnotations) {
      final PsiCompositeModifierList delegatingModifierList = new PsiCompositeModifierList(method.getManager(), Collections.singletonList(originalParameter.getModifierList()));
      lightParameter.setModifierList(delegatingModifierList);
    }
    builder.addParameter(lightParameter);
  }
  builder.setBaseIcon(JetgroovyIcons.Groovy.Method);
  return new DelegatedMethod(builder, method);
}

// Method names that may override java.lang.Object members.
private static final Set<String> OBJECT_METHODS = ContainerUtil.newHashSet("equals", "hashCode", "getClass", "clone", "toString", "notify", "notifyAll", "wait", "finalize");
// Method names that may override groovy.lang.GroovyObject members.
private static final Set<String> GROOVY_OBJECT_METHODS = ContainerUtil.newHashSet("invokeMethod", "getProperty", "setProperty", "getMetaClass", "setMetaClass");

/**
 * Collects interface types that {@code grType} should appear to implement because a
 * field is annotated with @Delegate(interfaces=true): the delegated type's own
 * implemented interfaces plus, when the delegated type is itself an interface, that type.
 */
private static void doCollectImplementsTypes(GrTypeDefinition grType, Collection<PsiClassType> result) {
  final GrField[] fields = grType.getCodeFields();
  for (GrField field : fields) {
    final PsiAnnotation delegate = PsiImplUtil.getAnnotation(field, GroovyCommonClassNames.GROOVY_LANG_DELEGATE);
    if (delegate == null) continue;
    final boolean shouldImplement = shouldImplementDelegatedInterfaces(delegate);
    if (!shouldImplement) continue;
    final PsiType type = field.getDeclaredType();
    if (!(type instanceof PsiClassType)) continue;
    final PsiClass psiClass = ((PsiClassType)type).resolve();
    if (psiClass == null) continue;
    result.addAll(Arrays.asList(psiClass.getImplementsListTypes()));
    if (psiClass.isInterface()) {
      result.add((PsiClassType)type);
    }
  }
}

/** Reads the @Delegate "interfaces" attribute; defaults to true when absent. */
private static boolean shouldImplementDelegatedInterfaces(PsiAnnotation delegate) {
  final Boolean result = GrAnnotationUtil.inferBooleanAttribute(delegate, "interfaces");
  return result == null || result.booleanValue();
}
}
/*
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.ads.googleads.v9.resources;

import com.google.api.pathtemplate.PathTemplate;
import com.google.api.resourcenames.ResourceName;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * Resource name for the pattern {@code customers/{customer_id}/adGroupFeeds/{ad_group_id}~{feed_id}}.
 *
 * <p>Immutable; obtain instances via {@link #of}, {@link #parse} or {@link #newBuilder()}.
 */
@Generated("by gapic-generator-java")
public class AdGroupFeedName implements ResourceName {
  private static final PathTemplate CUSTOMER_ID_AD_GROUP_ID_FEED_ID =
      PathTemplate.createWithoutUrlEncoding(
          "customers/{customer_id}/adGroupFeeds/{ad_group_id}~{feed_id}");
  // Lazily built, published via double-checked locking (field is volatile).
  private volatile Map<String, String> fieldValuesMap;
  private final String customerId;
  private final String adGroupId;
  private final String feedId;

  @Deprecated
  protected AdGroupFeedName() {
    customerId = null;
    adGroupId = null;
    feedId = null;
  }

  private AdGroupFeedName(Builder builder) {
    customerId = Preconditions.checkNotNull(builder.getCustomerId());
    adGroupId = Preconditions.checkNotNull(builder.getAdGroupId());
    feedId = Preconditions.checkNotNull(builder.getFeedId());
  }

  public String getCustomerId() {
    return customerId;
  }

  public String getAdGroupId() {
    return adGroupId;
  }

  public String getFeedId() {
    return feedId;
  }

  public static Builder newBuilder() {
    return new Builder();
  }

  public Builder toBuilder() {
    return new Builder(this);
  }

  /** Creates a name from its three components; none may be null. */
  public static AdGroupFeedName of(String customerId, String adGroupId, String feedId) {
    return newBuilder().setCustomerId(customerId).setAdGroupId(adGroupId).setFeedId(feedId).build();
  }

  /** Formats the components into the canonical resource-name string. */
  public static String format(String customerId, String adGroupId, String feedId) {
    return newBuilder()
        .setCustomerId(customerId)
        .setAdGroupId(adGroupId)
        .setFeedId(feedId)
        .build()
        .toString();
  }

  /**
   * Parses a formatted resource name; returns null for the empty string and throws
   * (from PathTemplate.validatedMatch) when the string does not match the pattern.
   */
  public static AdGroupFeedName parse(String formattedString) {
    if (formattedString.isEmpty()) {
      return null;
    }
    Map<String, String> matchMap =
        CUSTOMER_ID_AD_GROUP_ID_FEED_ID.validatedMatch(
            formattedString, "AdGroupFeedName.parse: formattedString not in valid format");
    return of(matchMap.get("customer_id"), matchMap.get("ad_group_id"), matchMap.get("feed_id"));
  }

  public static List<AdGroupFeedName> parseList(List<String> formattedStrings) {
    List<AdGroupFeedName> list = new ArrayList<>(formattedStrings.size());
    for (String formattedString : formattedStrings) {
      list.add(parse(formattedString));
    }
    return list;
  }

  /** Null entries are rendered as the empty string. */
  public static List<String> toStringList(List<AdGroupFeedName> values) {
    List<String> list = new ArrayList<>(values.size());
    for (AdGroupFeedName value : values) {
      if (value == null) {
        list.add("");
      } else {
        list.add(value.toString());
      }
    }
    return list;
  }

  public static boolean isParsableFrom(String formattedString) {
    return CUSTOMER_ID_AD_GROUP_ID_FEED_ID.matches(formattedString);
  }

  @Override
  public Map<String, String> getFieldValuesMap() {
    if (fieldValuesMap == null) {
      synchronized (this) {
        if (fieldValuesMap == null) {
          ImmutableMap.Builder<String, String> fieldMapBuilder = ImmutableMap.builder();
          if (customerId != null) {
            fieldMapBuilder.put("customer_id", customerId);
          }
          if (adGroupId != null) {
            fieldMapBuilder.put("ad_group_id", adGroupId);
          }
          if (feedId != null) {
            fieldMapBuilder.put("feed_id", feedId);
          }
          fieldValuesMap = fieldMapBuilder.build();
        }
      }
    }
    return fieldValuesMap;
  }

  public String getFieldValue(String fieldName) {
    return getFieldValuesMap().get(fieldName);
  }

  @Override
  public String toString() {
    return CUSTOMER_ID_AD_GROUP_ID_FEED_ID.instantiate(
        "customer_id", customerId, "ad_group_id", adGroupId, "feed_id", feedId);
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    // FIX: was `o != null || getClass() == o.getClass()`, which throws NPE for
    // equals(null) and ClassCastException for any non-null foreign type, violating
    // the Object.equals contract. The guard must be a conjunction.
    if (o != null && getClass() == o.getClass()) {
      AdGroupFeedName that = ((AdGroupFeedName) o);
      return Objects.equals(this.customerId, that.customerId)
          && Objects.equals(this.adGroupId, that.adGroupId)
          && Objects.equals(this.feedId, that.feedId);
    }
    return false;
  }

  @Override
  public int hashCode() {
    int h = 1;
    h *= 1000003;
    h ^= Objects.hashCode(customerId);
    h *= 1000003;
    h ^= Objects.hashCode(adGroupId);
    h *= 1000003;
    h ^= Objects.hashCode(feedId);
    return h;
  }

  /** Builder for customers/{customer_id}/adGroupFeeds/{ad_group_id}~{feed_id}. */
  public static class Builder {
    private String customerId;
    private String adGroupId;
    private String feedId;

    protected Builder() {}

    public String getCustomerId() {
      return customerId;
    }

    public String getAdGroupId() {
      return adGroupId;
    }

    public String getFeedId() {
      return feedId;
    }

    public Builder setCustomerId(String customerId) {
      this.customerId = customerId;
      return this;
    }

    public Builder setAdGroupId(String adGroupId) {
      this.adGroupId = adGroupId;
      return this;
    }

    public Builder setFeedId(String feedId) {
      this.feedId = feedId;
      return this;
    }

    private Builder(AdGroupFeedName adGroupFeedName) {
      this.customerId = adGroupFeedName.customerId;
      this.adGroupId = adGroupFeedName.adGroupId;
      this.feedId = adGroupFeedName.feedId;
    }

    public AdGroupFeedName build() {
      return new AdGroupFeedName(this);
    }
  }
}
package water.network;

import water.H2O;
import water.util.Log;

import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLEngineResult;
import javax.net.ssl.SSLException;
import javax.net.ssl.SSLSession;
import java.io.IOException;
import java.net.SocketException;
import java.nio.ByteBuffer;
import java.nio.channels.ByteChannel;
import java.nio.channels.SocketChannel;

/**
 * This class is based on:
 * <a href="https://docs.oracle.com/javase/8/docs/technotes/guides/security/jsse/JSSERefGuide.html">Oracle's JSSE guide.</a>
 * <a href="https://docs.oracle.com/javase/8/docs/technotes/guides/security/jsse/samples/sslengine/SSLEngineSimpleDemo.java">Oracle's SSLEngine demo.</a>
 *
 * It's a simple wrapper around SocketChannels which enables SSL/TLS
 * communication using {@link javax.net.ssl.SSLEngine}.
 *
 * Buffer roles: netIn/netOut hold encrypted bytes moving to/from the socket;
 * peerAppData holds decrypted incoming bytes not yet copied to the caller.
 * All buffers are heap-allocated (ByteBuffer.allocate), which copy() relies on.
 */
class SSLSocketChannel implements ByteChannel {

    // Empty buffer for handshakes
    private static final ByteBuffer EMPTY_BUFFER = ByteBuffer.allocate(0);

    // Buffer holding encrypted outgoing data
    private ByteBuffer netInBuffer;
    // Buffer holding encrypted incoming data
    private ByteBuffer netOutBuffer;
    // Buffer holding decrypted incoming data
    private ByteBuffer peerAppData;
    // NOTE(review): the two comments above look swapped relative to the field names
    // (netInBuffer is read from the socket, netOutBuffer is written to it) — confirm.

    private SocketChannel channel = null;
    private SSLEngine sslEngine = null;

    private boolean closing = false;
    private boolean closed = false;

    private boolean handshakeComplete = false;

    // Performs the full TLS handshake in the constructor, so a fully constructed
    // instance is immediately ready for read()/write().
    SSLSocketChannel(SocketChannel channel, SSLEngine sslEngine) throws IOException {
        this.channel = channel;
        this.sslEngine = sslEngine;
        sslEngine.setEnableSessionCreation(true);
        SSLSession session = sslEngine.getSession();
        prepareBuffers(session);
        handshake();
    }

    @Override
    public boolean isOpen() {
        return channel.isOpen();
    }

    @Override
    public void close() throws IOException {
        // NOTE(review): pending encrypted output is discarded (netOutBuffer.clear())
        // and the close_notify produced by closeOutbound() is never wrapped/sent
        // before the raw channel is closed — confirm this abrupt close is intended.
        closing = true;
        sslEngine.closeOutbound();
        sslEngine.getSession().invalidate();
        netOutBuffer.clear();
        channel.close();
        closed = true;
    }

    // Sizes the three buffers from the (possibly renegotiated-later) session parameters.
    private void prepareBuffers(SSLSession session) throws SocketException {
        int appBufferSize = session.getApplicationBufferSize();
        // Less is not more. More is more. Bigger than the app buffer size so successful unwraps() don't cause BUFFER_OVERFLOW
        // Value 64 was based on other frameworks using it and some manual testing. Might require tuning in the future.
        peerAppData = ByteBuffer.allocate(appBufferSize + 64);
        int netBufferSize = session.getPacketBufferSize();
        netInBuffer = ByteBuffer.allocate(netBufferSize);
        netOutBuffer = ByteBuffer.allocate(netBufferSize);
    }

    // -----------------------------------------------------------
    // HANDSHAKE
    // -----------------------------------------------------------

    // Current handshake status; updated by handshakeWrap/handshakeUnwrap/tasks.
    private SSLEngineResult.HandshakeStatus hs;

    // Drives the SSLEngine handshake state machine until completion
    // (blocking reads/writes on the underlying channel).
    private void handshake() throws IOException {
        Log.debug("Starting SSL handshake...");
        sslEngine.beginHandshake();
        hs = sslEngine.getHandshakeStatus();
        SSLEngineResult initHandshakeStatus;
        while (!handshakeComplete) {
            switch (hs) {
                case NOT_HANDSHAKING: {
                    //should never happen
                    throw new IOException("NOT_HANDSHAKING during handshake");
                }
                case FINISHED:
                    // Done only once all our handshake bytes have been flushed to the wire.
                    // NOTE(review): if bytes remain, nothing in this branch drains them —
                    // verify the loop cannot spin here.
                    handshakeComplete = !netOutBuffer.hasRemaining();
                    break;
                case NEED_WRAP: {
                    // Produce outgoing handshake data and send it.
                    initHandshakeStatus = handshakeWrap();
                    if ( initHandshakeStatus.getStatus() == SSLEngineResult.Status.OK ){
                        if (hs == SSLEngineResult.HandshakeStatus.NEED_TASK) {
                            tasks();
                        }
                    }
                    break;
                }
                case NEED_UNWRAP: {
                    // Consume incoming handshake data from the peer.
                    initHandshakeStatus = handshakeUnwrap();
                    if ( initHandshakeStatus.getStatus() == SSLEngineResult.Status.OK ){
                        if (hs == SSLEngineResult.HandshakeStatus.NEED_TASK) {
                            tasks();
                        }
                    }
                    break;
                }
                // SSL needs to perform some delegating tasks before it can continue.
                // Those tasks will be run in the same thread and can be blocking.
                case NEED_TASK:
                    tasks();
                    break;
            }
        }
        Log.debug("SSL handshake finished successfully!");
    }

    // Wraps an empty source buffer to produce handshake bytes and writes them out.
    private synchronized SSLEngineResult handshakeWrap() throws IOException {
        netOutBuffer.clear();
        SSLEngineResult wrapResult = sslEngine.wrap(EMPTY_BUFFER, netOutBuffer);
        netOutBuffer.flip();
        hs = wrapResult.getHandshakeStatus();
        channel.write(netOutBuffer);
        return wrapResult;
    }

    // Reads handshake bytes from the channel and unwraps them, growing peerAppData
    // on BUFFER_OVERFLOW, until the engine no longer needs unwrapping.
    private synchronized SSLEngineResult handshakeUnwrap() throws IOException {
        if (netInBuffer.position() == netInBuffer.limit()) {
            netInBuffer.clear();
        }
        channel.read(netInBuffer);
        SSLEngineResult unwrapResult;
        peerAppData.clear();
        do {
            netInBuffer.flip();
            unwrapResult = sslEngine.unwrap(netInBuffer, peerAppData);
            netInBuffer.compact();
            hs = unwrapResult.getHandshakeStatus();
            switch (unwrapResult.getStatus()) {
                case OK:
                case BUFFER_UNDERFLOW: {
                    if (unwrapResult.getHandshakeStatus() == SSLEngineResult.HandshakeStatus.NEED_TASK) {
                        tasks();
                    }
                    break;
                }
                case BUFFER_OVERFLOW: {
                    // peerAppData too small for the decrypted record: grow it,
                    // preserving any bytes already produced.
                    int applicationBufferSize = sslEngine.getSession().getApplicationBufferSize();
                    if (applicationBufferSize > peerAppData.capacity()) {
                        ByteBuffer b = ByteBuffer.allocate(applicationBufferSize + peerAppData.position());
                        peerAppData.flip();
                        b.put(peerAppData);
                        peerAppData = b;
                    } else {
                        peerAppData.compact();
                    }
                    break;
                }
                default:
                    throw new IOException("Failed to SSL unwrap with status " + unwrapResult.getStatus());
            }
        } while(unwrapResult.getStatus() == SSLEngineResult.Status.OK &&
                hs == SSLEngineResult.HandshakeStatus.NEED_UNWRAP);
        return unwrapResult;
    }

    // -----------------------------------------------------------
    // READ AND WRITE
    // -----------------------------------------------------------

    @Override
    public int read(ByteBuffer dst) throws IOException {
        if (closing || closed) return -1;
        return unwrap(dst);
    }

    /**
     * Decrypts into {@code dst}: first drains leftover decrypted bytes in peerAppData,
     * then reads/unwraps from the socket, growing buffers on OVERFLOW/UNDERFLOW.
     * Returns the number of decrypted bytes copied into dst.
     */
    private synchronized int unwrap(ByteBuffer dst) throws IOException {
        int read = 0;
        if(!dst.hasRemaining()) {
            return 0;
        }
        // We have outstanding data in our incoming decrypted buffer, use that data first to fill dst
        if(peerAppData.position() != 0) {
            read += copy(peerAppData, dst);
            return read;
        }

        if(netInBuffer.position() == 0) {
            channel.read(netInBuffer);
        }

        while(netInBuffer.position() != 0) {
            netInBuffer.flip();
            // We still might have left data here if dst was smaller than the amount of data in peerAppData
            if(peerAppData.position() != 0) {
                peerAppData.compact();
            }
            SSLEngineResult unwrapResult = sslEngine.unwrap(netInBuffer, peerAppData);
            switch (unwrapResult.getStatus()) {
                case OK: {
                    unwrapResult.bytesProduced();
                    if (unwrapResult.getHandshakeStatus() == SSLEngineResult.HandshakeStatus.NEED_TASK) tasks();
                    break;
                }
                case BUFFER_OVERFLOW: {
                    // peerAppData cannot hold the decrypted record: grow it if the session
                    // says so, otherwise make room and prepare netInBuffer for more input.
                    int applicationBufferSize = sslEngine.getSession().getApplicationBufferSize();
                    if (applicationBufferSize > peerAppData.capacity()) {
                        int appSize = applicationBufferSize;
                        ByteBuffer b = ByteBuffer.allocate(appSize + peerAppData.position());
                        peerAppData.flip();
                        b.put(peerAppData);
                        peerAppData = b;
                    } else {
                        // We tried to unwrap data into peerAppData which means there's leftover in netInBuffer;
                        // the upcoming read should append potential new data after the leftover.
                        netInBuffer.position(netInBuffer.limit());
                        netInBuffer.limit(netInBuffer.capacity());
                        peerAppData.compact();
                        if(!dst.hasRemaining()) {
                            return read;
                        }
                    }
                    break;
                }
                case BUFFER_UNDERFLOW: {
                    // Not a whole TLS record in netInBuffer yet: grow it or read more bytes.
                    int packetBufferSize = sslEngine.getSession().getPacketBufferSize();
                    if (packetBufferSize > netInBuffer.capacity()) {
                        int netSize = packetBufferSize;
                        if (netSize > netInBuffer.capacity()) {
                            ByteBuffer b = ByteBuffer.allocate(netSize);
                            netInBuffer.flip();
                            b.put(netInBuffer);
                            netInBuffer = b;
                        }
                    } else {
                        // We have some leftover data from unwrap but not enough.
                        // We need to read in more data from the socket AFTER the current data.
                        netInBuffer.position(netInBuffer.limit());
                        netInBuffer.limit(netInBuffer.capacity());
                        channel.read(netInBuffer);
                        continue;
                    }
                    break;
                }
                default:
                    throw new IOException("Failed to SSL unwrap with status " + unwrapResult.getStatus());
            }
            // Move whatever was decrypted into the caller's buffer.
            if (peerAppData != dst && dst.hasRemaining()) {
                peerAppData.flip();
                read += copy(peerAppData, dst);
                if(!dst.hasRemaining()) {
                    netInBuffer.compact();
                    return read;
                }
            }
            netInBuffer.compact();
        }
        return read;
    }

    // Copies as many bytes as fit from src to dst; clears src when fully drained.
    // Relies on src being heap-backed (src.array()) — true for all buffers here.
    private int copy(ByteBuffer src, ByteBuffer dst) {
        int toCopy = Math.min(src.remaining(), dst.remaining());
        dst.put(src.array(), src.position(), toCopy);
        src.position(src.position() + toCopy);
        if(!src.hasRemaining()) {
            src.clear();
        }
        return toCopy;
    }

    @Override
    public int write(ByteBuffer src) throws IOException {
        if(closing || closed) {
            throw new IOException("Cannot perform socket write, the socket is closed (or being closed).");
        }
        int wrote = 0;
        // src can be much bigger than what our SSL session allows to send in one go
        while (src.hasRemaining()) {
            netOutBuffer.clear();
            SSLEngineResult wrapResult = sslEngine.wrap(src, netOutBuffer);
            netOutBuffer.flip();
            if (wrapResult.getStatus() == SSLEngineResult.Status.OK) {
                if (wrapResult.getHandshakeStatus() == SSLEngineResult.HandshakeStatus.NEED_TASK) tasks();
            }
            // Fully flush each encrypted record before wrapping the next one.
            // NOTE(review): wrote accumulates encrypted (wire) bytes, not src bytes — confirm
            // callers treat the return value accordingly.
            while (netOutBuffer.hasRemaining()) {
                wrote += channel.write(netOutBuffer);
            }
        }
        return wrote;
    }

    // -----------------------------------------------------------
    // MISC
    // -----------------------------------------------------------

    // Runs all pending delegated tasks on this thread, then refreshes hs.
    private void tasks() {
        Runnable r;
        while ( (r = sslEngine.getDelegatedTask()) != null) {
            r.run();
        }
        hs = sslEngine.getHandshakeStatus();
    }

    public SocketChannel channel() {
        return channel;
    }

    SSLEngine getEngine() {
        return sslEngine;
    }

    boolean isHandshakeComplete() {
        return handshakeComplete;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.client.solrj.io.stream;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;

import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.comp.HashKey;
import org.apache.solr.client.solrj.io.comp.StreamComparator;
import org.apache.solr.client.solrj.io.eq.FieldEqualitor;
import org.apache.solr.client.solrj.io.eq.MultipleFieldEqualitor;
import org.apache.solr.client.solrj.io.eq.StreamEqualitor;
import org.apache.solr.client.solrj.io.stream.expr.Explanation;
import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
import org.apache.solr.client.solrj.io.stream.expr.Expressible;
import org.apache.solr.client.solrj.io.stream.expr.StreamExplanation;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionNamedParameter;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
import org.apache.solr.client.solrj.io.stream.metrics.Bucket;
import org.apache.solr.client.solrj.io.stream.metrics.Metric;

/**
 * Rolls up an underlying stream by hashing each tuple's bucket values and accumulating the
 * configured metrics per bucket. Unlike a sort-based rollup, the input does not need to be
 * sorted; the entire aggregation is held in memory until the wrapped stream hits EOF, after
 * which one tuple per bucket (plus the EOF tuple) is emitted.
 */
public class HashRollupStream extends TupleStream implements Expressible {

  private static final long serialVersionUID = 1;

  private PushBackStream tupleStream;
  private Bucket[] buckets;
  private Metric[] metrics;
  // Built lazily on the first read(); iterates the aggregated result tuples.
  private Iterator<Tuple> tupleIterator;

  public HashRollupStream(TupleStream tupleStream, Bucket[] buckets, Metric[] metrics) {
    init(tupleStream, buckets, metrics);
  }

  /**
   * Constructs the stream from an expression of the form
   * {@code hashRollup(stream, over="a,b", sum(x), ...)}.
   *
   * @throws IOException when the expression has unknown operands, not exactly one inner
   *     stream, or no single 'over' parameter.
   */
  public HashRollupStream(StreamExpression expression, StreamFactory factory) throws IOException {
    // grab all parameters out
    List<StreamExpression> streamExpressions =
        factory.getExpressionOperandsRepresentingTypes(
            expression, Expressible.class, TupleStream.class);
    List<StreamExpression> metricExpressions =
        factory.getExpressionOperandsRepresentingTypes(expression, Expressible.class, Metric.class);
    StreamExpressionNamedParameter overExpression = factory.getNamedOperand(expression, "over");

    // validate expression contains only what we want.
    if (expression.getParameters().size() != streamExpressions.size() + metricExpressions.size() + 1) {
      throw new IOException(
          String.format(Locale.ROOT, "Invalid expression %s - unknown operands found", expression));
    }

    if (1 != streamExpressions.size()) {
      throw new IOException(
          String.format(
              Locale.ROOT,
              "Invalid expression %s - expecting a single stream but found %d",
              expression,
              streamExpressions.size()));
    }

    if (null == overExpression
        || !(overExpression.getParameter() instanceof StreamExpressionValue)) {
      throw new IOException(
          String.format(
              Locale.ROOT,
              "Invalid expression %s - expecting single 'over' parameter listing fields to rollup by but didn't find one",
              expression));
    }

    // Construct the metrics
    Metric[] metrics = new Metric[metricExpressions.size()];
    for (int idx = 0; idx < metricExpressions.size(); ++idx) {
      metrics[idx] = factory.constructMetric(metricExpressions.get(idx));
    }

    // Construct the buckets.
    // Buckets are nothing more than equalitors (I think). We can use equalitors as helpers for
    // creating the buckets, but because I feel I'm missing something wrt buckets I don't want to
    // change the use of buckets in this class to instead be equalitors.
    StreamEqualitor streamEqualitor =
        factory.constructEqualitor(
            ((StreamExpressionValue) overExpression.getParameter()).getValue(),
            FieldEqualitor.class);
    List<FieldEqualitor> flattenedEqualitors = flattenEqualitor(streamEqualitor);
    Bucket[] buckets = new Bucket[flattenedEqualitors.size()];
    for (int idx = 0; idx < flattenedEqualitors.size(); ++idx) {
      buckets[idx] = new Bucket(flattenedEqualitors.get(idx).getLeftFieldName());
      // while we're using equalitors we don't support the form a=b. Only single field names.
    }

    init(factory.constructStream(streamExpressions.get(0)), buckets, metrics);
  }

  // Recursively flattens nested MultipleFieldEqualitors into a flat list of field equalitors.
  private List<FieldEqualitor> flattenEqualitor(StreamEqualitor equalitor) {
    List<FieldEqualitor> flattenedList = new ArrayList<>();
    if (equalitor instanceof FieldEqualitor) {
      flattenedList.add((FieldEqualitor) equalitor);
    } else if (equalitor instanceof MultipleFieldEqualitor) {
      MultipleFieldEqualitor mEqualitor = (MultipleFieldEqualitor) equalitor;
      for (StreamEqualitor subEqualitor : mEqualitor.getEqs()) {
        flattenedList.addAll(flattenEqualitor(subEqualitor));
      }
    }
    return flattenedList;
  }

  private void init(TupleStream tupleStream, Bucket[] buckets, Metric[] metrics) {
    this.tupleStream = new PushBackStream(tupleStream);
    this.buckets = buckets;
    this.metrics = metrics;
  }

  @Override
  public StreamExpression toExpression(StreamFactory factory) throws IOException {
    return toExpression(factory, true);
  }

  private StreamExpression toExpression(StreamFactory factory, boolean includeStreams)
      throws IOException {
    // function name
    StreamExpression expression = new StreamExpression(factory.getFunctionName(this.getClass()));

    // stream
    if (includeStreams) {
      expression.addParameter(tupleStream.toExpression(factory));
    } else {
      expression.addParameter("<stream>");
    }

    // over
    StringBuilder overBuilder = new StringBuilder();
    for (Bucket bucket : buckets) {
      if (overBuilder.length() > 0) {
        overBuilder.append(",");
      }
      overBuilder.append(bucket.toString());
    }
    expression.addParameter(new StreamExpressionNamedParameter("over", overBuilder.toString()));

    // metrics
    for (Metric metric : metrics) {
      expression.addParameter(metric.toExpression(factory));
    }

    return expression;
  }

  @Override
  public Explanation toExplanation(StreamFactory factory) throws IOException {
    Explanation explanation =
        new StreamExplanation(getStreamNodeId().toString())
            .withChildren(new Explanation[] {tupleStream.toExplanation(factory)})
            .withFunctionName(factory.getFunctionName(this.getClass()))
            .withImplementingClass(this.getClass().getName())
            .withExpressionType(ExpressionType.STREAM_DECORATOR)
            .withExpression(toExpression(factory, false).toString());

    for (Metric metric : metrics) {
      explanation.withHelper(metric.toExplanation(factory));
    }

    return explanation;
  }

  @Override
  public void setStreamContext(StreamContext context) {
    this.tupleStream.setStreamContext(context);
  }

  @Override
  public List<TupleStream> children() {
    List<TupleStream> l = new ArrayList<>();
    l.add(tupleStream);
    return l;
  }

  @Override
  public void open() throws IOException {
    tupleStream.open();
  }

  @Override
  public void close() throws IOException {
    tupleStream.close();
    tupleIterator = null;
  }

  /**
   * On the first call, consumes the entire wrapped stream, aggregating metrics per
   * hashed bucket key; subsequent calls drain the materialized result iterator.
   * The EOF tuple from the wrapped stream is appended last.
   */
  @Override
  public Tuple read() throws IOException {
    // On the first call to read build the tupleIterator.
    if (tupleIterator == null) {
      Map<HashKey, Metric[]> metricMap = new HashMap<>();
      while (true) {
        Tuple tuple = tupleStream.read();
        if (tuple.EOF) {
          // Materialize one output tuple per bucket, then the EOF marker.
          List<Tuple> tuples = new ArrayList<>();
          for (Map.Entry<HashKey, Metric[]> entry : metricMap.entrySet()) {
            Tuple t = new Tuple();
            Metric[] finishedMetrics = entry.getValue();
            for (Metric metric : finishedMetrics) {
              t.put(metric.getIdentifier(), metric.getValue());
            }
            HashKey hashKey = entry.getKey();
            for (int i = 0; i < buckets.length; i++) {
              t.put(buckets[i].toString(), hashKey.getParts()[i]);
            }
            tuples.add(t);
          }
          tuples.add(tuple);
          this.tupleIterator = tuples.iterator();
          break;
        }

        Object[] bucketValues = new Object[buckets.length];
        for (int i = 0; i < buckets.length; i++) {
          bucketValues[i] = buckets[i].getBucketValue(tuple);
        }
        HashKey hashKey = new HashKey(bucketValues);
        Metric[] currentMetrics = metricMap.get(hashKey);
        if (currentMetrics != null) {
          for (Metric bucketMetric : currentMetrics) {
            bucketMetric.update(tuple);
          }
        } else {
          // First tuple for this bucket: instantiate fresh metric accumulators.
          currentMetrics = new Metric[metrics.length];
          for (int i = 0; i < metrics.length; i++) {
            Metric bucketMetric = metrics[i].newInstance();
            bucketMetric.update(tuple);
            currentMetrics[i] = bucketMetric;
          }
          metricMap.put(hashKey, currentMetrics);
        }
      }
    }
    return tupleIterator.next();
  }

  @Override
  public int getCost() {
    return 0;
  }

  @Override
  public StreamComparator getStreamSort() {
    return tupleStream.getStreamSort();
  }
}
package com.kpi4j.appender.database;

import static org.junit.Assert.assertEquals;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Date;

import org.junit.Test;

import com.kpi4j.Counter;
import com.kpi4j.Dimension;
import com.kpi4j.ObjectType;
import com.kpi4j.appender.database.JDBCAppender;
import com.kpi4j.records.OBjectTypeRecord;

/**
 * Tests for {@link JDBCAppender} query generation and batch insertion.
 *
 * <p>The query-generation tests are pure; {@link #testExecuteBatchInsert} is an
 * integration test that requires a reachable MySQL instance (localhost:3306,
 * database "kpi4j"). The shared fixture construction has been extracted into
 * {@link #buildObjectType(boolean)} to remove the copy-pasted setup that was
 * previously duplicated in every test method.
 */
public class TestJDBCAppender {

	String host = "localhost";
	String port = "3306";
	String login = "root";
	String password = "root";
	String driver = "com.mysql.jdbc.Driver";
	String type = "mysql";
	String database = "kpi4j";

	Connection connection;

	public TestJDBCAppender() throws ClassNotFoundException, SQLException {
		Class.forName(driver);
		connection = DriverManager.getConnection(
				"jdbc:" + type + "://" + host + ":" + port + "/" + database, login, password);
	}

	/**
	 * Builds the "ot1" ObjectType used by every test: counters ctr1/ctr2/ctr3
	 * (Integer/Long/String) and, when requested, dimensions dim1 (Integer) and
	 * dim2 (String).
	 */
	private static ObjectType buildObjectType(boolean withDimensions) {
		ObjectType ot = new ObjectType();
		ot.setName("ot1");
		if (withDimensions) {
			Dimension dim1 = new Dimension();
			dim1.setName("dim1");
			dim1.setType(Integer.class);
			ot.addDimension(dim1);
			Dimension dim2 = new Dimension();
			dim2.setName("dim2");
			dim2.setType(String.class);
			ot.addDimension(dim2);
		}
		addCounter(ot, "ctr1", "Integer");
		addCounter(ot, "ctr2", "Long");
		addCounter(ot, "ctr3", "String");
		return ot;
	}

	/** Adds one named, typed counter to {@code ot}. */
	private static void addCounter(ObjectType ot, String name, String counterType) {
		Counter c = new Counter();
		c.setName(name);
		c.setType(counterType);
		ot.addCounter(c);
	}

	@Test
	public void testGetPreparedStsQueryWithoutDimension() throws ClassNotFoundException {
		JDBCAppender appender = new JDBCAppender();
		ObjectType ot = buildObjectType(false);

		String query = appender.getPreparedStsQuery(ot);
		assertEquals("insert into ot1 (start_date, end_date, ctr1, ctr2, ctr3) values (?,?,?,?,?)", query);
	}

	@Test
	public void testGetPreparedStsQueryWitDimension() throws ClassNotFoundException {
		JDBCAppender appender = new JDBCAppender();
		ObjectType ot = buildObjectType(true);

		String query = appender.getPreparedStsQuery(ot);
		assertEquals("insert into ot1 (start_date, end_date, dim1, dim2, ctr1, ctr2, ctr3) values (?,?,?,?,?,?,?)", query);
	}

	/**
	 * Integration test: recreates the ot1 table, fills a record for every
	 * (dim1, dim2) combination in {1,2}x{1,2}, and executes the batch insert.
	 */
	@Test
	public void testExecuteBatchInsert() throws SQLException, ClassNotFoundException {
		connection.createStatement().executeUpdate("drop table if exists ot1;");
		connection.createStatement().executeUpdate("create table ot1 ("
				+ "start_date datetime not null,"
				+ "end_date datetime,"
				+ "dim1 int not null,"
				+ "dim2 varchar(45) not null,"
				+ "ctr1 int,"
				+ "ctr2 bigint,"
				+ "ctr3 varchar(45),"
				+ "primary key(start_date,dim1,dim2)"
				+ ")");

		JDBCAppender appender = new JDBCAppender();
		ObjectType ot = buildObjectType(true);

		OBjectTypeRecord rec = new OBjectTypeRecord(ot);
		rec.incrementCounter("dim1", 1, "dim2", 1, "ctr1", 11);
		rec.incrementCounter("dim1", 1, "dim2", 1, "ctr2", 12L);
		rec.setCounterValue("dim1", 1, "dim2", 1, "ctr3", "Hello");
		rec.incrementCounter("dim1", 1, "dim2", 2, "ctr1", 11);
		rec.incrementCounter("dim1", 1, "dim2", 2, "ctr2", 12L);
		rec.setCounterValue("dim1", 1, "dim2", 2, "ctr3", "Hello");
		rec.incrementCounter("dim1", 2, "dim2", 1, "ctr1", 11);
		rec.incrementCounter("dim1", 2, "dim2", 1, "ctr2", 12L);
		rec.setCounterValue("dim1", 2, "dim2", 1, "ctr3", "Hello");
		rec.incrementCounter("dim1", 2, "dim2", 2, "ctr1", 11);
		rec.incrementCounter("dim1", 2, "dim2", 2, "ctr2", 12L);
		rec.setCounterValue("dim1", 2, "dim2", 2, "ctr3", "Hello");
		rec.setEndTime(new Date());

		PreparedStatement ps = connection.prepareStatement(
				"insert into ot1 (start_date, end_date, dim1, dim2, ctr1, ctr2, ctr3) values (?,?,?,?,?,?,?)");
		appender.executeBatchInsert(ps, rec, 0);
		ps.executeBatch();
	}

	@Test
	public void testGetTableCreationQuery() throws ClassNotFoundException {
		JDBCAppender appender = new JDBCAppender();
		ObjectType ot = buildObjectType(true);

		String query = "create table if not exists ot1 (\n"
				+ "start_date datetime NOT NULL,\n"
				+ "end_date datetime,\n"
				+ "dim1 INTEGER NOT NULL,\n"
				+ "dim2 VARCHAR(255) NOT NULL,\n"
				+ "ctr1 INTEGER,\n"
				+ "ctr2 BIGINT,\n"
				+ "ctr3 VARCHAR(255),\n"
				+ "primary key(start_date, dim1, dim2)\n"
				+ ")";
		assertEquals(query, appender.getTableCreationQuery(ot));
	}
}
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: tensorflow/compiler/aot/tfcompile.proto package org.tensorflow.tfcompile; /** * <pre> * Fetch represents a single fetch tensor in the graph, which corresponds to an * output argument for the generated function. * </pre> * * Protobuf type {@code tensorflow.tfcompile.Fetch} */ public final class Fetch extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:tensorflow.tfcompile.Fetch) FetchOrBuilder { // Use Fetch.newBuilder() to construct. private Fetch(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Fetch() { name_ = ""; } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private Fetch( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!input.skipField(tag)) { done = true; } break; } case 10: { org.tensorflow.tfcompile.TensorId.Builder subBuilder = null; if (id_ != null) { subBuilder = id_.toBuilder(); } id_ = input.readMessage(org.tensorflow.tfcompile.TensorId.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(id_); id_ = subBuilder.buildPartial(); } break; } case 18: { java.lang.String s = input.readStringRequireUtf8(); name_ = s; break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return 
org.tensorflow.tfcompile.CompileProtos.internal_static_tensorflow_tfcompile_Fetch_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.tensorflow.tfcompile.CompileProtos.internal_static_tensorflow_tfcompile_Fetch_fieldAccessorTable .ensureFieldAccessorsInitialized( org.tensorflow.tfcompile.Fetch.class, org.tensorflow.tfcompile.Fetch.Builder.class); } public static final int ID_FIELD_NUMBER = 1; private org.tensorflow.tfcompile.TensorId id_; /** * <code>.tensorflow.tfcompile.TensorId id = 1;</code> */ public boolean hasId() { return id_ != null; } /** * <code>.tensorflow.tfcompile.TensorId id = 1;</code> */ public org.tensorflow.tfcompile.TensorId getId() { return id_ == null ? org.tensorflow.tfcompile.TensorId.getDefaultInstance() : id_; } /** * <code>.tensorflow.tfcompile.TensorId id = 1;</code> */ public org.tensorflow.tfcompile.TensorIdOrBuilder getIdOrBuilder() { return getId(); } public static final int NAME_FIELD_NUMBER = 2; private volatile java.lang.Object name_; /** * <pre> * Optional name for generated code. * </pre> * * <code>string name = 2;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * <pre> * Optional name for generated code. 
* </pre> * * <code>string name = 2;</code> */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (id_ != null) { output.writeMessage(1, getId()); } if (!getNameBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, name_); } } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (id_ != null) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, getId()); } if (!getNameBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, name_); } memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.tensorflow.tfcompile.Fetch)) { return super.equals(obj); } org.tensorflow.tfcompile.Fetch other = (org.tensorflow.tfcompile.Fetch) obj; boolean result = true; result = result && (hasId() == other.hasId()); if (hasId()) { result = result && getId() .equals(other.getId()); } result = result && getName() .equals(other.getName()); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasId()) { hash = (37 * hash) + ID_FIELD_NUMBER; hash = (53 * hash) + getId().hashCode(); } hash = (37 * hash) 
+ NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static org.tensorflow.tfcompile.Fetch parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.tensorflow.tfcompile.Fetch parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.tensorflow.tfcompile.Fetch parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.tensorflow.tfcompile.Fetch parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.tensorflow.tfcompile.Fetch parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.tensorflow.tfcompile.Fetch parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static org.tensorflow.tfcompile.Fetch parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static org.tensorflow.tfcompile.Fetch parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public 
static org.tensorflow.tfcompile.Fetch parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static org.tensorflow.tfcompile.Fetch parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(org.tensorflow.tfcompile.Fetch prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Fetch represents a single fetch tensor in the graph, which corresponds to an * output argument for the generated function. 
* </pre> * * Protobuf type {@code tensorflow.tfcompile.Fetch} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:tensorflow.tfcompile.Fetch) org.tensorflow.tfcompile.FetchOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.tensorflow.tfcompile.CompileProtos.internal_static_tensorflow_tfcompile_Fetch_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.tensorflow.tfcompile.CompileProtos.internal_static_tensorflow_tfcompile_Fetch_fieldAccessorTable .ensureFieldAccessorsInitialized( org.tensorflow.tfcompile.Fetch.class, org.tensorflow.tfcompile.Fetch.Builder.class); } // Construct using org.tensorflow.tfcompile.Fetch.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } public Builder clear() { super.clear(); if (idBuilder_ == null) { id_ = null; } else { id_ = null; idBuilder_ = null; } name_ = ""; return this; } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.tensorflow.tfcompile.CompileProtos.internal_static_tensorflow_tfcompile_Fetch_descriptor; } public org.tensorflow.tfcompile.Fetch getDefaultInstanceForType() { return org.tensorflow.tfcompile.Fetch.getDefaultInstance(); } public org.tensorflow.tfcompile.Fetch build() { org.tensorflow.tfcompile.Fetch result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public org.tensorflow.tfcompile.Fetch buildPartial() { org.tensorflow.tfcompile.Fetch result = new org.tensorflow.tfcompile.Fetch(this); if (idBuilder_ == null) 
{ result.id_ = id_; } else { result.id_ = idBuilder_.build(); } result.name_ = name_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.setField(field, value); } public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.tensorflow.tfcompile.Fetch) { return mergeFrom((org.tensorflow.tfcompile.Fetch)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.tensorflow.tfcompile.Fetch other) { if (other == org.tensorflow.tfcompile.Fetch.getDefaultInstance()) return this; if (other.hasId()) { mergeId(other.getId()); } if (!other.getName().isEmpty()) { name_ = other.name_; onChanged(); } onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.tensorflow.tfcompile.Fetch parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.tensorflow.tfcompile.Fetch) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { 
mergeFrom(parsedMessage); } } return this; } private org.tensorflow.tfcompile.TensorId id_ = null; private com.google.protobuf.SingleFieldBuilderV3< org.tensorflow.tfcompile.TensorId, org.tensorflow.tfcompile.TensorId.Builder, org.tensorflow.tfcompile.TensorIdOrBuilder> idBuilder_; /** * <code>.tensorflow.tfcompile.TensorId id = 1;</code> */ public boolean hasId() { return idBuilder_ != null || id_ != null; } /** * <code>.tensorflow.tfcompile.TensorId id = 1;</code> */ public org.tensorflow.tfcompile.TensorId getId() { if (idBuilder_ == null) { return id_ == null ? org.tensorflow.tfcompile.TensorId.getDefaultInstance() : id_; } else { return idBuilder_.getMessage(); } } /** * <code>.tensorflow.tfcompile.TensorId id = 1;</code> */ public Builder setId(org.tensorflow.tfcompile.TensorId value) { if (idBuilder_ == null) { if (value == null) { throw new NullPointerException(); } id_ = value; onChanged(); } else { idBuilder_.setMessage(value); } return this; } /** * <code>.tensorflow.tfcompile.TensorId id = 1;</code> */ public Builder setId( org.tensorflow.tfcompile.TensorId.Builder builderForValue) { if (idBuilder_ == null) { id_ = builderForValue.build(); onChanged(); } else { idBuilder_.setMessage(builderForValue.build()); } return this; } /** * <code>.tensorflow.tfcompile.TensorId id = 1;</code> */ public Builder mergeId(org.tensorflow.tfcompile.TensorId value) { if (idBuilder_ == null) { if (id_ != null) { id_ = org.tensorflow.tfcompile.TensorId.newBuilder(id_).mergeFrom(value).buildPartial(); } else { id_ = value; } onChanged(); } else { idBuilder_.mergeFrom(value); } return this; } /** * <code>.tensorflow.tfcompile.TensorId id = 1;</code> */ public Builder clearId() { if (idBuilder_ == null) { id_ = null; onChanged(); } else { id_ = null; idBuilder_ = null; } return this; } /** * <code>.tensorflow.tfcompile.TensorId id = 1;</code> */ public org.tensorflow.tfcompile.TensorId.Builder getIdBuilder() { onChanged(); return getIdFieldBuilder().getBuilder(); } /** * 
<code>.tensorflow.tfcompile.TensorId id = 1;</code> */ public org.tensorflow.tfcompile.TensorIdOrBuilder getIdOrBuilder() { if (idBuilder_ != null) { return idBuilder_.getMessageOrBuilder(); } else { return id_ == null ? org.tensorflow.tfcompile.TensorId.getDefaultInstance() : id_; } } /** * <code>.tensorflow.tfcompile.TensorId id = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< org.tensorflow.tfcompile.TensorId, org.tensorflow.tfcompile.TensorId.Builder, org.tensorflow.tfcompile.TensorIdOrBuilder> getIdFieldBuilder() { if (idBuilder_ == null) { idBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.tensorflow.tfcompile.TensorId, org.tensorflow.tfcompile.TensorId.Builder, org.tensorflow.tfcompile.TensorIdOrBuilder>( getId(), getParentForChildren(), isClean()); id_ = null; } return idBuilder_; } private java.lang.Object name_ = ""; /** * <pre> * Optional name for generated code. * </pre> * * <code>string name = 2;</code> */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Optional name for generated code. * </pre> * * <code>string name = 2;</code> */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Optional name for generated code. * </pre> * * <code>string name = 2;</code> */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; onChanged(); return this; } /** * <pre> * Optional name for generated code. 
* </pre> * * <code>string name = 2;</code> */ public Builder clearName() { name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * <pre> * Optional name for generated code. * </pre> * * <code>string name = 2;</code> */ public Builder setNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; onChanged(); return this; } public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return this; } public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return this; } // @@protoc_insertion_point(builder_scope:tensorflow.tfcompile.Fetch) } // @@protoc_insertion_point(class_scope:tensorflow.tfcompile.Fetch) private static final org.tensorflow.tfcompile.Fetch DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new org.tensorflow.tfcompile.Fetch(); } public static org.tensorflow.tfcompile.Fetch getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<Fetch> PARSER = new com.google.protobuf.AbstractParser<Fetch>() { public Fetch parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new Fetch(input, extensionRegistry); } }; public static com.google.protobuf.Parser<Fetch> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<Fetch> getParserForType() { return PARSER; } public org.tensorflow.tfcompile.Fetch getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.flowable.rest.service.api.history;

import java.util.Date;
import java.util.List;

import org.flowable.rest.api.PaginateRequest;
import org.flowable.rest.service.api.engine.variable.QueryVariable;

import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeInfo.Id;

/**
 * Request body carrying the filter criteria for a historic task instance query.
 * Plain data holder: every field is an optional filter; {@code null} means
 * "do not filter on this criterion". Pagination fields come from the
 * {@link PaginateRequest} superclass.
 *
 * @author Tijs Rademakers
 */
public class HistoricTaskInstanceQueryRequest extends PaginateRequest {

    // --- identity / process scoping ---
    private String taskId;
    private String processInstanceId;
    private String processBusinessKey;
    private String processBusinessKeyLike;
    private String processDefinitionId;
    private String processDefinitionKey;
    private String processDefinitionKeyLike;
    private String processDefinitionName;
    private String processDefinitionNameLike;
    private String executionId;

    // --- task attributes ---
    private String taskName;
    private String taskNameLike;
    private String taskDescription;
    private String taskDescriptionLike;
    private String taskDefinitionKey;
    private String taskDefinitionKeyLike;
    private String taskCategory;
    private String taskDeleteReason;
    private String taskDeleteReasonLike;

    // --- people involved ---
    private String taskAssignee;
    private String taskAssigneeLike;
    private String taskOwner;
    private String taskOwnerLike;
    private String taskInvolvedUser;

    // --- priority / state ---
    private Integer taskPriority;
    private Integer taskMinPriority;
    private Integer taskMaxPriority;
    private Boolean finished;
    private Boolean processFinished;
    private String parentTaskId;

    // --- date criteria ---
    private Date dueDate;
    private Date dueDateAfter;
    private Date dueDateBefore;
    private Boolean withoutDueDate;
    private Date taskCreatedOn;
    private Date taskCreatedBefore;
    private Date taskCreatedAfter;
    private Date taskCompletedOn;
    private Date taskCompletedBefore;
    private Date taskCompletedAfter;

    // --- variables / tenancy / candidacy ---
    private Boolean includeTaskLocalVariables;
    private Boolean includeProcessVariables;
    private List<QueryVariable> taskVariables;
    private List<QueryVariable> processVariables;
    private String tenantId;
    private String tenantIdLike;
    private Boolean withoutTenantId;
    private String taskCandidateGroup;

    public String getTaskId() { return taskId; }

    public void setTaskId(String taskId) { this.taskId = taskId; }

    public String getProcessInstanceId() { return processInstanceId; }

    public void setProcessInstanceId(String processInstanceId) { this.processInstanceId = processInstanceId; }

    public String getProcessBusinessKey() { return processBusinessKey; }

    public void setProcessBusinessKey(String processBusinessKey) { this.processBusinessKey = processBusinessKey; }

    public String getProcessBusinessKeyLike() { return processBusinessKeyLike; }

    public void setProcessBusinessKeyLike(String processBusinessKeyLike) { this.processBusinessKeyLike = processBusinessKeyLike; }

    public String getProcessDefinitionId() { return processDefinitionId; }

    public void setProcessDefinitionId(String processDefinitionId) { this.processDefinitionId = processDefinitionId; }

    public String getProcessDefinitionKey() { return processDefinitionKey; }

    public void setProcessDefinitionKey(String processDefinitionKey) { this.processDefinitionKey = processDefinitionKey; }

    public String getProcessDefinitionKeyLike() { return processDefinitionKeyLike; }

    public void setProcessDefinitionKeyLike(String processDefinitionKeyLike) { this.processDefinitionKeyLike = processDefinitionKeyLike; }

    public String getProcessDefinitionName() { return processDefinitionName; }

    public void setProcessDefinitionName(String processDefinitionName) { this.processDefinitionName = processDefinitionName; }

    public String getProcessDefinitionNameLike() { return processDefinitionNameLike; }

    public void setProcessDefinitionNameLike(String processDefinitionNameLike) { this.processDefinitionNameLike = processDefinitionNameLike; }

    public String getExecutionId() { return executionId; }

    public void setExecutionId(String executionId) { this.executionId = executionId; }

    public String getTaskName() { return taskName; }

    public void setTaskName(String taskName) { this.taskName = taskName; }

    public String getTaskNameLike() { return taskNameLike; }

    public void setTaskNameLike(String taskNameLike) { this.taskNameLike = taskNameLike; }

    public String getTaskDescription() { return taskDescription; }

    public void setTaskDescription(String taskDescription) { this.taskDescription = taskDescription; }

    public String getTaskDescriptionLike() { return taskDescriptionLike; }

    public void setTaskDescriptionLike(String taskDescriptionLike) { this.taskDescriptionLike = taskDescriptionLike; }

    public String getTaskDefinitionKey() { return taskDefinitionKey; }

    public void setTaskDefinitionKey(String taskDefinitionKey) { this.taskDefinitionKey = taskDefinitionKey; }

    public String getTaskDefinitionKeyLike() { return taskDefinitionKeyLike; }

    public void setTaskDefinitionKeyLike(String taskDefinitionKeyLike) { this.taskDefinitionKeyLike = taskDefinitionKeyLike; }

    public String getTaskCategory() { return taskCategory; }

    public void setTaskCategory(String taskCategory) { this.taskCategory = taskCategory; }

    public String getTaskDeleteReason() { return taskDeleteReason; }

    public void setTaskDeleteReason(String taskDeleteReason) { this.taskDeleteReason = taskDeleteReason; }

    public String getTaskDeleteReasonLike() { return taskDeleteReasonLike; }

    public void setTaskDeleteReasonLike(String taskDeleteReasonLike) { this.taskDeleteReasonLike = taskDeleteReasonLike; }

    public String getTaskAssignee() { return taskAssignee; }

    public void setTaskAssignee(String taskAssignee) { this.taskAssignee = taskAssignee; }

    public String getTaskAssigneeLike() { return taskAssigneeLike; }

    public void setTaskAssigneeLike(String taskAssigneeLike) { this.taskAssigneeLike = taskAssigneeLike; }

    public String getTaskOwner() { return taskOwner; }

    public void setTaskOwner(String taskOwner) { this.taskOwner = taskOwner; }

    public String getTaskOwnerLike() { return taskOwnerLike; }

    public void setTaskOwnerLike(String taskOwnerLike) { this.taskOwnerLike = taskOwnerLike; }

    public String getTaskInvolvedUser() { return taskInvolvedUser; }

    public void setTaskInvolvedUser(String taskInvolvedUser) { this.taskInvolvedUser = taskInvolvedUser; }

    public Integer getTaskPriority() { return taskPriority; }

    public void setTaskPriority(Integer taskPriority) { this.taskPriority = taskPriority; }

    public Integer getTaskMinPriority() { return taskMinPriority; }

    public void setTaskMinPriority(Integer taskMinPriority) { this.taskMinPriority = taskMinPriority; }

    public Integer getTaskMaxPriority() { return taskMaxPriority; }

    public void setTaskMaxPriority(Integer taskMaxPriority) { this.taskMaxPriority = taskMaxPriority; }

    public Boolean getFinished() { return finished; }

    public void setFinished(Boolean finished) { this.finished = finished; }

    public Boolean getProcessFinished() { return processFinished; }

    public void setProcessFinished(Boolean processFinished) { this.processFinished = processFinished; }

    public String getParentTaskId() { return parentTaskId; }

    public void setParentTaskId(String parentTaskId) { this.parentTaskId = parentTaskId; }

    public Date getDueDate() { return dueDate; }

    public void setDueDate(Date dueDate) { this.dueDate = dueDate; }

    public Date getDueDateAfter() { return dueDateAfter; }

    public void setDueDateAfter(Date dueDateAfter) { this.dueDateAfter = dueDateAfter; }

    public Date getDueDateBefore() { return dueDateBefore; }

    public void setDueDateBefore(Date dueDateBefore) { this.dueDateBefore = dueDateBefore; }

    public Boolean getWithoutDueDate() { return withoutDueDate; }

    public void setWithoutDueDate(Boolean withoutDueDate) { this.withoutDueDate = withoutDueDate; }

    public Date getTaskCreatedOn() { return taskCreatedOn; }

    public void setTaskCreatedOn(Date taskCreatedOn) { this.taskCreatedOn = taskCreatedOn; }

    public Date getTaskCreatedBefore() { return taskCreatedBefore; }

    public void setTaskCreatedBefore(Date taskCreatedBefore) { this.taskCreatedBefore = taskCreatedBefore; }

    public Date getTaskCreatedAfter() { return taskCreatedAfter; }

    public void setTaskCreatedAfter(Date taskCreatedAfter) { this.taskCreatedAfter = taskCreatedAfter; }

    public Date getTaskCompletedOn() { return taskCompletedOn; }

    public void setTaskCompletedOn(Date taskCompletedOn) { this.taskCompletedOn = taskCompletedOn; }

    public Date getTaskCompletedBefore() { return taskCompletedBefore; }

    public void setTaskCompletedBefore(Date taskCompletedBefore) { this.taskCompletedBefore = taskCompletedBefore; }

    public Date getTaskCompletedAfter() { return taskCompletedAfter; }

    public void setTaskCompletedAfter(Date taskCompletedAfter) { this.taskCompletedAfter = taskCompletedAfter; }

    public Boolean getIncludeTaskLocalVariables() { return includeTaskLocalVariables; }

    public void setIncludeTaskLocalVariables(Boolean includeTaskLocalVariables) { this.includeTaskLocalVariables = includeTaskLocalVariables; }

    public Boolean getIncludeProcessVariables() { return includeProcessVariables; }

    public void setIncludeProcessVariables(Boolean includeProcessVariables) { this.includeProcessVariables = includeProcessVariables; }

    // Type info is required so Jackson can deserialize the heterogeneous
    // QueryVariable payloads sent by REST clients.
    @JsonTypeInfo(use = Id.CLASS, defaultImpl = QueryVariable.class)
    public List<QueryVariable> getTaskVariables() { return taskVariables; }

    public void setTaskVariables(List<QueryVariable> taskVariables) { this.taskVariables = taskVariables; }

    @JsonTypeInfo(use = Id.CLASS, defaultImpl = QueryVariable.class)
    public List<QueryVariable> getProcessVariables() { return processVariables; }

    public void setProcessVariables(List<QueryVariable> processVariables) { this.processVariables = processVariables; }

    public String getTenantId() { return tenantId; }

    public void setTenantId(String tenantId) { this.tenantId = tenantId; }

    public String getTenantIdLike() { return tenantIdLike; }

    public void setTenantIdLike(String tenantIdLike) { this.tenantIdLike = tenantIdLike; }

    public Boolean getWithoutTenantId() { return withoutTenantId; }

    public void setWithoutTenantId(Boolean withoutTenantId) { this.withoutTenantId = withoutTenantId; }

    public String getTaskCandidateGroup() { return taskCandidateGroup; }

    public void setTaskCandidateGroup(String taskCandidateGroup) { this.taskCandidateGroup = taskCandidateGroup; }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.fetch.subphase;

import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.util.ArrayUtil;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.InnerHitBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.index.query.QueryBuilders.nestedQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHit;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

/**
 * Integration tests for inner hits on {@code nested} fields: basic retrieval, multi-level
 * nesting, source filtering, named-query propagation, {@code ignore_unmapped}, and the
 * {@code index.max_inner_result_window} limit. (Note {@link FetchSourceContext} needs no
 * import — this file lives in the same package.)
 */
public class InnerHitsIT extends ESIntegTestCase {

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        // CustomScriptPlugin backs the script field used in testSimpleNested.
        return Arrays.asList(InternalSettingsPlugin.class, CustomScriptPlugin.class);
    }

    /** Mock script engine exposing a single inline script named "5" that returns the string "5". */
    public static class CustomScriptPlugin extends MockScriptPlugin {
        @Override
        protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
            return Collections.singletonMap("5", script -> "5");
        }
    }

    /**
     * Nested inner hits on a single level: verifies matching child docs are returned with the
     * correct nested identity (field + offset), and that highlighting, explain, doc-value
     * fields, script fields and size all work inside the inner-hit definition.
     */
    public void testSimpleNested() throws Exception {
        // "comments" is a nested field; fielddata is enabled on comments.message so it can
        // be fetched as a doc-value field in the last query below.
        assertAcked(prepareCreate("articles").addMapping("article", jsonBuilder().startObject().startObject("article")
            .startObject("properties")
                .startObject("comments")
                    .field("type", "nested")
                    .startObject("properties")
                        .startObject("message")
                            .field("type", "text")
                            .field("fielddata", true)
                        .endObject()
                    .endObject()
                .endObject()
                .startObject("title")
                    .field("type", "text")
                .endObject()
            .endObject().endObject().endObject()));

        List<IndexRequestBuilder> requests = new ArrayList<>();
        // Doc 1: two comments mention "fox", one does not.
        requests.add(client().prepareIndex("articles", "article", "1").setSource(jsonBuilder().startObject()
            .field("title", "quick brown fox")
            .startArray("comments")
                .startObject().field("message", "fox eat quick").endObject()
                .startObject().field("message", "fox ate rabbit x y z").endObject()
                .startObject().field("message", "rabbit got away").endObject()
            .endArray()
            .endObject()));
        // Doc 2: all three comments mention "elephant".
        requests.add(client().prepareIndex("articles", "article", "2").setSource(jsonBuilder().startObject()
            .field("title", "big gray elephant")
            .startArray("comments")
                .startObject().field("message", "elephant captured").endObject()
                .startObject().field("message", "mice squashed by elephant x").endObject()
                .startObject().field("message", "elephant scared by mice x y").endObject()
            .endArray()
            .endObject()));
        indexRandom(true, requests);

        // Named inner hit ("comment") must surface the two matching nested docs of doc 1,
        // identified by field "comments" and offsets 0 and 1.
        SearchResponse response = client().prepareSearch("articles")
            .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg)
                .innerHit(new InnerHitBuilder("comment"))
            ).get();
        assertNoFailures(response);
        assertHitCount(response, 1);
        assertSearchHit(response, 1, hasId("1"));
        assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
        SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
        assertThat(innerHits.getTotalHits().value, equalTo(2L));
        assertThat(innerHits.getHits().length, equalTo(2));
        assertThat(innerHits.getAt(0).getId(), equalTo("1"));
        assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
        assertThat(innerHits.getAt(1).getId(), equalTo("1"));
        assertThat(innerHits.getAt(1).getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(innerHits.getAt(1).getNestedIdentity().getOffset(), equalTo(1));

        // Same shape for doc 2: all three nested docs match "elephant".
        response = client().prepareSearch("articles")
            .setQuery(nestedQuery("comments", matchQuery("comments.message", "elephant"), ScoreMode.Avg)
                .innerHit(new InnerHitBuilder("comment"))
            ).get();
        assertNoFailures(response);
        assertHitCount(response, 1);
        assertSearchHit(response, 1, hasId("2"));
        assertThat(response.getHits().getAt(0).getShard(), notNullValue());
        assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
        innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
        assertThat(innerHits.getTotalHits().value, equalTo(3L));
        assertThat(innerHits.getHits().length, equalTo(3));
        assertThat(innerHits.getAt(0).getId(), equalTo("2"));
        assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
        assertThat(innerHits.getAt(1).getId(), equalTo("2"));
        assertThat(innerHits.getAt(1).getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(innerHits.getAt(1).getNestedIdentity().getOffset(), equalTo(1));
        assertThat(innerHits.getAt(2).getId(), equalTo("2"));
        assertThat(innerHits.getAt(2).getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(innerHits.getAt(2).getNestedIdentity().getOffset(), equalTo(2));

        // Unnamed inner hit defaults its key to the nested path ("comments"); exercise
        // highlight, explain, doc-value field, script field and size=1 on the inner hits.
        response = client().prepareSearch("articles")
            .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg).innerHit(
                new InnerHitBuilder().setHighlightBuilder(new HighlightBuilder().field("comments.message"))
                    .setExplain(true)
                    .addDocValueField("comments.message")
                    .addScriptField("script",
                        new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", Collections.emptyMap()))
                    .setSize(1))).get();
        assertNoFailures(response);
        innerHits = response.getHits().getAt(0).getInnerHits().get("comments");
        assertThat(innerHits.getTotalHits().value, equalTo(2L));
        assertThat(innerHits.getHits().length, equalTo(1));
        assertThat(innerHits.getAt(0).getHighlightFields().get("comments.message").getFragments()[0].string(),
            equalTo("<em>fox</em> eat quick"));
        assertThat(innerHits.getAt(0).getExplanation().toString(), containsString("weight(comments.message:fox in"));
        // Doc-value of an analyzed text field yields a single indexed token here ("eat").
        assertThat(innerHits.getAt(0).getFields().get("comments.message").getValue().toString(), equalTo("eat"));
        assertThat(innerHits.getAt(0).getFields().get("script").getValue().toString(), equalTo("5"));
    }

    /**
     * Randomized check that per-hit inner-hit totals and nested offsets are correct for two
     * independent nested fields, with a random inner-hit page size.
     */
    public void testRandomNested() throws Exception {
        assertAcked(prepareCreate("idx").addMapping("type", "field1", "type=nested", "field2", "type=nested"));
        int numDocs = scaledRandomIntBetween(25, 100);
        List<IndexRequestBuilder> requestBuilders = new ArrayList<>();

        // Remember how many inner objects each doc got, to assert totals later.
        int[] field1InnerObjects = new int[numDocs];
        int[] field2InnerObjects = new int[numDocs];
        for (int i = 0; i < numDocs; i++) {
            int numInnerObjects = field1InnerObjects[i] = scaledRandomIntBetween(1, numDocs);
            XContentBuilder source = jsonBuilder().startObject()
                .field("foo", i)
                .startArray("field1");
            for (int j = 0; j < numInnerObjects; j++) {
                source.startObject().field("x", "y").endObject();
            }
            numInnerObjects = field2InnerObjects[i] = scaledRandomIntBetween(1, numDocs);
            source.endArray().startArray("field2");
            for (int j = 0; j < numInnerObjects; j++) {
                source.startObject().field("x", "y").endObject();
            }
            source.endArray().endObject();
            requestBuilders.add(client().prepareIndex("idx", "type", Integer.toString(i)).setSource(source));
        }
        indexRandom(true, requestBuilders);

        int size = randomIntBetween(0, numDocs);
        BoolQueryBuilder boolQuery = new BoolQueryBuilder();
        // Sort inner hits by _doc so offsets come back in index order.
        boolQuery.should(nestedQuery("field1", matchAllQuery(), ScoreMode.Avg).innerHit(new InnerHitBuilder("a").setSize(size)
            .addSort(new FieldSortBuilder("_doc").order(SortOrder.ASC))));
        boolQuery.should(nestedQuery("field2", matchAllQuery(), ScoreMode.Avg).innerHit(new InnerHitBuilder("b")
            .addSort(new FieldSortBuilder("_doc").order(SortOrder.ASC)).setSize(size)));
        SearchResponse searchResponse = client().prepareSearch("idx")
            .setQuery(boolQuery)
            .setSize(numDocs)
            .addSort("foo", SortOrder.ASC)
            .get();
        assertNoFailures(searchResponse);
        assertHitCount(searchResponse, numDocs);
        assertThat(searchResponse.getHits().getHits().length, equalTo(numDocs));
        for (int i = 0; i < numDocs; i++) {
            SearchHit searchHit = searchResponse.getHits().getAt(i);
            assertThat(searchHit.getShard(), notNullValue());
            SearchHits inner = searchHit.getInnerHits().get("a");
            assertThat(inner.getTotalHits().value, equalTo((long) field1InnerObjects[i]));
            // Only the first `size` inner hits are returned; offsets must be 0..j in order.
            for (int j = 0; j < field1InnerObjects[i] && j < size; j++) {
                SearchHit innerHit = inner.getAt(j);
                assertThat(innerHit.getNestedIdentity().getField().string(), equalTo("field1"));
                assertThat(innerHit.getNestedIdentity().getOffset(), equalTo(j));
                assertThat(innerHit.getNestedIdentity().getChild(), nullValue());
            }
            inner = searchHit.getInnerHits().get("b");
            assertThat(inner.getTotalHits().value, equalTo((long) field2InnerObjects[i]));
            for (int j = 0; j < field2InnerObjects[i] && j < size; j++) {
                SearchHit innerHit = inner.getAt(j);
                assertThat(innerHit.getNestedIdentity().getField().string(), equalTo("field2"));
                assertThat(innerHit.getNestedIdentity().getOffset(), equalTo(j));
                assertThat(innerHit.getNestedIdentity().getChild(), nullValue());
            }
        }
    }

    /**
     * Two-level nesting (comments.remarks): inner hits can be nested inside inner hits, and
     * the second level can also be queried directly by its full path.
     */
    public void testNestedMultipleLayers() throws Exception {
        assertAcked(prepareCreate("articles").addMapping("article", jsonBuilder().startObject()
            .startObject("article").startObject("properties")
                .startObject("comments")
                    .field("type", "nested")
                    .startObject("properties")
                        .startObject("message")
                            .field("type", "text")
                        .endObject()
                        .startObject("remarks")
                            .field("type", "nested")
                            .startObject("properties")
                                .startObject("message").field("type", "text").endObject()
                            .endObject()
                        .endObject()
                    .endObject()
                .endObject()
                .startObject("title")
                    .field("type", "text")
                .endObject()
            .endObject().endObject().endObject()));

        List<IndexRequestBuilder> requests = new ArrayList<>();
        requests.add(client().prepareIndex("articles", "article", "1").setSource(jsonBuilder().startObject()
            .field("title", "quick brown fox")
            .startArray("comments")
                .startObject()
                    .field("message", "fox eat quick")
                    .startArray("remarks").startObject().field("message", "good").endObject().endArray()
                .endObject()
            .endArray()
            .endObject()));
        requests.add(client().prepareIndex("articles", "article", "2").setSource(jsonBuilder().startObject()
            .field("title", "big gray elephant")
            .startArray("comments")
                .startObject()
                    .field("message", "elephant captured")
                    .startArray("remarks").startObject().field("message", "bad").endObject().endArray()
                .endObject()
            .endArray()
            .endObject()));
        indexRandom(true, requests);

        // Outer nested query (default key "comments") wrapping an inner nested query whose
        // inner hit is named "remark"; the child identity carries the second-level offset.
        SearchResponse response = client().prepareSearch("articles")
            .setQuery(
                nestedQuery("comments",
                    nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "good"), ScoreMode.Avg)
                        .innerHit(new InnerHitBuilder("remark")),
                    ScoreMode.Avg).innerHit(new InnerHitBuilder())
            ).get();
        assertNoFailures(response);
        assertHitCount(response, 1);
        assertSearchHit(response, 1, hasId("1"));
        assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
        SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments");
        assertThat(innerHits.getTotalHits().value, equalTo(1L));
        assertThat(innerHits.getHits().length, equalTo(1));
        assertThat(innerHits.getAt(0).getId(), equalTo("1"));
        assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
        innerHits = innerHits.getAt(0).getInnerHits().get("remark");
        assertThat(innerHits.getTotalHits().value, equalTo(1L));
        assertThat(innerHits.getHits().length, equalTo(1));
        assertThat(innerHits.getAt(0).getId(), equalTo("1"));
        assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
        assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getField().string(), equalTo("remarks"));
        assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getOffset(), equalTo(0));

        // Directly refer to the second level:
        response = client().prepareSearch("articles")
            .setQuery(nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad"), ScoreMode.Avg)
                .innerHit(new InnerHitBuilder())).get();
        assertNoFailures(response);
        assertHitCount(response, 1);
        assertSearchHit(response, 1, hasId("2"));
        assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
        innerHits = response.getHits().getAt(0).getInnerHits().get("comments.remarks");
        assertThat(innerHits.getTotalHits().value, equalTo(1L));
        assertThat(innerHits.getHits().length, equalTo(1));
        assertThat(innerHits.getAt(0).getId(), equalTo("2"));
        assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
        assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getField().string(), equalTo("remarks"));
        assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getOffset(), equalTo(0));

        // Same two-level shape as the first query, matching doc 2 via "bad".
        response = client().prepareSearch("articles")
            .setQuery(
                nestedQuery("comments",
                    nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad"), ScoreMode.Avg)
                        .innerHit(new InnerHitBuilder("remark")),
                    ScoreMode.Avg).innerHit(new InnerHitBuilder())
            ).get();
        assertNoFailures(response);
        assertHitCount(response, 1);
        assertSearchHit(response, 1, hasId("2"));
        assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
        innerHits = response.getHits().getAt(0).getInnerHits().get("comments");
        assertThat(innerHits.getTotalHits().value, equalTo(1L));
        assertThat(innerHits.getHits().length, equalTo(1));
        assertThat(innerHits.getAt(0).getId(), equalTo("2"));
        assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
        innerHits = innerHits.getAt(0).getInnerHits().get("remark");
        assertThat(innerHits.getTotalHits().value, equalTo(1L));
        assertThat(innerHits.getHits().length, equalTo(1));
        assertThat(innerHits.getAt(0).getId(), equalTo("2"));
        assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
        assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
        assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getField().string(), equalTo("remarks"));
        assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getOffset(), equalTo(0));
    }

    // Issue #9723
    /** A nested field written as a single JSON object (not an array) still yields inner hits. */
    public void testNestedDefinedAsObject() throws Exception {
        assertAcked(prepareCreate("articles").addMapping("article", "comments", "type=nested", "title", "type=text"));

        List<IndexRequestBuilder> requests = new ArrayList<>();
        requests.add(client().prepareIndex("articles", "article", "1").setSource(jsonBuilder().startObject()
            .field("title", "quick brown fox")
            .startObject("comments").field("message", "fox eat quick").endObject()
            .endObject()));
        indexRandom(true, requests);

        SearchResponse response = client().prepareSearch("articles")
            .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg)
                .innerHit(new InnerHitBuilder()))
            .get();
        assertNoFailures(response);
        assertHitCount(response, 1);
        assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(1L));
        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getId(), equalTo("1"));
        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(),
            equalTo("comments"));
        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(),
            equalTo(0));
        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(),
            nullValue());
    }

    /**
     * A nested field under a plain object parent: fetching inner-hit _source must fail with a
     * clear error (all parents of a nested field must themselves be nested), but disabling
     * _source fetching makes inner hits work.
     */
    public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception {
        assertAcked(prepareCreate("articles")
            // number_of_shards = 1, because then we catch the expected exception in the same way.
            // (See expectThrows(...) below)
            .setSettings(Settings.builder().put("index.number_of_shards", 1))
            .addMapping("article", jsonBuilder().startObject()
                .startObject("properties")
                    .startObject("comments")
                        .field("type", "object")
                        .startObject("properties")
                            .startObject("messages").field("type", "nested").endObject()
                        .endObject()
                    .endObject()
                .endObject()
                .endObject()
            )
        );

        List<IndexRequestBuilder> requests = new ArrayList<>();
        requests.add(client().prepareIndex("articles", "article", "1").setSource(jsonBuilder().startObject()
            .field("title", "quick brown fox")
            .startArray("comments")
                .startObject()
                    .startArray("messages")
                        .startObject().field("message", "fox eat quick").endObject()
                        .startObject().field("message", "bear eat quick").endObject()
                    .endArray()
                .endObject()
                .startObject()
                    .startArray("messages")
                        .startObject().field("message", "no fox").endObject()
                    .endArray()
                .endObject()
            .endArray()
            .endObject()));
        indexRandom(true, requests);

        // Default inner hit fetches _source -> must fail because parent "comments" is object.
        Exception e = expectThrows(Exception.class,
            () -> client().prepareSearch("articles").setQuery(nestedQuery("comments.messages",
                matchQuery("comments.messages.message", "fox"), ScoreMode.Avg).innerHit(new InnerHitBuilder())).get());
        assertEquals("Cannot execute inner hits. One or more parent object fields of nested field [comments.messages] are " +
            "not nested. All parent fields need to be nested fields too", e.getCause().getCause().getMessage());

        // Explicitly enabling _source fails the same way.
        e = expectThrows(Exception.class,
            () -> client().prepareSearch("articles").setQuery(nestedQuery("comments.messages",
                matchQuery("comments.messages.message", "fox"), ScoreMode.Avg).innerHit(new InnerHitBuilder()
                .setFetchSourceContext(new FetchSourceContext(true)))).get());
        assertEquals("Cannot execute inner hits. One or more parent object fields of nested field [comments.messages] are " +
            "not nested. All parent fields need to be nested fields too", e.getCause().getCause().getMessage());

        // With _source disabled, inner hits succeed; offsets count nested docs across both
        // "comments" objects (score order puts offset 2 first here).
        SearchResponse response = client().prepareSearch("articles")
            .setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox"), ScoreMode.Avg)
                .innerHit(new InnerHitBuilder().setFetchSourceContext(new FetchSourceContext(false)))).get();
        assertNoFailures(response);
        assertHitCount(response, 1);
        SearchHit hit = response.getHits().getAt(0);
        assertThat(hit.getId(), equalTo("1"));
        SearchHits messages = hit.getInnerHits().get("comments.messages");
        assertThat(messages.getTotalHits().value, equalTo(2L));
        assertThat(messages.getAt(0).getId(), equalTo("1"));
        assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages"));
        assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(2));
        assertThat(messages.getAt(0).getNestedIdentity().getChild(), nullValue());
        assertThat(messages.getAt(1).getId(), equalTo("1"));
        assertThat(messages.getAt(1).getNestedIdentity().getField().string(), equalTo("comments.messages"));
        assertThat(messages.getAt(1).getNestedIdentity().getOffset(), equalTo(0));
        assertThat(messages.getAt(1).getNestedIdentity().getChild(), nullValue());

        response = client().prepareSearch("articles")
            .setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "bear"), ScoreMode.Avg)
                .innerHit(new InnerHitBuilder().setFetchSourceContext(new FetchSourceContext(false)))).get();
        assertNoFailures(response);
        assertHitCount(response, 1);
        hit = response.getHits().getAt(0);
        assertThat(hit.getId(), equalTo("1"));
        messages = hit.getInnerHits().get("comments.messages");
        assertThat(messages.getTotalHits().value, equalTo(1L));
        assertThat(messages.getAt(0).getId(), equalTo("1"));
        assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages"));
        assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(1));
        assertThat(messages.getAt(0).getNestedIdentity().getChild(), nullValue());

        // index the message in an object form instead of an array
        requests = new ArrayList<>();
        requests.add(client().prepareIndex("articles", "article", "1").setSource(jsonBuilder().startObject()
            .field("title", "quick brown fox")
            .startObject("comments").startObject("messages").field("message", "fox eat quick").endObject().endObject()
            .endObject()));
        indexRandom(true, requests);
        response = client().prepareSearch("articles")
            .setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox"), ScoreMode.Avg)
                .innerHit(new InnerHitBuilder().setFetchSourceContext(new FetchSourceContext(false)))).get();
        assertNoFailures(response);
        assertHitCount(response, 1);
        hit = response.getHits().getAt(0);
        assertThat(hit.getId(), equalTo("1"));
        messages = hit.getInnerHits().get("comments.messages");
        assertThat(messages.getTotalHits().value, equalTo(1L));
        assertThat(messages.getAt(0).getId(), equalTo("1"));
        assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages"));
        assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
        assertThat(messages.getAt(0).getNestedIdentity().getChild(), nullValue());
    }

    /**
     * Named queries ({@code queryName}) inside a nested query must be reported per inner hit
     * via {@code getMatchedQueries()}.
     */
    public void testMatchesQueriesNestedInnerHits() throws Exception {
        XContentBuilder builder = jsonBuilder().startObject()
            .startObject("type1")
                .startObject("properties")
                    .startObject("nested1")
                        .field("type", "nested")
                        .startObject("properties")
                            .startObject("n_field1")
                                .field("type", "keyword")
                            .endObject()
                        .endObject()
                    .endObject()
                    .startObject("field1")
                        .field("type", "long")
                    .endObject()
                .endObject()
            .endObject()
            .endObject();
        assertAcked(prepareCreate("test").addMapping("type1", builder));
        ensureGreen();

        List<IndexRequestBuilder> requests = new ArrayList<>();
        int numDocs = randomIntBetween(2, 35);
        // Doc 0 matches "test1" (n_value1_1) and "test3" (n_value2_2).
        requests.add(client().prepareIndex("test", "type1", "0").setSource(jsonBuilder().startObject()
            .field("field1", 0)
            .startArray("nested1")
                .startObject()
                    .field("n_field1", "n_value1_1")
                    .field("n_field2", "n_value2_1")
                .endObject()
                .startObject()
                    .field("n_field1", "n_value1_2")
                    .field("n_field2", "n_value2_2")
                .endObject()
            .endArray()
            .endObject()));
        // Doc 1 matches only "test2" (n_value1_3).
        requests.add(client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
            .field("field1", 1)
            .startArray("nested1")
                .startObject()
                    .field("n_field1", "n_value1_8")
                    .field("n_field2", "n_value2_5")
                .endObject()
                .startObject()
                    .field("n_field1", "n_value1_3")
                    .field("n_field2", "n_value2_1")
                .endObject()
            .endArray()
            .endObject()));
        // Docs 2..numDocs-1 all match only "test3" (n_value2_2).
        for (int i = 2; i < numDocs; i++) {
            requests.add(client().prepareIndex("test", "type1", String.valueOf(i)).setSource(jsonBuilder().startObject()
                .field("field1", i)
                .startArray("nested1")
                    .startObject()
                        .field("n_field1", "n_value1_8")
                        .field("n_field2", "n_value2_5")
                    .endObject()
                    .startObject()
                        .field("n_field1", "n_value1_2")
                        .field("n_field2", "n_value2_2")
                    .endObject()
                .endArray()
                .endObject()));
        }

        indexRandom(true, requests);
        waitForRelocation(ClusterHealthStatus.GREEN);

        QueryBuilder query = boolQuery()
            .should(termQuery("nested1.n_field1", "n_value1_1").queryName("test1"))
            .should(termQuery("nested1.n_field1", "n_value1_3").queryName("test2"))
            .should(termQuery("nested1.n_field2", "n_value2_2").queryName("test3"));
        query = nestedQuery("nested1", query, ScoreMode.Avg).innerHit(
            new InnerHitBuilder().addSort(new FieldSortBuilder("nested1.n_field1").order(SortOrder.ASC)));
        SearchResponse searchResponse = client().prepareSearch("test")
            .setQuery(query)
            .setSize(numDocs)
            .addSort("field1", SortOrder.ASC)
            .get();
        assertNoFailures(searchResponse);
        assertAllSuccessful(searchResponse);
        assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) numDocs));
        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("0"));
        assertThat(searchResponse.getHits().getAt(0).getInnerHits().get("nested1").getTotalHits().value, equalTo(2L));
        assertThat(searchResponse.getHits().getAt(0).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length,
            equalTo(1));
        assertThat(searchResponse.getHits().getAt(0).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0],
            equalTo("test1"));
        assertThat(searchResponse.getHits().getAt(0).getInnerHits().get("nested1").getAt(1).getMatchedQueries().length,
            equalTo(1));
        assertThat(searchResponse.getHits().getAt(0).getInnerHits().get("nested1").getAt(1).getMatchedQueries()[0],
            equalTo("test3"));

        assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("1"));
        assertThat(searchResponse.getHits().getAt(1).getInnerHits().get("nested1").getTotalHits().value, equalTo(1L));
        assertThat(searchResponse.getHits().getAt(1).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length,
            equalTo(1));
        assertThat(searchResponse.getHits().getAt(1).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0],
            equalTo("test2"));

        for (int i = 2; i < numDocs; i++) {
            assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(String.valueOf(i)));
            assertThat(searchResponse.getHits().getAt(i).getInnerHits().get("nested1").getTotalHits().value, equalTo(1L));
            assertThat(searchResponse.getHits().getAt(i).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length,
                equalTo(1));
            assertThat(searchResponse.getHits().getAt(i).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0],
                equalTo("test3"));
        }
    }

    /**
     * Source filtering on inner hits: include filters use the full field path
     * (comments.message), and filtering on a non-existent field returns an empty _source
     * rather than failing.
     */
    public void testNestedSource() throws Exception {
        assertAcked(prepareCreate("index1").addMapping("message", "comments", "type=nested"));
        client().prepareIndex("index1", "message", "1").setSource(jsonBuilder().startObject()
            .field("message", "quick brown fox")
            .startArray("comments")
                .startObject().field("message", "fox eat quick").field("x", "y").endObject()
                .startObject().field("message", "fox ate rabbit x y z").field("x", "y").endObject()
                .startObject().field("message", "rabbit got away").field("x", "y").endObject()
            .endArray()
            .endObject()).get();
        refresh();

        // the field name (comments.message) used for source filtering should be the same as when using that field for
        // other features (like in the query dsl or aggs) in order for consistency:
        SearchResponse response = client().prepareSearch()
            .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.None)
                .innerHit(new InnerHitBuilder().setFetchSourceContext(new FetchSourceContext(true,
                    new String[]{"comments.message"}, null))))
            .get();
        assertNoFailures(response);
        assertHitCount(response, 1);

        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L));
        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(1));
        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().get("message"),
            equalTo("fox eat quick"));
        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(1).getSourceAsMap().size(), equalTo(1));
        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(1).getSourceAsMap().get("message"),
            equalTo("fox ate rabbit x y z"));

        // Without a filter the whole nested _source (message + x) comes back.
        response = client().prepareSearch()
            .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.None)
                .innerHit(new InnerHitBuilder()))
            .get();
        assertNoFailures(response);
        assertHitCount(response, 1);

        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L));
        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(2));
        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().get("message"),
            equalTo("fox eat quick"));
        // NOTE(review): the next line re-checks hit 0's size; presumably it was meant to
        // check hit 1 (like the message assertion that follows) — confirm before changing.
        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(2));
        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(1).getSourceAsMap().get("message"),
            equalTo("fox ate rabbit x y z"));

        // Source filter on a field that does not exist inside the nested document and just check that we do not fail and
        // return an empty _source:
        response = client().prepareSearch()
            .setQuery(nestedQuery("comments", matchQuery("comments.message", "away"), ScoreMode.None)
                .innerHit(new InnerHitBuilder().setFetchSourceContext(new FetchSourceContext(true,
                    new String[]{"comments.missing_field"}, null))))
            .get();
        assertNoFailures(response);
        assertHitCount(response, 1);
        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(1L));
        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(0));
    }

    /**
     * ignore_unmapped on both the nested query and its inner hit lets a multi-index search
     * succeed when one index lacks the nested mapping.
     */
    public void testInnerHitsWithIgnoreUnmapped() throws Exception {
        assertAcked(prepareCreate("index1")
            .addMapping("_doc", "nested_type", "type=nested")
        );
        createIndex("index2");
        client().prepareIndex("index1", "_doc", "1").setSource("nested_type", Collections.singletonMap("key", "value")).get();
        client().prepareIndex("index2", "type", "3").setSource("key", "value").get();
        refresh();

        SearchResponse response = client().prepareSearch("index1", "index2")
            .setQuery(boolQuery()
                .should(nestedQuery("nested_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true)
                    .innerHit(new InnerHitBuilder().setIgnoreUnmapped(true)))
                .should(termQuery("key", "value"))
            )
            .get();
        assertNoFailures(response);
        assertHitCount(response, 2);
        assertSearchHits(response, "1", "3");
    }

    /**
     * With max_inner_result_window raised to the max array length, an inner-hit size just
     * below that limit is accepted (size is not silently capped by maxDoc).
     */
    public void testUseMaxDocInsteadOfSize() throws Exception {
        assertAcked(prepareCreate("index2").addMapping("type", "nested", "type=nested"));
        client().admin().indices().prepareUpdateSettings("index2")
            .setSettings(Collections.singletonMap(IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey(),
                ArrayUtil.MAX_ARRAY_LENGTH))
            .get();
        client().prepareIndex("index2", "type", "1").setSource(jsonBuilder().startObject()
            .startArray("nested")
                .startObject()
                    .field("field", "value1")
                .endObject()
            .endArray()
            .endObject())
            .setRefreshPolicy(IMMEDIATE)
            .get();
        QueryBuilder query = nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg)
            .innerHit(new InnerHitBuilder().setSize(ArrayUtil.MAX_ARRAY_LENGTH - 1));
        SearchResponse response = client().prepareSearch("index2")
            .setQuery(query)
            .get();
        assertNoFailures(response);
        assertHitCount(response, 1);
    }

    /**
     * from + size above index.max_inner_result_window (default 100) fails with a descriptive
     * error naming the inner hit; raising the setting to 110 makes the same requests pass.
     */
    public void testTooHighResultWindow() throws Exception {
        assertAcked(prepareCreate("index2").addMapping("type", "nested", "type=nested"));
        client().prepareIndex("index2", "type", "1").setSource(jsonBuilder().startObject()
            .startArray("nested")
                .startObject()
                    .field("field", "value1")
                .endObject()
            .endArray()
            .endObject())
            .setRefreshPolicy(IMMEDIATE)
            .get();
        // from=50, size=10 -> 60, within the default window of 100.
        SearchResponse response = client().prepareSearch("index2")
            .setQuery(nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg)
                .innerHit(new InnerHitBuilder().setFrom(50).setSize(10).setName("_name")))
            .get();
        assertNoFailures(response);
        assertHitCount(response, 1);

        // Both from=100/size=10 and from=10/size=100 exceed the window (110 > 100).
        Exception e = expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch("index2")
            .setQuery(nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg)
                .innerHit(new InnerHitBuilder().setFrom(100).setSize(10).setName("_name")))
            .get());
        assertThat(e.getCause().getMessage(),
            containsString("the inner hit definition's [_name]'s from + size must be less than or equal to: [100] but was [110]"));
        e = expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch("index2")
            .setQuery(nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg)
                .innerHit(new InnerHitBuilder().setFrom(10).setSize(100).setName("_name")))
            .get());
        assertThat(e.getCause().getMessage(),
            containsString("the inner hit definition's [_name]'s from + size must be less than or equal to: [100] but was [110]"));

        // Raising the per-index limit to 110 makes both requests succeed.
        client().admin().indices().prepareUpdateSettings("index2")
            .setSettings(Collections.singletonMap(IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey(), 110))
            .get();
        response = client().prepareSearch("index2")
            .setQuery(nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg)
                .innerHit(new InnerHitBuilder().setFrom(100).setSize(10).setName("_name")))
            .get();
        assertNoFailures(response);
        response = client().prepareSearch("index2")
            .setQuery(nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg)
                .innerHit(new InnerHitBuilder().setFrom(10).setSize(100).setName("_name")))
            .get();
        assertNoFailures(response);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.core.server.cluster; import java.io.PrintWriter; import java.io.StringWriter; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.Executor; import java.util.concurrent.ScheduledExecutorService; import org.apache.activemq.artemis.api.core.ActiveMQException; import org.apache.activemq.artemis.api.core.ActiveMQExceptionType; import org.apache.activemq.artemis.api.core.BroadcastGroupConfiguration; import org.apache.activemq.artemis.api.core.DiscoveryGroupConfiguration; import org.apache.activemq.artemis.api.core.Interceptor; import org.apache.activemq.artemis.api.core.SimpleString; import org.apache.activemq.artemis.api.core.TransportConfiguration; import org.apache.activemq.artemis.api.core.client.ActiveMQClient; import org.apache.activemq.artemis.core.client.impl.ServerLocatorInternal; import org.apache.activemq.artemis.core.config.BridgeConfiguration; import org.apache.activemq.artemis.core.config.ClusterConnectionConfiguration; import org.apache.activemq.artemis.core.config.Configuration; import 
org.apache.activemq.artemis.core.filter.impl.FilterImpl; import org.apache.activemq.artemis.core.postoffice.Binding; import org.apache.activemq.artemis.core.postoffice.PostOffice; import org.apache.activemq.artemis.core.protocol.core.Channel; import org.apache.activemq.artemis.core.protocol.core.CoreRemotingConnection; import org.apache.activemq.artemis.core.protocol.core.Packet; import org.apache.activemq.artemis.core.protocol.core.impl.PacketImpl; import org.apache.activemq.artemis.core.protocol.core.impl.wireformat.ActiveMQExceptionMessage; import org.apache.activemq.artemis.core.server.ActiveMQComponent; import org.apache.activemq.artemis.core.server.ActiveMQServer; import org.apache.activemq.artemis.core.server.ActiveMQServerLogger; import org.apache.activemq.artemis.core.server.NodeManager; import org.apache.activemq.artemis.core.server.Queue; import org.apache.activemq.artemis.core.server.transformer.Transformer; import org.apache.activemq.artemis.core.server.cluster.ha.HAManager; import org.apache.activemq.artemis.core.server.cluster.impl.BridgeImpl; import org.apache.activemq.artemis.core.server.cluster.impl.BroadcastGroupImpl; import org.apache.activemq.artemis.core.server.cluster.impl.ClusterConnectionImpl; import org.apache.activemq.artemis.core.server.cluster.qourum.QuorumManager; import org.apache.activemq.artemis.core.server.impl.Activation; import org.apache.activemq.artemis.core.server.management.ManagementService; import org.apache.activemq.artemis.spi.core.protocol.RemotingConnection; import org.apache.activemq.artemis.spi.core.remoting.Acceptor; import org.apache.activemq.artemis.utils.ExecutorFactory; import org.apache.activemq.artemis.utils.FutureLatch; import org.apache.activemq.artemis.utils.collections.ConcurrentHashSet; import org.jboss.logging.Logger; /** * A ClusterManager manages {@link ClusterConnection}s, {@link BroadcastGroup}s and {@link Bridge}s. 
 * <p>
 * Note that {@link org.apache.activemq.artemis.core.server.cluster.impl.ClusterConnectionBridge}s extend Bridges but are controlled over through
 * {@link ClusterConnectionImpl}. As a node is discovered a new {@link org.apache.activemq.artemis.core.server.cluster.impl.ClusterConnectionBridge} is
 * deployed.
 * <p>
 * Lifecycle: {@link #deploy()} (STOPPED -> DEPLOYED) then {@link #start()} (-> STARTED) then
 * {@link #stop()} (-> STOPPING -> STOPPED). {@code deploy}/{@code start} are synchronized on this
 * instance; {@code stop} is only partially synchronized (see {@link State#STOPPING}).
 */
public class ClusterManager implements ActiveMQComponent {

   private static final Logger logger = Logger.getLogger(ClusterManager.class);

   // Created in the constructor; coordinates cluster control channels and quorum.
   private ClusterController clusterController;

   // Obtained from the server's Activation in the constructor.
   private HAManager haManager;

   // Keyed by broadcast-group name. Guarded by synchronized lifecycle methods.
   private final Map<String, BroadcastGroup> broadcastGroups = new HashMap<>();

   // Keyed by bridge name (with "-<i>" suffix when concurrency > 1, see deployBridge).
   private final Map<String, Bridge> bridges = new HashMap<>();

   private final ExecutorFactory executorFactory;

   private final ActiveMQServer server;

   private final PostOffice postOffice;

   private final ScheduledExecutorService scheduledExecutor;

   // First cluster connection deployed becomes the default; reset in clearClusterConnections().
   private ClusterConnection defaultClusterConnection;

   private final ManagementService managementService;

   private final Configuration configuration;

   // Addresses that protocol managers should ignore; cleared on stop().
   private Set<String> protocolIgnoredAddresses = new HashSet<>();

   /** Delegates to the {@link ClusterController}'s quorum manager. */
   public QuorumManager getQuorumManager() {
      return clusterController.getQuorumManager();
   }

   public ClusterController getClusterController() {
      return clusterController;
   }

   public HAManager getHAManager() {
      return haManager;
   }

   /** Registers a cluster control channel handler with the underlying {@link ClusterController}. */
   public void addClusterChannelHandler(Channel channel, Acceptor acceptorUsed, CoreRemotingConnection remotingConnection, Activation activation) {
      clusterController.addClusterChannelHandler(channel, acceptorUsed, remotingConnection, activation);
   }

   enum State {
      STOPPED,
      /**
       * Used because {@link ClusterManager#stop()} method is not completely synchronized
       */
      STOPPING,
      /**
       * Deployed means {@link ClusterManager#deploy()} was called but
       * {@link ClusterManager#start()} was not called.
       * <p>
       * We need the distinction if {@link ClusterManager#stop()} is called before 'start'. As
       * otherwise we would leak locators.
       */
      DEPLOYED,
      STARTED,
   }

   // volatile: read unsynchronized by isStarted() and by the unsynchronized tail of stop().
   private volatile State state = State.STOPPED;

   // The cluster connections which link this node to other cluster nodes, keyed by name.
   private final Map<String, ClusterConnection> clusterConnections = new HashMap<>();

   // Every locator created for bridges/cluster use; closed and cleared in stop().
   private final Set<ServerLocatorInternal> clusterLocators = new ConcurrentHashSet<>();

   private final Executor executor;

   private final NodeManager nodeManager;

   public ClusterManager(final ExecutorFactory executorFactory,
                         final ActiveMQServer server,
                         final PostOffice postOffice,
                         final ScheduledExecutorService scheduledExecutor,
                         final ManagementService managementService,
                         final Configuration configuration,
                         final NodeManager nodeManager,
                         final boolean useQuorumManager) {
      this.executorFactory = executorFactory;
      executor = executorFactory.getExecutor();
      this.server = server;
      this.postOffice = postOffice;
      this.scheduledExecutor = scheduledExecutor;
      this.managementService = managementService;
      this.configuration = configuration;
      this.nodeManager = nodeManager;
      clusterController = new ClusterController(server, scheduledExecutor, useQuorumManager);
      haManager = server.getActivation().getHAManager();
   }

   /** Human-readable dump of this manager and each cluster connection, for diagnostics. */
   public String describe() {
      StringWriter str = new StringWriter();
      PrintWriter out = new PrintWriter(str);

      out.println("Information on " + this);
      out.println("*******************************************************");

      for (ClusterConnection conn : cloneClusterConnections()) {
         out.println(conn.describe());
      }

      out.println("*******************************************************");

      return str.toString();
   }

   /**
    * Return the default ClusterConnection to be used case it's not defined by the acceptor
    *
    * @return default connection
    */
   public ClusterConnection getDefaultConnection(TransportConfiguration acceptorConfig) {
      if (acceptorConfig == null) {
         // if the parameter is null, we just return whatever is defined on defaultClusterConnection
         return defaultClusterConnection;
      } else if (defaultClusterConnection != null && defaultClusterConnection.getConnector().isEquivalent(acceptorConfig)) {
         return defaultClusterConnection;
      } else {
         // fall back to the first connection whose connector matches the acceptor, if any
         for (ClusterConnection conn : cloneClusterConnections()) {
            if (conn.getConnector().isEquivalent(acceptorConfig)) {
               return conn;
            }
         }
         return null;
      }
   }

   @Override
   public String toString() {
      return "ClusterManagerImpl[server=" + server + "]@" + System.identityHashCode(this);
   }

   public String getNodeId() {
      return nodeManager.getNodeId().toString();
   }

   public String getBackupGroupName() {
      return server.getHAPolicy().getBackupGroupName();
   }

   public String getScaleDownGroupName() {
      return server.getHAPolicy().getScaleDownGroupName();
   }

   /**
    * Creates (but does not start) the configured broadcast groups and cluster connections,
    * then starts the cluster controller.
    *
    * @throws IllegalStateException if called in any state other than STOPPED
    */
   public synchronized void deploy() throws Exception {
      if (state == State.STOPPED) {
         state = State.DEPLOYED;
      } else {
         throw new IllegalStateException();
      }

      for (BroadcastGroupConfiguration config : configuration.getBroadcastGroupConfigurations()) {
         deployBroadcastGroup(config);
      }

      for (ClusterConnectionConfiguration config : configuration.getClusterConfigurations()) {
         deployClusterConnection(config);
      }

      /*
       * only start if we are actually in a cluster
       */
      clusterController.start();
   }

   /**
    * Starts broadcast groups, cluster connections, configured bridges and the HA manager.
    * Individual component start failures are logged, not rethrown, so one bad component
    * does not prevent the rest from starting. Idempotent when already STARTED.
    */
   @Override
   public synchronized void start() throws Exception {
      if (state == State.STARTED) {
         return;
      }

      for (BroadcastGroup group : broadcastGroups.values()) {
         try {
            group.start();
         } catch (Exception e) {
            ActiveMQServerLogger.LOGGER.unableToStartBroadcastGroup(e, group.getName());
         }
      }

      for (ClusterConnection conn : clusterConnections.values()) {
         try {
            conn.start();
         } catch (Exception e) {
            ActiveMQServerLogger.LOGGER.unableToStartClusterConnection(e, conn.getName());
         }
      }

      deployConfiguredBridges();

      for (Bridge bridge : bridges.values()) {
         try {
            bridge.start();
         } catch (Exception e) {
            ActiveMQServerLogger.LOGGER.unableToStartBridge(e, bridge.getName());
         }
      }

      //now start the ha manager
      haManager.start();

      state = State.STARTED;
   }

   // Deploys every bridge from the static configuration (called from start()).
   private void deployConfiguredBridges() throws Exception {
      for (BridgeConfiguration config : configuration.getBridgeConfigurations()) {
         deployBridge(config);
      }
   }

   /**
    * Stops everything in reverse: HA manager, controller, broadcast groups, cluster
    * connections and bridges, then closes all locators.
    * <p>
    * Only the component-stopping section is synchronized (hence State.STOPPING);
    * locator closing happens outside the lock.
    */
   @Override
   public void stop() throws Exception {
      haManager.stop();
      synchronized (this) {
         if (state == State.STOPPED || state == State.STOPPING) {
            return;
         }
         state = State.STOPPING;

         clusterController.stop();

         for (BroadcastGroup group : broadcastGroups.values()) {
            group.stop();
            managementService.unregisterBroadcastGroup(group.getName());
         }

         broadcastGroups.clear();

         for (ClusterConnection clusterConnection : clusterConnections.values()) {
            clusterConnection.stop();
            managementService.unregisterCluster(clusterConnection.getName().toString());
         }

         for (Bridge bridge : bridges.values()) {
            bridge.stop();
            managementService.unregisterBridge(bridge.getName().toString());
         }

         bridges.clear();
      }

      // Close locators outside the monitor; close() failures are logged per locator.
      for (ServerLocatorInternal clusterLocator : clusterLocators) {
         try {
            clusterLocator.close();
         } catch (Exception e) {
            ActiveMQServerLogger.LOGGER.errorClosingServerLocator(e, clusterLocator);
         }
      }
      clusterLocators.clear();
      state = State.STOPPED;

      clearClusterConnections();

      protocolIgnoredAddresses.clear();
   }

   /**
    * Blocks (up to 10s) until every task already queued on this manager's executor has run;
    * logs and dumps threads on timeout instead of throwing.
    */
   public void flushExecutor() {
      FutureLatch future = new FutureLatch();
      executor.execute(future);
      if (!future.await(10000)) {
         ActiveMQServerLogger.LOGGER.couldNotFlushClusterManager(this.toString());
         server.threadDump();
      }
   }

   @Override
   public boolean isStarted() {
      return state == State.STARTED;
   }

   /** @return a defensive copy; mutations do not affect the deployed bridges. */
   public Map<String, Bridge> getBridges() {
      return new HashMap<>(bridges);
   }

   /** @return a defensive snapshot of the deployed cluster connections. */
   public Set<ClusterConnection> getClusterConnections() {
      return new HashSet<>(clusterConnections.values());
   }

   /** @return a defensive snapshot of the deployed broadcast groups. */
   public Set<BroadcastGroup> getBroadcastGroups() {
      return new HashSet<>(broadcastGroups.values());
   }

   public ClusterConnection getClusterConnection(final String name) {
      return clusterConnections.get(name);
   }

   public void removeClusterLocator(final ServerLocatorInternal serverLocator) {
      this.clusterLocators.remove(serverLocator);
   }

   /**
    * Validates the bridge configuration, builds a ServerLocator for its target (discovery
    * group or static connectors), then creates, registers and starts one BridgeImpl per
    * configured concurrency slot. Invalid configurations are logged and skipped, not thrown.
    */
   public synchronized void deployBridge(final BridgeConfiguration config) throws Exception {
      if (config.getName() == null) {
         ActiveMQServerLogger.LOGGER.bridgeNotUnique();

         return;
      }

      if (config.getQueueName() == null) {
         ActiveMQServerLogger.LOGGER.bridgeNoQueue(config.getName());

         return;
      }

      // NOTE(review): a null forwarding address is only warned about, not rejected —
      // deployment deliberately continues in that case.
      if (config.getForwardingAddress() == null) {
         ActiveMQServerLogger.LOGGER.bridgeNoForwardAddress(config.getName());
      }

      if (bridges.containsKey(config.getName())) {
         ActiveMQServerLogger.LOGGER.bridgeAlreadyDeployed(config.getName());

         return;
      }

      Transformer transformer = server.getServiceRegistry().getBridgeTransformer(config.getName(), config.getTransformerConfiguration());

      Binding binding = postOffice.getBinding(new SimpleString(config.getQueueName()));

      if (binding == null) {
         ActiveMQServerLogger.LOGGER.bridgeQueueNotFound(config.getQueueName(), config.getName());

         return;
      }

      if (server.hasBrokerBridgePlugins()) {
         server.callBrokerBridgePlugins(plugin -> plugin.beforeDeployBridge(config));
      }

      Queue queue = (Queue) binding.getBindable();

      ServerLocatorInternal serverLocator;

      if (config.getDiscoveryGroupName() != null) {
         DiscoveryGroupConfiguration discoveryGroupConfiguration = configuration.getDiscoveryGroupConfigurations().get(config.getDiscoveryGroupName());
         if (discoveryGroupConfiguration == null) {
            ActiveMQServerLogger.LOGGER.bridgeNoDiscoveryGroup(config.getDiscoveryGroupName());

            return;
         }

         if (config.isHA()) {
            serverLocator = (ServerLocatorInternal) ActiveMQClient.createServerLocatorWithHA(discoveryGroupConfiguration);
         } else {
            serverLocator = (ServerLocatorInternal) ActiveMQClient.createServerLocatorWithoutHA(discoveryGroupConfiguration);
         }

      } else {
         TransportConfiguration[] tcConfigs = configuration.getTransportConfigurations(config.getStaticConnectors());

         if (tcConfigs == null) {
            ActiveMQServerLogger.LOGGER.bridgeCantFindConnectors(config.getName());
            return;
         }

         if (config.isHA()) {
            serverLocator = (ServerLocatorInternal) ActiveMQClient.createServerLocatorWithHA(tcConfigs);
         } else {
            serverLocator = (ServerLocatorInternal) ActiveMQClient.createServerLocatorWithoutHA(tcConfigs);
         }

      }

      serverLocator.setIdentity("Bridge " + config.getName());
      serverLocator.setConfirmationWindowSize(config.getConfirmationWindowSize());

      // We are going to manually retry on the bridge in case of failure
      serverLocator.setReconnectAttempts(0);

      serverLocator.setInitialConnectAttempts(0);
      serverLocator.setRetryInterval(config.getRetryInterval());
      serverLocator.setMaxRetryInterval(config.getMaxRetryInterval());
      serverLocator.setRetryIntervalMultiplier(config.getRetryIntervalMultiplier());
      serverLocator.setClientFailureCheckPeriod(config.getClientFailureCheckPeriod());
      serverLocator.setConnectionTTL(config.getConnectionTTL());
      // duplicate detection off => fall back to blocking sends for reliability
      serverLocator.setBlockOnDurableSend(!config.isUseDuplicateDetection());
      serverLocator.setBlockOnNonDurableSend(!config.isUseDuplicateDetection());
      serverLocator.setMinLargeMessageSize(config.getMinLargeMessageSize());
      serverLocator.setProducerWindowSize(config.getProducerWindowSize());

      // This will be set to 30s unless it's changed from embedded / testing
      // there is no reason to exception the config for this timeout
      // since the Bridge is supposed to be non-blocking and fast
      // We may expose this if we find a good use case
      serverLocator.setCallTimeout(config.getCallTimeout());

      serverLocator.addIncomingInterceptor(new IncomingInterceptorLookingForExceptionMessage(this, executor));
      if (!config.isUseDuplicateDetection()) {
         logger.debug("Bridge " + config.getName() + " is configured to not use duplicate detecion, it will send messages synchronously");
      }

      clusterLocators.add(serverLocator);

      for (int i = 0; i < config.getConcurrency(); i++) {
         // With concurrency > 1 each instance gets a "-<i>" suffixed name.
         String name = config.getConcurrency() > 1 ? (config.getName() + "-" + i) : config.getName();
         Bridge bridge = new BridgeImpl(serverLocator, config.getInitialConnectAttempts(), config.getReconnectAttempts(), config.getReconnectAttemptsOnSameNode(), config.getRetryInterval(), config.getRetryIntervalMultiplier(), config.getMaxRetryInterval(), nodeManager.getUUID(), new SimpleString(name), queue, executorFactory.getExecutor(), FilterImpl.createFilter(config.getFilterString()), SimpleString.toSimpleString(config.getForwardingAddress()), scheduledExecutor, transformer, config.isUseDuplicateDetection(), config.getUser(), config.getPassword(), server, config.getRoutingType());

         bridges.put(name, bridge);

         managementService.registerBridge(bridge, config);

         bridge.start();

         if (server.hasBrokerBridgePlugins()) {
            server.callBrokerBridgePlugins(plugin -> plugin.afterDeployBridge(bridge));
         }
      }
   }

   /**
    * Incoming interceptor that watches for EXCEPTION packets carrying a
    * CLUSTER_SECURITY_EXCEPTION and, when seen, asynchronously stops the whole
    * ClusterManager (the stop runs on the supplied executor, not the IO thread).
    */
   public static class IncomingInterceptorLookingForExceptionMessage implements Interceptor {

      private final ClusterManager manager;
      private final Executor executor;

      /**
       * @param manager  the manager to stop on a cluster security failure
       * @param executor executor used to run the stop asynchronously
       */
      public IncomingInterceptorLookingForExceptionMessage(ClusterManager manager, Executor executor) {
         this.manager = manager;
         this.executor = executor;
      }

      @Override
      public boolean intercept(Packet packet, RemotingConnection connection) throws ActiveMQException {
         if (packet.getType() == PacketImpl.EXCEPTION) {
            ActiveMQExceptionMessage msg = (ActiveMQExceptionMessage) packet;
            final ActiveMQException exception = msg.getException();
            if (exception.getType() == ActiveMQExceptionType.CLUSTER_SECURITY_EXCEPTION) {
               ActiveMQServerLogger.LOGGER.clusterManagerAuthenticationError(exception.getMessage());
               executor.execute(new Runnable() {
                  @Override
                  public void run() {
                     try {
                        manager.stop();
                     } catch (Exception e) {
                        ActiveMQServerLogger.LOGGER.failedToStopClusterManager(e);
                     }
                  }
               });
            }
         }
         // always let the packet continue down the chain
         return true;
      }
   }

   /**
    * Removes, stops and unregisters the named bridge (no-op if unknown).
    * flushExecutor() runs outside the monitor to avoid blocking while holding the lock.
    */
   public void destroyBridge(final String name) throws Exception {
      Bridge bridge;

      synchronized (this) {
         bridge = bridges.remove(name);
         if (bridge != null) {
            bridge.stop();
            managementService.unregisterBridge(name);
         }
      }
      if (bridge != null) {
         bridge.flushExecutor();
      }
   }

   // for testing
   public void clear() {
      for (Bridge bridge : bridges.values()) {
         try {
            bridge.stop();
         } catch (Exception e) {
            ActiveMQServerLogger.LOGGER.warn(e.getMessage(), e);
         }
      }
      bridges.clear();
      for (ClusterConnection clusterConnection : clusterConnections.values()) {
         try {
            clusterConnection.stop();
         } catch (Exception e) {
            ActiveMQServerLogger.LOGGER.failedToStopClusterConnection(e);
         }
      }
      clearClusterConnections();
   }

   /** Tells the named cluster connection to announce this node's backup, if it exists. */
   public void informClusterOfBackup(String name) {
      ClusterConnection clusterConnection = clusterConnections.get(name);

      if (clusterConnection != null) {
         clusterConnection.informClusterOfBackup();
      }
   }

   public ClusterManager addProtocolIgnoredAddress(String ignoredAddress) {
      protocolIgnoredAddresses.add(ignoredAddress);
      return this;
   }

   /** @return the live (not copied) set of protocol-ignored addresses. */
   public Collection<String> getProtocolIgnoredAddresses() {
      return protocolIgnoredAddresses;
   }

   // Private methods ----------------------------------------------------------------------------------------------------

   private void clearClusterConnections() {
      clusterConnections.clear();
      this.defaultClusterConnection = null;
   }

   /**
    * Validates the configuration, builds a ClusterConnectionImpl (discovery-group based or
    * static-connector based), registers it with the controller and management service, and
    * makes it the default connection if none exists yet. Invalid configs are skipped silently
    * or with a log, never thrown.
    */
   private void deployClusterConnection(final ClusterConnectionConfiguration config) throws Exception {

      if (!config.validateConfiguration()) {
         return;
      }

      TransportConfiguration connector = config.getTransportConfiguration(configuration);

      if (connector == null) {
         return;
      }

      if (clusterConnections.containsKey(config.getName())) {
         ActiveMQServerLogger.LOGGER.clusterConnectionAlreadyExists(config.getConnectorName());
         return;
      }

      ClusterConnectionImpl clusterConnection;
      if (config.getDiscoveryGroupName() != null) {
         DiscoveryGroupConfiguration dg = config.getDiscoveryGroupConfiguration(configuration);

         if (dg == null)
            return;

         if (logger.isDebugEnabled()) {
            logger.debug(this + " Starting a Discovery Group Cluster Connection, name=" + config.getDiscoveryGroupName() + ", dg=" + dg);
         }

         // NOTE(review): only this branch null-guards config.getAddress(); the static branch
         // below passes it through unguarded — presumably validateConfiguration covers it.
         clusterConnection = new ClusterConnectionImpl(this, dg, connector, new SimpleString(config.getName()), new SimpleString(config.getAddress() != null ? config.getAddress() : ""), config.getMinLargeMessageSize(), config.getClientFailureCheckPeriod(), config.getConnectionTTL(), config.getRetryInterval(), config.getRetryIntervalMultiplier(), config.getMaxRetryInterval(), config.getInitialConnectAttempts(), config.getReconnectAttempts(), config.getCallTimeout(), config.getCallFailoverTimeout(), config.isDuplicateDetection(), config.getMessageLoadBalancingType(), config.getConfirmationWindowSize(), config.getProducerWindowSize(), executorFactory, server, postOffice, managementService, scheduledExecutor, config.getMaxHops(), nodeManager, server.getConfiguration().getClusterUser(), server.getConfiguration().getClusterPassword(), config.isAllowDirectConnectionsOnly(), config.getClusterNotificationInterval(), config.getClusterNotificationAttempts());

         clusterController.addClusterConnection(clusterConnection.getName(), dg, config, connector);
      } else {
         TransportConfiguration[] tcConfigs = config.getTransportConfigurations(configuration);

         if (logger.isDebugEnabled()) {
            logger.debug(this + " defining cluster connection towards " + Arrays.toString(tcConfigs));
         }

         clusterConnection = new ClusterConnectionImpl(this, tcConfigs, connector, new SimpleString(config.getName()), new SimpleString(config.getAddress()), config.getMinLargeMessageSize(), config.getClientFailureCheckPeriod(), config.getConnectionTTL(), config.getRetryInterval(), config.getRetryIntervalMultiplier(), config.getMaxRetryInterval(), config.getInitialConnectAttempts(), config.getReconnectAttempts(), config.getCallTimeout(), config.getCallFailoverTimeout(), config.isDuplicateDetection(), config.getMessageLoadBalancingType(), config.getConfirmationWindowSize(), config.getProducerWindowSize(), executorFactory, server, postOffice, managementService, scheduledExecutor, config.getMaxHops(), nodeManager, server.getConfiguration().getClusterUser(), server.getConfiguration().getClusterPassword(), config.isAllowDirectConnectionsOnly(), config.getClusterNotificationInterval(), config.getClusterNotificationAttempts());

         clusterController.addClusterConnection(clusterConnection.getName(), tcConfigs, config, connector);
      }

      if (defaultClusterConnection == null) {
         defaultClusterConnection = clusterConnection;
         clusterController.setDefaultClusterConnectionName(defaultClusterConnection.getName());
      }

      managementService.registerCluster(clusterConnection, config);

      clusterConnections.put(config.getName(), clusterConnection);

      if (logger.isTraceEnabled()) {
         logger.trace("ClusterConnection.start at " + clusterConnection, new Exception("trace"));
      }
   }

   // Creates (or reuses) and registers a broadcast group; duplicates are logged and skipped.
   private synchronized void deployBroadcastGroup(final BroadcastGroupConfiguration config) throws Exception {
      if (broadcastGroups.containsKey(config.getName())) {
         ActiveMQServerLogger.LOGGER.broadcastGroupAlreadyExists(config.getName());
         return;
      }

      BroadcastGroup group = createBroadcastGroup(config);

      managementService.registerBroadcastGroup(group, config);
   }

   /**
    * Builds (or returns the cached) BroadcastGroup for the config.
    * Returns null — after logging — when any referenced connector is missing or the
    * resulting group ends up with no connectors at all.
    */
   private BroadcastGroup createBroadcastGroup(BroadcastGroupConfiguration config) throws Exception {
      BroadcastGroup group = broadcastGroups.get(config.getName());

      if (group == null) {
         group = new BroadcastGroupImpl(nodeManager, config.getName(), config.getBroadcastPeriod(), scheduledExecutor, config.getEndpointFactory());

         for (String connectorInfo : config.getConnectorInfos()) {
            TransportConfiguration connector = configuration.getConnectorConfigurations().get(connectorInfo);

            if (connector == null) {
               logWarnNoConnector(connectorInfo, config.getName());

               return null;
            }

            group.addConnector(connector);
         }
      }

      if (group.size() == 0) {
         logWarnNoConnector(config.getConnectorInfos().toString(), group.getName());
         return null;
      }

      broadcastGroups.put(config.getName(), group);

      return group;
   }

   private void logWarnNoConnector(final String connectorName, final String bgName) {
      ActiveMQServerLogger.LOGGER.broadcastGroupNoConnector(connectorName, bgName);
   }

   // Snapshot taken under the monitor so callers can iterate without holding the lock.
   private synchronized Collection<ClusterConnection> cloneClusterConnections() {
      ArrayList<ClusterConnection> list = new ArrayList<>(clusterConnections.values());
      return list;
   }
}
package net.jitix.simplestore.util; import net.jitix.simplestore.worker.DataFileLoader; import java.io.File; import java.io.FileFilter; import java.io.IOException; import java.nio.channels.FileChannel; import java.nio.channels.FileLock; import java.nio.file.OpenOption; import java.nio.file.Path; import static java.nio.file.StandardOpenOption.APPEND; import static java.nio.file.StandardOpenOption.READ; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.regex.Pattern; import net.jitix.simplestore.common.SSConfiguration; import net.jitix.simplestore.common.SSConstants; import net.jitix.simplestore.common.SSContext; import net.jitix.simplestore.common.SimpleStoreException; import net.jitix.simplestore.enums.ConfKey; import net.jitix.simplestore.enums.SimpleStoreError; import net.jitix.simplestore.vo.DataFileLock; import net.jitix.simplestore.vo.RecordAddress; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * * @author Soumyajit B */ public class StorageUtil { private static final Logger LOG = LoggerFactory.getLogger(StorageUtil.class); private static final Pattern DATA_FILE_PATTERN = Pattern.compile( SSConstants.DATA_FILE_NAME_PREFIX + "[0-9]+"); public static void setupStoreDirectory(File storeDirectory, SSConfiguration conf) throws SimpleStoreException { if (storeDirectory.exists()) { //check if the path is a directory if (!storeDirectory.isDirectory()) { LOG.error("Path '{}' is not a directory", storeDirectory); throw new SimpleStoreException(SimpleStoreError.STORE_INIT_FAIL); } else { //check for lock file in directory if (conf.<Boolean>getValue(ConfKey.STORE_DIR_LOCK_FILE_FLAG)) { if (checkIfLockFileExists(storeDirectory)) { //lock file exists. 
don't proceed LOG.error("Lock file exists in store directory"); throw new SimpleStoreException(SimpleStoreError.STORE_LOCK_FAIL); } else { //lock file does not exist. create new and proceed createLockFile(storeDirectory); } } } } else { //check if store dir can be created if (conf.<Boolean>getValue(ConfKey.STORE_DIR_CREATE_FLAG)) { //create store directory if not present if (!storeDirectory.mkdirs()) { LOG.error("Error creating store directory '{}'", storeDirectory.getAbsolutePath()); throw new SimpleStoreException(SimpleStoreError.STORE_INIT_FAIL); } } else { LOG.error("Store directory {} does not exist", storeDirectory); throw new SimpleStoreException(SimpleStoreError.STORE_INIT_FAIL); } } } private static boolean checkIfLockFileExists(File storeDirectory) throws SimpleStoreException { File lockFile = new File(storeDirectory, SSConstants.LOCK_FILE_NAME); if (lockFile.exists()) { if (lockFile.isFile()) { return true; } else { LOG.error("Lock file path {} is not a valid file", lockFile.getAbsolutePath()); throw new SimpleStoreException(SimpleStoreError.STORE_INVALID_FILE); } } else { return false; } } private static void createLockFile(File storeDirectory) throws SimpleStoreException { File lockFile = new File(storeDirectory, SSConstants.LOCK_FILE_NAME); try { if (!lockFile.createNewFile()) { LOG.error("Error creating lock file {} in store directory {}", lockFile.getName(), storeDirectory.getAbsolutePath()); throw new SimpleStoreException(SimpleStoreError.STORE_LOCK_FAIL); } } catch (IOException e) { LOG.error("IO Error creating lock file {} in store directory {} :", lockFile.getName(), storeDirectory.getAbsolutePath(), e); throw new SimpleStoreException(SimpleStoreError.STORE_LOCK_FAIL); } } public static void loadDataFileLocks(SSContext context) throws SimpleStoreException { //list the data files in store directory File[] dataFiles = getChronologicalDataFiles(context.getStoreDirectory()); //iterate over the files and acquire lock on each of them for (File 
dataFile : dataFiles) { //put it in the map context.getFileLocks().put(dataFile.getName(), lockFileForReading(dataFile)); } } public static File[] getChronologicalDataFiles(File storeDirectory) { File[] dataFiles = storeDirectory.listFiles(new FileFilter() { @Override public boolean accept(File file) { return DATA_FILE_PATTERN.matcher(file.getName()).matches(); } }); //sort files by name //since name contains a numberic version/timestamp it will be sorted chronologically too Arrays.sort(dataFiles); return dataFiles; } private static DataFileLock lockFileForReading(File dataFile) throws SimpleStoreException { return lockDataFile(dataFile, READ); } private static DataFileLock lockFileForReadingAndAppending(File dataFile) throws SimpleStoreException { return lockDataFile(dataFile, READ, APPEND); } private static DataFileLock lockDataFile(File dataFile, OpenOption... options) throws SimpleStoreException { Path dataFilePath = dataFile.toPath(); try { FileChannel channel = FileChannel.open(dataFilePath, options); FileLock lock = channel.tryLock(); if (lock == null) { //lock cannot be acquired LOG.error("Error acquiring exclusive lock on file {}", dataFile.getAbsolutePath()); throw new SimpleStoreException(SimpleStoreError.STORE_LOCK_FAIL); } else { return new DataFileLock(dataFile.getName(), lock); } } catch (IOException e) { LOG.error("IO Error opening file {} with exclusive lock: {}", dataFile.getAbsolutePath(), e); throw new SimpleStoreException(SimpleStoreError.STORE_LOCK_FAIL); } } private static File getNewDataFile(File storeDirectory) { return new File(storeDirectory, SSConstants.DATA_FILE_NAME_PREFIX + System.currentTimeMillis()); } public static String createNewDataFile(SSContext context) throws SimpleStoreException { //create data file path File dataFile = getNewDataFile(context.getStoreDirectory()); try { //create data file if (!dataFile.createNewFile()) { LOG.error("Error creating new data file {}", dataFile.getAbsoluteFile()); throw new 
SimpleStoreException(SimpleStoreError.DATA_FILE_CREATE_ERROR); } //acquire lock context.getFileLocks().put(dataFile.getName(), lockFileForReadingAndAppending(dataFile)); return dataFile.getName(); } catch (IOException e) { LOG.error("IO Error while creating new data file", e); throw new SimpleStoreException(SimpleStoreError.DATA_FILE_CREATE_ERROR); } } }
/*
 * Copyright 2015-2016 USEF Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package energy.usef.core.rest;

import java.util.ArrayList;
import java.util.List;

import javax.annotation.Generated;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.commons.lang.builder.ToStringBuilder;

/**
 * REST Result: a JSON-serializable carrier for an HTTP-style response,
 * holding a status {@code code}, response {@code headers}, an optional
 * {@code body} and a list of error messages.
 * <p>
 * Originally generated by jsonschema2pojo; supports both setter-based and
 * fluent ({@code withXxx}) population.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
@Generated("org.jsonschema2pojo")
@JsonPropertyOrder({ "code", "headers", "body", "errors" })
public class RestResult {

    /**
     * Numeric result/status code.
     * (Required)
     */
    @JsonProperty("code")
    private long code;

    /**
     * Response headers; never null (defaults to an empty list).
     * (Required)
     */
    @JsonProperty("headers")
    private List<Header> headers = new ArrayList<>();

    /** Optional response body. */
    @JsonProperty("body")
    private String body;

    /** Error messages; never null (defaults to an empty list). */
    @JsonProperty("errors")
    private List<String> errors = new ArrayList<>();

    /**
     * No-args constructor for use in serialization.
     */
    public RestResult() {
    }

    /**
     * Convenience constructor populating all fields.
     *
     * @param code the result/status code
     * @param headers the response headers
     * @param body the response body
     * @param errors the error messages
     */
    public RestResult(long code, List<Header> headers, String body, List<String> errors) {
        this.code = code;
        this.headers = headers;
        this.body = body;
        this.errors = errors;
    }

    /**
     * (Required)
     *
     * @return the code
     */
    @JsonProperty("code")
    public long getCode() {
        return code;
    }

    /**
     * (Required)
     *
     * @param code the code
     */
    @JsonProperty("code")
    public void setCode(long code) {
        this.code = code;
    }

    /** Fluent setter for {@link #setCode(long)}. */
    public RestResult withCode(long code) {
        this.code = code;
        return this;
    }

    /**
     * (Required)
     *
     * @return the headers
     */
    @JsonProperty("headers")
    public List<Header> getHeaders() {
        return headers;
    }

    /**
     * (Required)
     *
     * @param headers the headers
     */
    @JsonProperty("headers")
    public void setHeaders(List<Header> headers) {
        this.headers = headers;
    }

    /** Fluent setter for {@link #setHeaders(List)}. */
    public RestResult withHeaders(List<Header> headers) {
        this.headers = headers;
        return this;
    }

    /**
     * @return the body
     */
    @JsonProperty("body")
    public String getBody() {
        return body;
    }

    /**
     * @param body the body
     */
    @JsonProperty("body")
    public void setBody(String body) {
        this.body = body;
    }

    /** Fluent setter for {@link #setBody(String)}. */
    public RestResult withBody(String body) {
        this.body = body;
        return this;
    }

    /**
     * @return the errors
     */
    @JsonProperty("errors")
    public List<String> getErrors() {
        return errors;
    }

    /**
     * @param errors the errors
     */
    @JsonProperty("errors")
    public void setErrors(List<String> errors) {
        this.errors = errors;
    }

    /** Fluent setter for {@link #setErrors(List)}. */
    public RestResult withErrors(List<String> errors) {
        this.errors = errors;
        return this;
    }

    @Override
    public String toString() {
        return ToStringBuilder.reflectionToString(this);
    }

    @Override
    public int hashCode() {
        return new HashCodeBuilder().append(code).append(headers).append(body).append(errors).toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        // idiomatic negated instanceof instead of "== false"; also rejects null
        if (!(other instanceof RestResult)) {
            return false;
        }
        RestResult rhs = (RestResult) other;
        return new EqualsBuilder().append(code, rhs.code).append(headers, rhs.headers).append(body, rhs.body)
                .append(errors, rhs.errors).isEquals();
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.fineract.portfolio.savings.service; import static org.apache.fineract.portfolio.savings.DepositsApiConstants.isCalendarInheritedParamName; import static org.apache.fineract.portfolio.savings.DepositsApiConstants.recurringFrequencyParamName; import static org.apache.fineract.portfolio.savings.DepositsApiConstants.recurringFrequencyTypeParamName; import static org.apache.fineract.portfolio.savings.DepositsApiConstants.transferInterestToSavingsParamName; import java.math.MathContext; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.lang.StringUtils; import org.apache.fineract.infrastructure.accountnumberformat.domain.AccountNumberFormat; import org.apache.fineract.infrastructure.accountnumberformat.domain.AccountNumberFormatRepositoryWrapper; import org.apache.fineract.infrastructure.accountnumberformat.domain.EntityAccountType; import org.apache.fineract.infrastructure.configuration.domain.ConfigurationDomainService; import org.apache.fineract.infrastructure.core.api.JsonCommand; import org.apache.fineract.infrastructure.core.data.ApiParameterError; import 
org.apache.fineract.infrastructure.core.data.CommandProcessingResult; import org.apache.fineract.infrastructure.core.data.CommandProcessingResultBuilder; import org.apache.fineract.infrastructure.core.data.DataValidatorBuilder; import org.apache.fineract.infrastructure.core.exception.GeneralPlatformDomainRuleException; import org.apache.fineract.infrastructure.core.exception.PlatformApiDataValidationException; import org.apache.fineract.infrastructure.core.exception.PlatformDataIntegrityException; import org.apache.fineract.infrastructure.core.serialization.FromJsonHelper; import org.apache.fineract.infrastructure.core.service.DateUtils; import org.apache.fineract.infrastructure.security.service.PlatformSecurityContext; import org.apache.fineract.organisation.staff.domain.Staff; import org.apache.fineract.organisation.staff.domain.StaffRepositoryWrapper; import org.apache.fineract.portfolio.account.domain.AccountAssociationType; import org.apache.fineract.portfolio.account.domain.AccountAssociations; import org.apache.fineract.portfolio.account.domain.AccountAssociationsRepository; import org.apache.fineract.portfolio.calendar.domain.Calendar; import org.apache.fineract.portfolio.calendar.domain.CalendarEntityType; import org.apache.fineract.portfolio.calendar.domain.CalendarFrequencyType; import org.apache.fineract.portfolio.calendar.domain.CalendarInstance; import org.apache.fineract.portfolio.calendar.domain.CalendarInstanceRepository; import org.apache.fineract.portfolio.calendar.domain.CalendarType; import org.apache.fineract.portfolio.calendar.service.CalendarUtils; import org.apache.fineract.portfolio.client.domain.AccountNumberGenerator; import org.apache.fineract.portfolio.client.domain.Client; import org.apache.fineract.portfolio.client.domain.ClientRepositoryWrapper; import org.apache.fineract.portfolio.client.exception.ClientNotActiveException; import org.apache.fineract.portfolio.common.domain.PeriodFrequencyType; import 
org.apache.fineract.portfolio.group.domain.Group; import org.apache.fineract.portfolio.group.domain.GroupRepository; import org.apache.fineract.portfolio.group.exception.CenterNotActiveException; import org.apache.fineract.portfolio.group.exception.GroupNotActiveException; import org.apache.fineract.portfolio.group.exception.GroupNotFoundException; import org.apache.fineract.portfolio.note.domain.Note; import org.apache.fineract.portfolio.note.domain.NoteRepository; import org.apache.fineract.portfolio.savings.DepositAccountType; import org.apache.fineract.portfolio.savings.DepositsApiConstants; import org.apache.fineract.portfolio.savings.SavingsApiConstants; import org.apache.fineract.portfolio.savings.data.DepositAccountDataValidator; import org.apache.fineract.portfolio.savings.domain.DepositAccountAssembler; import org.apache.fineract.portfolio.savings.domain.FixedDepositAccount; import org.apache.fineract.portfolio.savings.domain.FixedDepositAccountRepository; import org.apache.fineract.portfolio.savings.domain.RecurringDepositAccount; import org.apache.fineract.portfolio.savings.domain.RecurringDepositAccountRepository; import org.apache.fineract.portfolio.savings.domain.SavingsAccount; import org.apache.fineract.portfolio.savings.domain.SavingsAccountCharge; import org.apache.fineract.portfolio.savings.domain.SavingsAccountChargeAssembler; import org.apache.fineract.portfolio.savings.domain.SavingsAccountRepositoryWrapper; import org.apache.fineract.portfolio.savings.domain.SavingsProduct; import org.apache.fineract.portfolio.savings.domain.SavingsProductRepository; import org.apache.fineract.portfolio.savings.exception.SavingsProductNotFoundException; import org.apache.fineract.useradministration.domain.AppUser; import org.joda.time.LocalDate; import org.joda.time.LocalTime; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.DataAccessException; import 
org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

/**
 * Write-platform service handling the application lifecycle (submit, modify,
 * delete, approve, undo-approval, reject, withdraw) of fixed-deposit (FD) and
 * recurring-deposit (RD) accounts.
 * <p>
 * All state-changing operations run transactionally; data-integrity violations
 * from the persistence layer are translated into
 * {@link PlatformDataIntegrityException}s by
 * {@link #handleDataIntegrityIssues(JsonCommand, DataAccessException)}.
 */
@Service
public class DepositApplicationProcessWritePlatformServiceJpaRepositoryImpl implements
        DepositApplicationProcessWritePlatformService {

    private static final Logger logger = LoggerFactory
            .getLogger(DepositApplicationProcessWritePlatformServiceJpaRepositoryImpl.class);

    private final PlatformSecurityContext context;
    private final SavingsAccountRepositoryWrapper savingAccountRepository;
    private final FixedDepositAccountRepository fixedDepositAccountRepository;
    private final RecurringDepositAccountRepository recurringDepositAccountRepository;
    private final DepositAccountAssembler depositAccountAssembler;
    private final DepositAccountDataValidator depositAccountDataValidator;
    private final AccountNumberGenerator accountNumberGenerator;
    private final ClientRepositoryWrapper clientRepository;
    private final GroupRepository groupRepository;
    private final SavingsProductRepository savingsProductRepository;
    private final NoteRepository noteRepository;
    private final StaffRepositoryWrapper staffRepository;
    private final SavingsAccountApplicationTransitionApiJsonValidator savingsAccountApplicationTransitionApiJsonValidator;
    private final SavingsAccountChargeAssembler savingsAccountChargeAssembler;
    private final AccountAssociationsRepository accountAssociationsRepository;
    private final FromJsonHelper fromJsonHelper;
    private final CalendarInstanceRepository calendarInstanceRepository;
    private final ConfigurationDomainService configurationDomainService;
    private final AccountNumberFormatRepositoryWrapper accountNumberFormatRepository;

    @Autowired
    public DepositApplicationProcessWritePlatformServiceJpaRepositoryImpl(final PlatformSecurityContext context,
            final SavingsAccountRepositoryWrapper savingAccountRepository,
            final DepositAccountAssembler depositAccountAssembler,
            final DepositAccountDataValidator depositAccountDataValidator,
            final AccountNumberGenerator accountNumberGenerator, final ClientRepositoryWrapper clientRepository,
            final GroupRepository groupRepository, final SavingsProductRepository savingsProductRepository,
            final NoteRepository noteRepository, final StaffRepositoryWrapper staffRepository,
            final SavingsAccountApplicationTransitionApiJsonValidator savingsAccountApplicationTransitionApiJsonValidator,
            final SavingsAccountChargeAssembler savingsAccountChargeAssembler,
            final FixedDepositAccountRepository fixedDepositAccountRepository,
            final RecurringDepositAccountRepository recurringDepositAccountRepository,
            final AccountAssociationsRepository accountAssociationsRepository, final FromJsonHelper fromJsonHelper,
            final CalendarInstanceRepository calendarInstanceRepository,
            final ConfigurationDomainService configurationDomainService,
            final AccountNumberFormatRepositoryWrapper accountNumberFormatRepository) {
        this.context = context;
        this.savingAccountRepository = savingAccountRepository;
        this.depositAccountAssembler = depositAccountAssembler;
        this.accountNumberGenerator = accountNumberGenerator;
        this.depositAccountDataValidator = depositAccountDataValidator;
        this.clientRepository = clientRepository;
        this.groupRepository = groupRepository;
        this.savingsProductRepository = savingsProductRepository;
        this.noteRepository = noteRepository;
        this.staffRepository = staffRepository;
        this.savingsAccountApplicationTransitionApiJsonValidator = savingsAccountApplicationTransitionApiJsonValidator;
        this.savingsAccountChargeAssembler = savingsAccountChargeAssembler;
        this.fixedDepositAccountRepository = fixedDepositAccountRepository;
        this.recurringDepositAccountRepository = recurringDepositAccountRepository;
        this.accountAssociationsRepository = accountAssociationsRepository;
        this.fromJsonHelper = fromJsonHelper;
        this.calendarInstanceRepository = calendarInstanceRepository;
        this.configurationDomainService = configurationDomainService;
        this.accountNumberFormatRepository = accountNumberFormatRepository;
    }

    /*
     * Guaranteed to throw an exception no matter what the data integrity issue
     * is.
     */
    private void handleDataIntegrityIssues(final JsonCommand command, final DataAccessException dve) {
        final StringBuilder errorCodeBuilder = new StringBuilder("error.msg.")
                .append(SavingsApiConstants.SAVINGS_ACCOUNT_RESOURCE_NAME);
        final Throwable realCause = dve.getMostSpecificCause();
        // Map known unique-constraint violations to user-friendly errors.
        if (realCause.getMessage().contains("sa_account_no_UNIQUE")) {
            final String accountNo = command.stringValueOfParameterNamed("accountNo");
            errorCodeBuilder.append(".duplicate.accountNo");
            throw new PlatformDataIntegrityException(errorCodeBuilder.toString(), "Savings account with accountNo "
                    + accountNo + " already exists", "accountNo", accountNo);
        } else if (realCause.getMessage().contains("sa_external_id_UNIQUE")) {
            final String externalId = command.stringValueOfParameterNamed("externalId");
            errorCodeBuilder.append(".duplicate.externalId");
            throw new PlatformDataIntegrityException(errorCodeBuilder.toString(), "Savings account with externalId "
                    + externalId + " already exists", "externalId", externalId);
        }
        // Anything else is unexpected: log and surface a generic error code.
        errorCodeBuilder.append(".unknown.data.integrity.issue");
        logger.error(dve.getMessage(), dve);
        throw new PlatformDataIntegrityException(errorCodeBuilder.toString(),
                "Unknown data integrity issue with savings account.");
    }

    /**
     * Submits a new fixed-deposit application: validates the payload, assembles
     * the account, pre-computes maturity, persists it, optionally auto-generates
     * the account number and records the linked savings account (if any).
     */
    @Transactional
    @Override
    public CommandProcessingResult submitFDApplication(final JsonCommand command) {
        try {
            this.depositAccountDataValidator.validateFixedDepositForSubmit(command.json());
            final AppUser submittedBy = this.context.authenticatedUser();
            final boolean isSavingsInterestPostingAtCurrentPeriodEnd = this.configurationDomainService
                    .isSavingsInterestPostingAtCurrentPeriodEnd();
            final Integer financialYearBeginningMonth = this.configurationDomainService
                    .retrieveFinancialYearBeginningMonth();

            final FixedDepositAccount account = (FixedDepositAccount) this.depositAccountAssembler.assembleFrom(
                    command, submittedBy, DepositAccountType.FIXED_DEPOSIT);
            final MathContext mc = MathContext.DECIMAL64;
            final boolean isPreMatureClosure = false;
            account.updateMaturityDateAndAmountBeforeAccountActivation(mc, isPreMatureClosure,
                    isSavingsInterestPostingAtCurrentPeriodEnd, financialYearBeginningMonth);
            this.fixedDepositAccountRepository.save(account);

            if (account.isAccountNumberRequiresAutoGeneration()) {
                // FIX: use the SAVINGS account-number format (was CLIENT), consistent
                // with submitRDApplication — a fixed deposit is a savings-type account.
                final AccountNumberFormat accountNumberFormat = this.accountNumberFormatRepository
                        .findByAccountType(EntityAccountType.SAVINGS);
                account.updateAccountNo(this.accountNumberGenerator.generate(account, accountNumberFormat));
                this.savingAccountRepository.save(account);
            }

            // Save linked account information
            final Long savingsAccountId = command.longValueOfParameterNamed(DepositsApiConstants.linkedAccountParamName);
            if (savingsAccountId != null) {
                final SavingsAccount savingsAccount = this.depositAccountAssembler.assembleFrom(savingsAccountId,
                        DepositAccountType.SAVINGS_DEPOSIT);
                this.depositAccountDataValidator.validatelinkedSavingsAccount(savingsAccount, account);
                boolean isActive = true;
                final AccountAssociations accountAssociations = AccountAssociations.associateSavingsAccount(account,
                        savingsAccount, AccountAssociationType.LINKED_ACCOUNT_ASSOCIATION.getValue(), isActive);
                this.accountAssociationsRepository.save(accountAssociations);
            }

            final Long savingsId = account.getId();
            return new CommandProcessingResultBuilder() //
                    .withCommandId(command.commandId()) //
                    .withEntityId(savingsId) //
                    .withOfficeId(account.officeId()) //
                    .withClientId(account.clientId()) //
                    .withGroupId(account.groupId()) //
                    .withSavingsId(savingsId) //
                    .build();
        } catch (final DataAccessException dve) {
            handleDataIntegrityIssues(command, dve);
            return CommandProcessingResult.empty();
        }
    }

    /**
     * Submits a new recurring-deposit application: validates the payload,
     * assembles and persists the account, attaches a deposit calendar (own or
     * inherited from the client's group/center meeting), generates the deposit
     * schedule and computes maturity.
     */
    @Transactional
    @Override
    public CommandProcessingResult submitRDApplication(final JsonCommand command) {
        try {
            this.depositAccountDataValidator.validateRecurringDepositForSubmit(command.json());
            final AppUser submittedBy = this.context.authenticatedUser();
            final boolean isSavingsInterestPostingAtCurrentPeriodEnd = this.configurationDomainService
                    .isSavingsInterestPostingAtCurrentPeriodEnd();
            final Integer financialYearBeginningMonth = this.configurationDomainService
                    .retrieveFinancialYearBeginningMonth();

            final RecurringDepositAccount account = (RecurringDepositAccount) this.depositAccountAssembler
                    .assembleFrom(command, submittedBy, DepositAccountType.RECURRING_DEPOSIT);
            this.recurringDepositAccountRepository.save(account);

            if (account.isAccountNumberRequiresAutoGeneration()) {
                final AccountNumberFormat accountNumberFormat = this.accountNumberFormatRepository
                        .findByAccountType(EntityAccountType.SAVINGS);
                account.updateAccountNo(this.accountNumberGenerator.generate(account, accountNumberFormat));
                // account is saved again below once the schedule is generated
            }

            final Long savingsId = account.getId();
            final CalendarInstance calendarInstance = getCalendarInstance(command, account);
            this.calendarInstanceRepository.save(calendarInstance);

            // FIXME: Avoid save separately (Calendar instance requires account
            // details)
            final MathContext mc = MathContext.DECIMAL64;
            final Calendar calendar = calendarInstance.getCalendar();
            final PeriodFrequencyType frequencyType = CalendarFrequencyType.from(CalendarUtils.getFrequency(calendar
                    .getRecurrence()));
            Integer frequency = CalendarUtils.getInterval(calendar.getRecurrence());
            // -1 means "no interval in the recurrence rule"; default to every period
            frequency = frequency == -1 ? 1 : frequency;
            account.generateSchedule(frequencyType, frequency, calendar);
            final boolean isPreMatureClosure = false;
            account.updateMaturityDateAndAmount(mc, isPreMatureClosure, isSavingsInterestPostingAtCurrentPeriodEnd,
                    financialYearBeginningMonth);
            account.validateApplicableInterestRate();
            this.savingAccountRepository.save(account);

            return new CommandProcessingResultBuilder() //
                    .withCommandId(command.commandId()) //
                    .withEntityId(savingsId) //
                    .withOfficeId(account.officeId()) //
                    .withClientId(account.clientId()) //
                    .withGroupId(account.groupId()) //
                    .withSavingsId(savingsId) //
                    .build();
        } catch (final DataAccessException dve) {
            handleDataIntegrityIssues(command, dve);
            return CommandProcessingResult.empty();
        }
    }

    /**
     * Builds the {@link CalendarInstance} for a recurring deposit: either
     * inherited from the client's group/center collection meeting, or a new
     * repeating calendar derived from the command's frequency parameters.
     *
     * @throws GeneralPlatformDomainRuleException when inheritance is requested
     *             but the client has no (or more than one) group, or the group
     *             has no meeting calendar attached.
     */
    private CalendarInstance getCalendarInstance(final JsonCommand command, RecurringDepositAccount account) {
        CalendarInstance calendarInstance = null;
        final boolean isCalendarInherited = command.booleanPrimitiveValueOfParameterNamed(isCalendarInheritedParamName);

        if (isCalendarInherited) {
            Set<Group> groups = account.getClient().getGroups();
            Long groupId = null;
            if (groups.isEmpty()) {
                final String defaultUserMessage = "Client does not belong to group/center. Cannot follow group/center meeting frequency.";
                throw new GeneralPlatformDomainRuleException(
                        "error.msg.recurring.deposit.account.cannot.create.not.belongs.to.any.groups.to.follow.meeting.frequency",
                        defaultUserMessage, account.clientId());
            } else if (groups.size() > 1) {
                final String defaultUserMessage = "Client belongs to more than one group. Cannot support recurring deposit.";
                throw new GeneralPlatformDomainRuleException(
                        "error.msg.recurring.deposit.account.cannot.create.belongs.to.multiple.groups",
                        defaultUserMessage, account.clientId());
            } else {
                Group group = groups.iterator().next();
                Group parent = group.getParent();
                Integer entityType = CalendarEntityType.GROUPS.getValue();
                // Prefer the center's (parent's) meeting calendar when the group has one.
                if (parent != null) {
                    groupId = parent.getId();
                    entityType = CalendarEntityType.CENTERS.getValue();
                } else {
                    groupId = group.getId();
                }
                CalendarInstance parentCalendarInstance = this.calendarInstanceRepository
                        .findByEntityIdAndEntityTypeIdAndCalendarTypeId(groupId, entityType,
                                CalendarType.COLLECTION.getValue());
                if (parentCalendarInstance == null) {
                    final String defaultUserMessage = "Meeting frequency is not attached to the Group/Center to which the client belongs to.";
                    throw new GeneralPlatformDomainRuleException(
                            "error.msg.meeting.frequency.not.attached.to.group.to.which.client.belongs.to",
                            defaultUserMessage, account.clientId());
                }
                calendarInstance = CalendarInstance.from(parentCalendarInstance.getCalendar(), account.getId(),
                        CalendarEntityType.SAVINGS.getValue());
            }
        } else {
            LocalDate calendarStartDate = account.depositStartDate();
            final Integer frequencyType = command.integerValueSansLocaleOfParameterNamed(recurringFrequencyTypeParamName);
            final PeriodFrequencyType periodFrequencyType = PeriodFrequencyType.fromInt(frequencyType);
            final Integer frequency = command.integerValueSansLocaleOfParameterNamed(recurringFrequencyParamName);
            final Integer repeatsOnDay = calendarStartDate.getDayOfWeek();
            final String title = "recurring_savings_" + account.getId();
            final Calendar calendar = Calendar.createRepeatingCalendar(title, calendarStartDate,
                    CalendarType.COLLECTION.getValue(), CalendarFrequencyType.from(periodFrequencyType), frequency,
                    repeatsOnDay, null);
            calendarInstance = CalendarInstance.from(calendar, account.getId(), CalendarEntityType.SAVINGS.getValue());
        }

        if (calendarInstance == null) {
            // NOTE: message text kept as-is ("depost" typo) — it is a runtime string.
            final String defaultUserMessage = "No valid recurring details available for recurring depost account creation.";
            throw new GeneralPlatformDomainRuleException(
                    "error.msg.recurring.deposit.account.cannot.create.no.valid.recurring.details.available",
                    defaultUserMessage, account.clientId());
        }
        return calendarInstance;
    }

    /**
     * Modifies a pending fixed-deposit application: applies payload changes,
     * recomputes maturity when anything changed, and synchronizes the linked
     * savings-account association (add, replace or remove).
     */
    @Transactional
    @Override
    public CommandProcessingResult modifyFDApplication(final Long accountId, final JsonCommand command) {
        try {
            this.depositAccountDataValidator.validateFixedDepositForUpdate(command.json());
            final boolean isSavingsInterestPostingAtCurrentPeriodEnd = this.configurationDomainService
                    .isSavingsInterestPostingAtCurrentPeriodEnd();
            final Integer financialYearBeginningMonth = this.configurationDomainService
                    .retrieveFinancialYearBeginningMonth();

            final Map<String, Object> changes = new LinkedHashMap<>(20);
            final FixedDepositAccount account = (FixedDepositAccount) this.depositAccountAssembler.assembleFrom(
                    accountId, DepositAccountType.FIXED_DEPOSIT);
            checkClientOrGroupActive(account);
            account.modifyApplication(command, changes);
            account.validateNewApplicationState(DateUtils.getLocalDateOfTenant(),
                    DepositAccountType.FIXED_DEPOSIT.resourceName());

            if (!changes.isEmpty()) {
                updateFDAndRDCommonChanges(changes, command, account);
                final MathContext mc = MathContext.DECIMAL64;
                final boolean isPreMatureClosure = false;
                account.updateMaturityDateAndAmountBeforeAccountActivation(mc, isPreMatureClosure,
                        isSavingsInterestPostingAtCurrentPeriodEnd, financialYearBeginningMonth);
                this.savingAccountRepository.save(account);
            }

            boolean isLinkedAccRequired = command
                    .booleanPrimitiveValueOfParameterNamed(transferInterestToSavingsParamName);
            // Save linked account information
            final Long savingsAccountId = command.longValueOfParameterNamed(DepositsApiConstants.linkedAccountParamName);
            AccountAssociations accountAssociations = this.accountAssociationsRepository.findBySavingsIdAndType(
                    accountId, AccountAssociationType.LINKED_ACCOUNT_ASSOCIATION.getValue());
            if (savingsAccountId == null) {
                if (accountAssociations != null) {
                    // Parameter explicitly present with null value => remove the link.
                    if (this.fromJsonHelper.parameterExists(DepositsApiConstants.linkedAccountParamName,
                            command.parsedJson())) {
                        this.accountAssociationsRepository.delete(accountAssociations);
                        changes.put(DepositsApiConstants.linkedAccountParamName, null);
                        if (isLinkedAccRequired) {
                            this.depositAccountDataValidator.throwLinkedAccountRequiredError();
                        }
                    }
                } else if (isLinkedAccRequired) {
                    this.depositAccountDataValidator.throwLinkedAccountRequiredError();
                }
            } else {
                boolean isModified = false;
                if (accountAssociations == null) {
                    isModified = true;
                } else {
                    final SavingsAccount savingsAccount = accountAssociations.linkedSavingsAccount();
                    // FIX: compare boxed Longs with equals(); "!=" is a reference
                    // comparison and is true for equal ids outside the Long cache.
                    if (savingsAccount == null || !savingsAccount.getId().equals(savingsAccountId)) {
                        isModified = true;
                    }
                }
                if (isModified) {
                    final SavingsAccount savingsAccount = this.depositAccountAssembler.assembleFrom(savingsAccountId,
                            DepositAccountType.SAVINGS_DEPOSIT);
                    this.depositAccountDataValidator.validatelinkedSavingsAccount(savingsAccount, account);
                    if (accountAssociations == null) {
                        boolean isActive = true;
                        accountAssociations = AccountAssociations.associateSavingsAccount(account, savingsAccount,
                                AccountAssociationType.LINKED_ACCOUNT_ASSOCIATION.getValue(), isActive);
                    } else {
                        accountAssociations.updateLinkedSavingsAccount(savingsAccount);
                    }
                    changes.put(DepositsApiConstants.linkedAccountParamName, savingsAccountId);
                    this.accountAssociationsRepository.save(accountAssociations);
                }
            }

            return new CommandProcessingResultBuilder() //
                    .withCommandId(command.commandId()) //
                    .withEntityId(accountId) //
                    .withOfficeId(account.officeId()) //
                    .withClientId(account.clientId()) //
                    .withGroupId(account.groupId()) //
                    .withSavingsId(accountId) //
                    .with(changes) //
                    .build();
        } catch (final DataAccessException dve) {
            handleDataIntegrityIssues(command, dve);
            return new CommandProcessingResult(Long.valueOf(-1));
        }
    }

    /**
     * Modifies a pending recurring-deposit application: applies payload changes,
     * regenerates the schedule and maturity when anything changed, and updates
     * the account's own calendar when it is not inherited.
     */
    @Transactional
    @Override
    public CommandProcessingResult modifyRDApplication(final Long accountId, final JsonCommand command) {
        try {
            this.depositAccountDataValidator.validateRecurringDepositForUpdate(command.json());
            final boolean isSavingsInterestPostingAtCurrentPeriodEnd = this.configurationDomainService
                    .isSavingsInterestPostingAtCurrentPeriodEnd();
            final Integer financialYearBeginningMonth = this.configurationDomainService
                    .retrieveFinancialYearBeginningMonth();

            final Map<String, Object> changes = new LinkedHashMap<>(20);
            final RecurringDepositAccount account = (RecurringDepositAccount) this.depositAccountAssembler
                    .assembleFrom(accountId, DepositAccountType.RECURRING_DEPOSIT);
            checkClientOrGroupActive(account);
            account.modifyApplication(command, changes);
            account.validateNewApplicationState(DateUtils.getLocalDateOfTenant(),
                    DepositAccountType.RECURRING_DEPOSIT.resourceName());

            if (!changes.isEmpty()) {
                updateFDAndRDCommonChanges(changes, command, account);
                final MathContext mc = MathContext.DECIMAL64;
                final CalendarInstance calendarInstance = this.calendarInstanceRepository
                        .findByEntityIdAndEntityTypeIdAndCalendarTypeId(accountId,
                                CalendarEntityType.SAVINGS.getValue(), CalendarType.COLLECTION.getValue());
                final Calendar calendar = calendarInstance.getCalendar();
                final PeriodFrequencyType frequencyType = CalendarFrequencyType.from(CalendarUtils
                        .getFrequency(calendar.getRecurrence()));
                Integer frequency = CalendarUtils.getInterval(calendar.getRecurrence());
                // -1 means "no interval in the recurrence rule"; default to every period
                frequency = frequency == -1 ? 1 : frequency;
                account.generateSchedule(frequencyType, frequency, calendar);
                final boolean isPreMatureClosure = false;
                account.updateMaturityDateAndAmount(mc, isPreMatureClosure,
                        isSavingsInterestPostingAtCurrentPeriodEnd, financialYearBeginningMonth);
                account.validateApplicableInterestRate();
                this.savingAccountRepository.save(account);
            }

            // update calendar details
            if (!account.isCalendarInherited()) {
                final LocalDate calendarStartDate = account.depositStartDate();
                final Integer frequencyType = command
                        .integerValueSansLocaleOfParameterNamed(recurringFrequencyTypeParamName);
                final PeriodFrequencyType periodFrequencyType = PeriodFrequencyType.fromInt(frequencyType);
                final Integer frequency = command.integerValueSansLocaleOfParameterNamed(recurringFrequencyParamName);
                final Integer repeatsOnDay = calendarStartDate.getDayOfWeek();

                CalendarInstance calendarInstance = this.calendarInstanceRepository
                        .findByEntityIdAndEntityTypeIdAndCalendarTypeId(accountId,
                                CalendarEntityType.SAVINGS.getValue(), CalendarType.COLLECTION.getValue());
                Calendar calendar = calendarInstance.getCalendar();
                calendar.updateRepeatingCalendar(calendarStartDate, CalendarFrequencyType.from(periodFrequencyType),
                        frequency, repeatsOnDay, null);
                this.calendarInstanceRepository.save(calendarInstance);
            }

            return new CommandProcessingResultBuilder() //
                    .withCommandId(command.commandId()) //
                    .withEntityId(accountId) //
                    .withOfficeId(account.officeId()) //
                    .withClientId(account.clientId()) //
                    .withGroupId(account.groupId()) //
                    .withSavingsId(accountId) //
                    .with(changes) //
                    .build();
        } catch (final DataAccessException dve) {
            handleDataIntegrityIssues(command, dve);
            return new CommandProcessingResult(Long.valueOf(-1));
        }
    }

    /**
     * Applies changes common to FD and RD modification: client, group, product,
     * field officer and charges. Each key is only processed when present in
     * {@code changes}.
     */
    private void updateFDAndRDCommonChanges(final Map<String, Object> changes, final JsonCommand command,
            final SavingsAccount account) {

        if (changes.containsKey(SavingsApiConstants.clientIdParamName)) {
            final Long clientId = command.longValueOfParameterNamed(SavingsApiConstants.clientIdParamName);
            if (clientId != null) {
                final Client client = this.clientRepository.findOneWithNotFoundDetection(clientId);
                if (client.isNotActive()) { throw new ClientNotActiveException(clientId); }
                account.update(client);
            } else {
                final Client client = null;
                account.update(client);
            }
        }

        if (changes.containsKey(SavingsApiConstants.groupIdParamName)) {
            final Long groupId = command.longValueOfParameterNamed(SavingsApiConstants.groupIdParamName);
            if (groupId != null) {
                final Group group = this.groupRepository.findOne(groupId);
                if (group == null) { throw new GroupNotFoundException(groupId); }
                if (group.isNotActive()) {
                    if (group.isCenter()) { throw new CenterNotActiveException(groupId); }
                    throw new GroupNotActiveException(groupId);
                }
                account.update(group);
            } else {
                final Group group = null;
                account.update(group);
            }
        }

        if (changes.containsKey(SavingsApiConstants.productIdParamName)) {
            final Long productId = command.longValueOfParameterNamed(SavingsApiConstants.productIdParamName);
            final SavingsProduct product = this.savingsProductRepository.findOne(productId);
            if (product == null) { throw new SavingsProductNotFoundException(productId); }
            account.update(product);
        }

        if (changes.containsKey(SavingsApiConstants.fieldOfficerIdParamName)) {
            final Long fieldOfficerId = command.longValueOfParameterNamed(SavingsApiConstants.fieldOfficerIdParamName);
            Staff fieldOfficer = null;
            if (fieldOfficerId != null) {
                fieldOfficer = this.staffRepository.findOneWithNotFoundDetection(fieldOfficerId);
            } else {
                // Officer unassigned: record empty string so the change is reported.
                changes.put(SavingsApiConstants.fieldOfficerIdParamName, "");
            }
            account.update(fieldOfficer);
        }

        if (changes.containsKey("charges")) {
            final Set<SavingsAccountCharge> charges = this.savingsAccountChargeAssembler.fromParsedJson(
                    command.parsedJson(), account.getCurrency().getCode());
            final boolean updated = account.update(charges);
            if (!updated) {
                // Charges identical to current state: don't report a change.
                changes.remove("charges");
            }
        }
    }

    /**
     * Deletes a deposit application and its notes. Only allowed while the
     * account is in "submitted and pending approval" state.
     */
    @Transactional
    @Override
    public CommandProcessingResult deleteApplication(final Long savingsId, final DepositAccountType depositAccountType) {

        final SavingsAccount account = this.depositAccountAssembler.assembleFrom(savingsId, depositAccountType);
        checkClientOrGroupActive(account);

        if (account.isNotSubmittedAndPendingApproval()) {
            final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
            final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors)
                    .resource(depositAccountType.resourceName() + DepositsApiConstants.deleteApplicationAction);

            baseDataValidator.reset().parameter(DepositsApiConstants.activatedOnDateParamName)
                    .failWithCodeNoParameterAddedToErrorCode("not.in.submittedandpendingapproval.state");

            if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); }
        }

        final List<Note> relatedNotes = this.noteRepository.findBySavingsAccountId(savingsId);
        this.noteRepository.deleteInBatch(relatedNotes);

        this.savingAccountRepository.delete(account);

        return new CommandProcessingResultBuilder() //
                .withEntityId(savingsId) //
                .withOfficeId(account.officeId()) //
                .withClientId(account.clientId()) //
                .withGroupId(account.groupId()) //
                .withSavingsId(savingsId) //
                .build();
    }

    /**
     * Approves a pending deposit application, optionally recording a note.
     */
    @Transactional
    @Override
    public CommandProcessingResult approveApplication(final Long savingsId, final JsonCommand command,
            final DepositAccountType depositAccountType) {

        final AppUser currentUser = this.context.authenticatedUser();

        this.savingsAccountApplicationTransitionApiJsonValidator.validateApproval(command.json());

        final SavingsAccount savingsAccount = this.depositAccountAssembler.assembleFrom(savingsId, depositAccountType);
        checkClientOrGroupActive(savingsAccount);

        final Map<String, Object> changes = savingsAccount.approveApplication(currentUser, command,
                DateUtils.getLocalDateOfTenant());
        if (!changes.isEmpty()) {
            this.savingAccountRepository.save(savingsAccount);

            final String noteText = command.stringValueOfParameterNamed("note");
            if (StringUtils.isNotBlank(noteText)) {
                final Note note = Note.savingNote(savingsAccount, noteText);
                changes.put("note", noteText);
                this.noteRepository.save(note);
            }
        }

        return new CommandProcessingResultBuilder() //
                .withCommandId(command.commandId()) //
                .withEntityId(savingsId) //
                .withOfficeId(savingsAccount.officeId()) //
                .withClientId(savingsAccount.clientId()) //
                .withGroupId(savingsAccount.groupId()) //
                .withSavingsId(savingsId) //
                .with(changes) //
                .build();
    }

    /**
     * Reverts a previously approved application back to pending state,
     * optionally recording a note.
     */
    @Transactional
    @Override
    public CommandProcessingResult undoApplicationApproval(final Long savingsId, final JsonCommand command,
            final DepositAccountType depositAccountType) {

        this.context.authenticatedUser();

        this.savingsAccountApplicationTransitionApiJsonValidator.validateForUndo(command.json());

        final SavingsAccount savingsAccount = this.depositAccountAssembler.assembleFrom(savingsId, depositAccountType);
        checkClientOrGroupActive(savingsAccount);

        final Map<String, Object> changes = savingsAccount.undoApplicationApproval();
        if (!changes.isEmpty()) {
            this.savingAccountRepository.save(savingsAccount);

            final String noteText = command.stringValueOfParameterNamed("note");
            if (StringUtils.isNotBlank(noteText)) {
                final Note note = Note.savingNote(savingsAccount, noteText);
                changes.put("note", noteText);
                this.noteRepository.save(note);
            }
        }

        return new CommandProcessingResultBuilder() //
                .withCommandId(command.commandId()) //
                .withEntityId(savingsId) //
                .withOfficeId(savingsAccount.officeId()) //
                .withClientId(savingsAccount.clientId()) //
                .withGroupId(savingsAccount.groupId()) //
                .withSavingsId(savingsId) //
                .with(changes) //
                .build();
    }

    /**
     * Rejects a pending deposit application, optionally recording a note.
     */
    @Transactional
    @Override
    public CommandProcessingResult rejectApplication(final Long savingsId, final JsonCommand command,
            final DepositAccountType depositAccountType) {

        final AppUser currentUser = this.context.authenticatedUser();

        this.savingsAccountApplicationTransitionApiJsonValidator.validateRejection(command.json());

        final SavingsAccount savingsAccount = this.depositAccountAssembler.assembleFrom(savingsId, depositAccountType);
        checkClientOrGroupActive(savingsAccount);

        final Map<String, Object> changes = savingsAccount.rejectApplication(currentUser, command,
                DateUtils.getLocalDateOfTenant());
        if (!changes.isEmpty()) {
            this.savingAccountRepository.save(savingsAccount);

            final String noteText = command.stringValueOfParameterNamed("note");
            if (StringUtils.isNotBlank(noteText)) {
                final Note note = Note.savingNote(savingsAccount, noteText);
                changes.put("note", noteText);
                this.noteRepository.save(note);
            }
        }

        return new CommandProcessingResultBuilder() //
                .withCommandId(command.commandId()) //
                .withEntityId(savingsId) //
                .withOfficeId(savingsAccount.officeId()) //
                .withClientId(savingsAccount.clientId()) //
                .withGroupId(savingsAccount.groupId()) //
                .withSavingsId(savingsId) //
                .with(changes) //
                .build();
    }

    /**
     * Records the applicant's withdrawal of a pending deposit application,
     * optionally recording a note.
     */
    @Transactional
    @Override
    public CommandProcessingResult applicantWithdrawsFromApplication(final Long savingsId, final JsonCommand command,
            final DepositAccountType depositAccountType) {

        final AppUser currentUser = this.context.authenticatedUser();

        this.savingsAccountApplicationTransitionApiJsonValidator.validateApplicantWithdrawal(command.json());

        final SavingsAccount savingsAccount = this.depositAccountAssembler.assembleFrom(savingsId, depositAccountType);
        checkClientOrGroupActive(savingsAccount);

        final Map<String, Object> changes = savingsAccount.applicantWithdrawsFromApplication(currentUser, command,
                DateUtils.getLocalDateOfTenant());
        if (!changes.isEmpty()) {
            this.savingAccountRepository.save(savingsAccount);

            final String noteText = command.stringValueOfParameterNamed("note");
            if (StringUtils.isNotBlank(noteText)) {
                final Note note = Note.savingNote(savingsAccount, noteText);
                changes.put("note", noteText);
                this.noteRepository.save(note);
            }
        }

        return new CommandProcessingResultBuilder() //
                .withCommandId(command.commandId()) //
                .withEntityId(savingsId) //
                .withOfficeId(savingsAccount.officeId()) //
                .withClientId(savingsAccount.clientId()) //
                .withGroupId(savingsAccount.groupId()) //
                .withSavingsId(savingsId) //
                .with(changes) //
                .build();
    }

    /**
     * Rejects the operation when the account's owning client, or its
     * group/center, is not active.
     */
    private void checkClientOrGroupActive(final SavingsAccount account) {
        final Client client = account.getClient();
        if (client != null) {
            if (client.isNotActive()) { throw new ClientNotActiveException(client.getId()); }
        }
        final Group group = account.group();
        if (group != null) {
            if (group.isNotActive()) {
                if (group.isCenter()) { throw new CenterNotActiveException(group.getId()); }
                throw new GroupNotActiveException(group.getId());
            }
        }
    }
}
package org.marketcetera.photon.views; import org.eclipse.core.databinding.AggregateValidationStatus; import org.eclipse.core.databinding.Binding; import org.eclipse.core.databinding.DataBindingContext; import org.eclipse.core.databinding.ObservablesManager; import org.eclipse.core.databinding.UpdateValueStrategy; import org.eclipse.core.databinding.beans.BeansObservables; import org.eclipse.core.databinding.conversion.Converter; import org.eclipse.core.databinding.conversion.NumberToStringConverter; import org.eclipse.core.databinding.observable.IObservable; import org.eclipse.core.databinding.observable.list.IListChangeListener; import org.eclipse.core.databinding.observable.list.ListChangeEvent; import org.eclipse.core.databinding.observable.list.ListDiffEntry; import org.eclipse.core.databinding.observable.value.DecoratingObservableValue; import org.eclipse.core.databinding.observable.value.IObservableValue; import org.eclipse.core.databinding.observable.value.IValueChangeListener; import org.eclipse.core.databinding.observable.value.ValueChangeEvent; import org.eclipse.core.runtime.IStatus; import org.eclipse.jface.databinding.swt.SWTObservables; import org.eclipse.jface.databinding.viewers.ObservableListContentProvider; import org.eclipse.jface.databinding.viewers.ObservableMapLabelProvider; import org.eclipse.jface.databinding.viewers.ViewersObservables; import org.eclipse.jface.fieldassist.FieldDecorationRegistry; import org.eclipse.jface.viewers.ArrayContentProvider; import org.eclipse.jface.viewers.CheckStateChangedEvent; import org.eclipse.jface.viewers.CheckboxTableViewer; import org.eclipse.jface.viewers.ComboViewer; import org.eclipse.jface.viewers.ICheckStateListener; import org.eclipse.swt.SWT; import org.eclipse.swt.events.DisposeEvent; import org.eclipse.swt.events.DisposeListener; import org.eclipse.swt.events.FocusAdapter; import org.eclipse.swt.events.FocusEvent; import org.eclipse.swt.events.KeyAdapter; import org.eclipse.swt.events.KeyEvent; 
import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.graphics.Color; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.TableItem; import org.eclipse.swt.widgets.Text; import org.eclipse.ui.IMemento; import org.eclipse.ui.IViewSite; import org.eclipse.ui.PartInitException; import org.eclipse.ui.forms.widgets.ScrolledForm; import org.marketcetera.photon.BrokerManager; import org.marketcetera.photon.PhotonPlugin; import org.marketcetera.photon.BrokerManager.Broker; import org.marketcetera.photon.BrokerManager.BrokerLabelProvider; import org.marketcetera.photon.commons.databinding.TypedConverter; import org.marketcetera.photon.commons.databinding.TypedObservableValue; import org.marketcetera.photon.commons.ui.databinding.RequiredFieldSupport; import org.marketcetera.photon.commons.ui.databinding.UpdateStrategyFactory; import org.marketcetera.photon.ui.databinding.StatusToImageConverter; import org.marketcetera.trade.BrokerID; import org.marketcetera.trade.NewOrReplaceOrder; import org.marketcetera.trade.OrderReplace; import org.marketcetera.trade.OrderSingle; import org.marketcetera.util.misc.ClassVersion; import com.ibm.icu.text.NumberFormat; /* $License$ */ /** * This is the abstract base class for all order ticket views. It is responsible * for setting up the databindings for the "common" order ticket fields, such as * side, price, and time in force. * * It also is responsible for managing the "custom fields" for order messages * that can be set by the user in the preferences dialog, and activated in the * order ticket. 
 *
 * @author gmiller
 * @author <a href="mailto:will@marketcetera.com">Will Horn</a>
 * @since 0.6.0
 */
@ClassVersion("$Id: OrderTicketView.java 16154 2012-07-14 16:34:05Z colin $")
public abstract class OrderTicketView<M extends OrderTicketModel, T extends IOrderTicket>
        extends XSWTView<T> {

    /** Memento key prefix under which each custom field's checked state is persisted. */
    private static final String CUSTOM_FIELD_VIEW_SAVED_STATE_KEY_PREFIX = "CUSTOM_FIELD_CHECKED_STATE_OF_"; //$NON-NLS-1$

    // Concrete ticket interface type used to materialize the XSWT UI.
    private final Class<T> mTicketClass;

    // Disposes every observable registered with it when this view is disposed.
    private final ObservablesManager mObservablesManager = new ObservablesManager();

    // The view model backing this ticket.
    private final M mModel;

    // Saved view state; may be null when the view has no prior state.
    private IMemento mMemento;

    private ComboViewer mAvailableBrokersViewer;

    private CheckboxTableViewer mCustomFieldsTableViewer;

    private ComboViewer mSideComboViewer;

    private ComboViewer mTimeInForceComboViewer;

    private ComboViewer mOrderTypeComboViewer;

    // Re-focuses the appropriate control whenever the model's order changes;
    // removed again in dispose().
    private IValueChangeListener mFocusListener;

    /**
     * Constructor.
     *
     * @param ticketClass
     *            type of ticket class
     * @param model
     *            the ticket model
     */
    protected OrderTicketView(Class<T> ticketClass, M model) {
        mTicketClass = ticketClass;
        mModel = model;
    }

    @Override
    public void init(IViewSite site, IMemento memento) throws PartInitException {
        super.init(site, memento);
        // Keep the memento so bindCustomFields/saveState can restore/store
        // the custom-field checked states.
        mMemento = memento;
    }

    @Override
    protected Class<T> getXSWTInterfaceClass() {
        return mTicketClass;
    }

    /**
     * Returns the view model.
     *
     * @return the view model
     */
    protected M getModel() {
        return mModel;
    }

    /**
     * Returns the {@link ObservablesManager} that will clean up managed
     * observables.
     *
     * @return the observables manager
     */
    public ObservablesManager getObservablesManager() {
        return mObservablesManager;
    }

    @Override
    protected void finishUI() {
        T ticket = getXSWTView();

        /*
         * Set background of error message area.
         */
        Color bg = ticket.getForm().getParent().getBackground();
        ticket.getErrorIconLabel().setBackground(bg);
        ticket.getErrorMessageLabel().setBackground(bg);

        /*
         * Set up viewers.
         */
        initViewers(ticket);

        /*
         * Additional widget customizations.
         */
        customizeWidgets(ticket);

        /*
         * Handle clear button click.
         */
        ticket.getClearButton().addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                getModel().clearOrderMessage();
            }
        });

        /*
         * Handle send button click.
         */
        ticket.getSendButton().addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                handleSend();
            }
        });

        /*
         * Bind to model. Binding failures are logged rather than propagated so
         * the view still opens.
         */
        try {
            bindFormTitle();
            bindMessage();
            bindCustomFields();
        } catch (Exception e) {
            PhotonPlugin.getMainConsoleLogger().error(
                    Messages.ORDER_TICKET_VIEW_CANNOT_BIND_TO_TICKET.getText(), e);
        }

        /*
         * Initialize validation (error message area).
         */
        initValidation();

        /*
         * Control focus when the model's order changes.
         */
        mFocusListener = new IValueChangeListener() {
            @Override
            public void handleValueChange(ValueChangeEvent event) {
                setFocus();
            }
        };
        getModel().getOrderObservable().addValueChangeListener(mFocusListener);
        ticket.getForm().reflow(true);
    }

    /**
     * Customize the widgets: sizes, select-all-on-focus, and Enter-to-send
     * behavior for the common order fields.
     *
     * @param ticket
     *            the order ticket.
     */
    protected void customizeWidgets(T ticket) {
        /*
         * Update size of text fields since default will be small.
         * (updateSize is presumably inherited from XSWTView -- not visible here.)
         */
        updateSize(ticket.getQuantityText(), 10);
        updateSize(ticket.getSymbolText(), 10);
        updateSize(ticket.getPriceText(), 10);
        updateSize(ticket.getAccountText(), 10);

        /*
         * Customize text fields to auto select the text on focus to make it
         * easy to change the value.
         */
        selectOnFocus(ticket.getQuantityText());
        selectOnFocus(ticket.getSymbolText());
        selectOnFocus(ticket.getPriceText());
        selectOnFocus(ticket.getAccountText());

        /*
         * If the ticket has no errors, enter on these fields will trigger a
         * send.
         */
        addSendOrderListener(ticket.getSideCombo());
        addSendOrderListener(ticket.getQuantityText());
        addSendOrderListener(ticket.getSymbolText());
        addSendOrderListener(ticket.getOrderTypeCombo());
        addSendOrderListener(ticket.getPriceText());
        addSendOrderListener(ticket.getBrokerCombo());
        addSendOrderListener(ticket.getTifCombo());
        addSendOrderListener(ticket.getAccountText());
    }

    /**
     * Set up viewers for the side, order type, broker, time-in-force combos
     * and the custom-fields checkbox table.
     *
     * @param ticket
     *            the order ticket supplying the raw SWT widgets
     */
    protected void initViewers(T ticket) {
        /*
         * Side combo based on Side enum.
         */
        mSideComboViewer = new ComboViewer(ticket.getSideCombo());
        mSideComboViewer.setContentProvider(new ArrayContentProvider());
        mSideComboViewer.setInput(getModel().getValidSideValues());

        /*
         * Order type combo based on OrderType enum.
         */
        mOrderTypeComboViewer = new ComboViewer(ticket.getOrderTypeCombo());
        mOrderTypeComboViewer.setContentProvider(new ArrayContentProvider());
        mOrderTypeComboViewer.setInput(getModel().getValidOrderTypeValues());

        /*
         * Broker combo based on available brokers.
         */
        mAvailableBrokersViewer = new ComboViewer(ticket.getBrokerCombo());
        mAvailableBrokersViewer
                .setContentProvider(new ObservableListContentProvider());
        mAvailableBrokersViewer.setLabelProvider(new BrokerLabelProvider());
        mAvailableBrokersViewer.setInput(getModel().getValidBrokers());

        /*
         * Time in Force combo based on TimeInForce enum.
         *
         * An extra blank entry is added since the field is optional.
         */
        mTimeInForceComboViewer = new ComboViewer(ticket.getTifCombo());
        mTimeInForceComboViewer.setContentProvider(new ArrayContentProvider());
        mTimeInForceComboViewer
                .setInput(getModel().getValidTimeInForceValues());

        /*
         * Custom fields table.
         *
         * Input is bound to model in bindCustomFields.
         */
        mCustomFieldsTableViewer = new CheckboxTableViewer(ticket
                .getCustomFieldsTable());
        ObservableListContentProvider contentProvider = new ObservableListContentProvider();
        mCustomFieldsTableViewer.setContentProvider(contentProvider);
        mCustomFieldsTableViewer
                .setLabelProvider(new ObservableMapLabelProvider(
                        BeansObservables.observeMaps(contentProvider
                                .getKnownElements(), CustomField.class,
                                new String[] { "keyString", "valueString" })));//$NON-NLS-1$ //$NON-NLS-2$
    }

    /**
     * Get the UI string to show for a "new order" message.
     *
     * @return the UI string
     */
    protected abstract String getNewOrderString();

    /**
     * Get the UI string to show for a "replace" message.
     *
     * @return the UI string
     */
    protected abstract String getReplaceOrderString();

    /**
     * Bind the top level form title to show different text depending on the
     * order type (new order vs. replace).
     */
    protected void bindFormTitle() {
        TypedObservableValue<String> formTextObservable = new TypedObservableValue<String>(
                String.class) {
            @Override
            protected String doGetValue() {
                return getXSWTView().getForm().getText();
            }

            @Override
            protected void doSetTypedValue(String value) {
                getXSWTView().getForm().setText(value);
            }
        };
        getObservablesManager().addObservable(formTextObservable);
        getDataBindingContext().bindValue(
                formTextObservable,
                getModel().getOrderObservable(),
                null,
                new UpdateValueStrategy().setConverter(new Converter(
                        NewOrReplaceOrder.class, String.class) {
                    public Object convert(Object fromObject) {
                        if (fromObject instanceof OrderReplace) {
                            return getReplaceOrderString();
                        } else if (fromObject instanceof OrderSingle) {
                            return getNewOrderString();
                        } else {
                            return null;
                        }
                    }
                }));
    }

    /**
     * Binds the UI to the model: side, quantity, symbol, order type, price,
     * broker, time in force, and account.
     */
    protected void bindMessage() {
        final DataBindingContext dbc = getDataBindingContext();
        final OrderTicketModel model = getModel();
        final IOrderTicket ticket = getXSWTView();

        /*
         * Side
         */
        bindRequiredCombo(mSideComboViewer, model.getSide(),
                Messages.ORDER_TICKET_VIEW_SIDE__LABEL.getText());
        enableForNewOrderOnly(mSideComboViewer.getControl());

        /*
         * Quantity
         */
        bindRequiredDecimal(ticket.getQuantityText(), model.getQuantity(),
                Messages.ORDER_TICKET_VIEW_QUANTITY__LABEL.getText());

        /*
         * Symbol
         */
        bindRequiredText(ticket.getSymbolText(), getModel().getSymbol(),
                Messages.ORDER_TICKET_VIEW_SYMBOL__LABEL.getText());
        enableForNewOrderOnly(ticket.getSymbolText());

        /*
         * Order Type
         */
        bindRequiredCombo(mOrderTypeComboViewer, model.getOrderType(),
                Messages.ORDER_TICKET_VIEW_ORDER_TYPE__LABEL.getText());

        /*
         * Price
         *
         * Need custom required field logic since price is only required for
         * limit orders.
         */
        {
            Binding binding = bindDecimal(ticket.getPriceText(), model
                    .getPrice(), Messages.ORDER_TICKET_VIEW_PRICE__LABEL
                    .getText());
            /*
             * RequiredFieldSupport reports an error if the value is null or
             * empty string. We want this behavior when the order is a limit
             * order, but not when it is a market order (since empty string is
             * correct as the price is uneditable. So we decorate the observable
             * and pass the decorated one to RequiredFieldsupport.
             */
            IObservableValue priceDecorator = new DecoratingObservableValue(
                    (IObservableValue) binding.getTarget(), false) {
                @Override
                public Object getValue() {
                    Object actualValue = super.getValue();
                    if ("".equals(actualValue) //$NON-NLS-1$
                            && !model.isLimitOrder().getTypedValue()) {
                        /*
                         * Return an object to "trick" RequiredFieldSupport to
                         * not error.
                         */
                        return new Object();
                    }
                    return actualValue;
                }
            };
            RequiredFieldSupport.initFor(dbc, priceDecorator,
                    Messages.ORDER_TICKET_VIEW_PRICE__LABEL.getText(), false,
                    SWT.BOTTOM | SWT.LEFT, binding);
            // Price is editable only for limit orders.
            dbc.bindValue(SWTObservables.observeEnabled(ticket.getPriceText()),
                    model.isLimitOrder());
        }

        /*
         * Broker
         *
         * Custom binding logic required since the viewer list can dynamically
         * change.
         */
        {
            IObservableValue target = ViewersObservables
                    .observeSingleSelection(mAvailableBrokersViewer);
            /*
             * Bind the target (combo) to the model, but use POLICY_ON_REQUEST
             * for target-to-model binding since we don't want the model to
             * change simply because a broker went down. The target-to-model
             * updates are handled manually below.
             */
            final Binding binding = dbc.bindValue(target, model.getBrokerId(),
                    new UpdateValueStrategy(
                            UpdateValueStrategy.POLICY_ON_REQUEST)
                            .setConverter(new TypedConverter<Broker, BrokerID>(
                                    Broker.class, BrokerID.class) {
                                @Override
                                public BrokerID doConvert(Broker fromObject) {
                                    return fromObject.getId();
                                }
                            }),
                    new UpdateValueStrategy()
                            .setConverter(new TypedConverter<BrokerID, Broker>(
                                    BrokerID.class, Broker.class) {
                                @Override
                                public Broker doConvert(BrokerID fromObject) {
                                    return BrokerManager.getCurrent()
                                            .getBroker(fromObject);
                                }
                            }));
            /*
             * If the target changes and the new value is not null, then this
             * was a user selection and the model should be updated.
             */
            target.addValueChangeListener(new IValueChangeListener() {
                @Override
                public void handleValueChange(ValueChangeEvent event) {
                    if (event.diff.getNewValue() != null) {
                        binding.updateTargetToModel();
                    }
                }
            });
            /*
             * When the broker list changes, we force a model-to-target update
             * to ensure the two are in sync if possible.
             */
            final IListChangeListener listener = new IListChangeListener() {
                @Override
                public void handleListChange(ListChangeEvent event) {
                    binding.updateModelToTarget();
                }
            };
            BrokerManager.getCurrent().getAvailableBrokers()
                    .addListChangeListener(listener);
            /*
             * Need to remove the listener when the widget is disposed.
             */
            mAvailableBrokersViewer.getControl().addDisposeListener(
                    new DisposeListener() {
                        @Override
                        public void widgetDisposed(DisposeEvent e) {
                            BrokerManager.getCurrent().getAvailableBrokers()
                                    .removeListChangeListener(listener);
                        }
                    });
            /*
             * If the model has a broker id, but the target doesn't have a
             * corresponding entry, the target will be null which needs to
             * generate an error.
             */
            setRequired(binding, Messages.ORDER_TICKET_VIEW_BROKER__LABEL
                    .getText());
        }
        enableForNewOrderOnly(mAvailableBrokersViewer.getControl());

        /*
         * Time in Force (optional field)
         */
        bindCombo(mTimeInForceComboViewer, model.getTimeInForce());

        /*
         * Account (optional field)
         */
        bindText(getXSWTView().getAccountText(), model.getAccount());
    }

    /**
     * Bind the custom fields on the model to the view, restoring each field's
     * checked state from the saved memento when available.
     */
    protected void bindCustomFields() {
        M model = getModel();
        mCustomFieldsTableViewer.setInput(model.getCustomFieldsList());
        // Propagate the checkbox state back into the CustomField element.
        mCustomFieldsTableViewer
                .addCheckStateListener(new ICheckStateListener() {
                    public void checkStateChanged(CheckStateChangedEvent event) {
                        Object source = event.getElement();
                        ((CustomField) source).setEnabled(event.getChecked());
                    }
                });
        model.getCustomFieldsList().addListChangeListener(
                new IListChangeListener() {
                    public void handleListChange(ListChangeEvent event) {
                        ScrolledForm theForm = getXSWTView().getForm();
                        if (!theForm.isDisposed()) {
                            ListDiffEntry[] differences = event.diff
                                    .getDifferences();
                            for (ListDiffEntry listDiffEntry : differences) {
                                if (listDiffEntry.isAddition()) {
                                    CustomField customField = (CustomField) listDiffEntry
                                            .getElement();
                                    String key = CUSTOM_FIELD_VIEW_SAVED_STATE_KEY_PREFIX
                                            + customField.getKeyString();
                                    IMemento theMemento = getMemento();
                                    // Restore the saved checked state, if any
                                    // (non-zero integer means checked).
                                    if (theMemento != null
                                            && theMemento.getInteger(key) != null) {
                                        boolean itemChecked = (theMemento
                                                .getInteger(key).intValue() != 0);
                                        customField.setEnabled(itemChecked);
                                    }
                                }
                            }
                            theForm.reflow(true);
                        }
                    }
                });
    }

    /**
     * Initialization the validation (error message area) of the view: the
     * aggregated max-severity status drives the error label, icon, and the
     * send button's enablement (disabled at ERROR severity).
     */
    protected void initValidation() {
        DataBindingContext dbc = getDataBindingContext();
        AggregateValidationStatus aggregateValidationStatus = new AggregateValidationStatus(
                dbc, AggregateValidationStatus.MAX_SEVERITY);
        dbc.bindValue(SWTObservables.observeText(getXSWTView()
                .getErrorMessageLabel()), aggregateValidationStatus);
        dbc.bindValue(SWTObservables.observeImage(getXSWTView()
                .getErrorIconLabel()), aggregateValidationStatus, null,
                new UpdateValueStrategy()
                        .setConverter(new StatusToImageConverter()));
        dbc.bindValue(SWTObservables.observeEnabled(getXSWTView()
                .getSendButton()), aggregateValidationStatus, null,
                new UpdateValueStrategy()
                        .setConverter(new TypedConverter<IStatus, Boolean>(
                                IStatus.class, Boolean.class) {
                            @Override
                            public Boolean doConvert(IStatus fromObject) {
                                return fromObject.getSeverity() < IStatus.ERROR;
                            }
                        }));
    }

    /**
     * Binds a combo viewer to a model field; the model's NullSentinel value is
     * converted to null (used for optional fields such as time in force).
     *
     * @param viewer
     *            the viewer
     * @param model
     *            the model observable
     * @return the binding
     */
    protected Binding bindCombo(ComboViewer viewer, IObservableValue model) {
        DataBindingContext dbc = getDataBindingContext();
        IObservableValue target = ViewersObservables
                .observeSingleSelection(viewer);
        return dbc.bindValue(target, model, new UpdateValueStrategy()
                .setConverter(new Converter(target.getValueType(), model
                        .getValueType()) {
                    @Override
                    public Object convert(Object fromObject) {
                        return fromObject instanceof OrderTicketModel.NullSentinel ? null
                                : fromObject;
                    }
                }), null);
    }

    /**
     * Binds a combo viewer and makes it required.
     *
     * @param viewer
     *            the viewer
     * @param model
     *            the model observable
     * @param description
     *            the description for error messages
     * @return the binding
     */
    protected Binding bindRequiredCombo(ComboViewer viewer,
            IObservableValue model, String description) {
        DataBindingContext dbc = getDataBindingContext();
        IObservableValue target = ViewersObservables
                .observeSingleSelection(viewer);
        Binding binding = dbc.bindValue(target, model);
        setRequired(binding, description);
        return binding;
    }

    /**
     * Binds a text widget to a BigDecimal value, with a conversion error
     * message when the text is not a valid decimal.
     *
     * @param text
     *            the widget
     * @param model
     *            the model observable
     * @param description
     *            the description for error messages
     * @return the binding
     */
    protected Binding bindDecimal(Text text, IObservableValue model,
            String description) {
        DataBindingContext dbc = getDataBindingContext();
        IObservableValue target = SWTObservables.observeText(text, SWT.Modify);
        // Grouping separators are disabled so the rendered value round-trips.
        NumberFormat numberFormat = NumberFormat.getInstance();
        numberFormat.setGroupingUsed(false);
        return dbc.bindValue(target, model, UpdateStrategyFactory
                .withConvertErrorMessage(new UpdateValueStrategy(),
                        Messages.ORDER_TICKET_VIEW_NOT_DECIMAL_ERROR
                                .getText(description)),
                new UpdateValueStrategy().setConverter(NumberToStringConverter
                        .fromBigDecimal(numberFormat)));
    }

    /**
     * Binds a text widget to a BigDecimal value and makes it required.
     *
     * @param text
     *            the widget
     * @param model
     *            the model observable
     * @param description
     *            the description for error messages
     * @return the binding
     */
    protected Binding bindRequiredDecimal(Text text, IObservableValue model,
            String description) {
        Binding binding = bindDecimal(text, model, description);
        setRequired(binding, description);
        return binding;
    }

    /**
     * Binds a text widget to the model. For String-typed models, an empty text
     * box maps to null rather than empty string.
     *
     * @param text
     *            the widget
     * @param model
     *            the model observable
     * @return the binding
     */
    protected Binding bindText(Text text, IObservableValue model) {
        DataBindingContext dbc = getDataBindingContext();
        IObservableValue target = SWTObservables.observeText(text, SWT.Modify);
        UpdateValueStrategy targetToModel = null;
        if (model.getValueType() == String.class) {
            /*
             * Clearing a text box should set the model to null, not empty
             * string.
             */
            targetToModel = new UpdateValueStrategy()
                    .setConverter(new TypedConverter<String, String>(
                            String.class, String.class) {
                        @Override
                        protected String doConvert(String fromObject) {
                            if (fromObject != null && fromObject.isEmpty()) {
                                return null;
                            }
                            return fromObject;
                        }
                    });
        }
        return dbc.bindValue(target, model, targetToModel, null);
    }

    /**
     * Binds a text widget and makes it required.
     *
     * @param text
     *            the widget
     * @param model
     *            the model observable
     * @param description
     *            the description for error messages
     * @return the binding
     */
    protected Binding bindRequiredText(Text text, IObservableValue model,
            String description) {
        Binding binding = bindText(text, model);
        setRequired(binding, description);
        return binding;
    }

    /**
     * Add required semantics to a binding.
     *
     * @param binding
     *            the binding
     * @param description
     *            the description for error messages
     */
    protected void setRequired(Binding binding, String description) {
        RequiredFieldSupport.initFor(getDataBindingContext(), binding
                .getTarget(), description, false, SWT.BOTTOM | SWT.LEFT,
                binding);
    }

    /**
     * Add required semantics to a control.
     *
     * @param target
     *            the control's observable
     * @param description
     *            the description for error messages
     * @param binding
     *            a binding that also contributes validation status, can be null
     */
    protected void setRequired(IObservable target, String description,
            Binding binding) {
        RequiredFieldSupport.initFor(getDataBindingContext(), target,
                description, false, SWT.BOTTOM | SWT.LEFT, binding);
    }

    /**
     * Configures a control to be enabled only when model contains a new order
     * (as opposed to a replace order).
     *
     * @param control
     *            the control
     */
    protected void enableForNewOrderOnly(Control control) {
        getDataBindingContext().bindValue(
                SWTObservables.observeEnabled(control),
                getModel().getOrderObservable(),
                null,
                new UpdateValueStrategy().setConverter(new Converter(
                        NewOrReplaceOrder.class, Boolean.class) {
                    @Override
                    public Object convert(Object fromObject) {
                        return fromObject instanceof OrderSingle;
                    }
                }));
    }

    /**
     * Customizes a text widget to select the entire text when it receives focus
     * (makes it easy to change).
     *
     * @param text
     *            the widget
     */
    protected void selectOnFocus(Text text) {
        text.addFocusListener(new FocusAdapter() {
            @Override
            public void focusGained(FocusEvent e) {
                ((Text) e.widget).selectAll();
            }
        });
    }

    /**
     * Hook up a listener to the targetControl that listens for {@link SWT#CR}
     * and invokes {@link #handleSend()}.
     *
     * @param targetControl
     *            the control to hook up
     */
    protected void addSendOrderListener(Control targetControl) {
        targetControl.addKeyListener(new KeyAdapter() {
            @Override
            public void keyReleased(KeyEvent e) {
                if (e.character == SWT.CR) {
                    // Only send when validation allows it (send button enabled).
                    if (getXSWTView().getSendButton().isEnabled()) {
                        handleSend();
                    }
                }
            }
        });
    }

    /**
     * This method "completes" the message by calling
     * {@link OrderTicketModel#completeMessage()}, sends the order via the
     * controller, then resets the message in the view model. Failures are
     * logged and shown in the error area rather than propagated.
     */
    protected void handleSend() {
        try {
            // TODO: this logic should probably be in the controller
            PhotonPlugin plugin = PhotonPlugin.getDefault();
            mModel.completeMessage();
            NewOrReplaceOrder orderMessage = mModel.getOrderObservable()
                    .getTypedValue();
            plugin.getPhotonController().sendOrderChecked(orderMessage);
            mModel.clearOrderMessage();
        } catch (Exception e) {
            String errorMessage = e.getLocalizedMessage();
            PhotonPlugin.getMainConsoleLogger().error(errorMessage);
            showErrorMessage(errorMessage, IStatus.ERROR);
        }
    }

    /**
     * Show the given error message in this order ticket's error display area.
     * A null message clears the display.
     *
     * @param errorMessage
     *            the text of the error message
     * @param severity
     *            the severity of the error message, see {@link IStatus}
     */
    protected void showErrorMessage(String errorMessage, int severity) {
        Label errorMessageLabel = getXSWTView().getErrorMessageLabel();
        Label errorIconLabel = getXSWTView().getErrorIconLabel();
        if (errorMessage == null) {
            errorMessageLabel.setText(""); //$NON-NLS-1$
            errorIconLabel.setImage(null);
        } else {
            errorMessageLabel.setText(errorMessage);
            if (severity == IStatus.OK) {
                errorIconLabel.setImage(null);
            } else {
                // ERROR gets the error decoration; anything else non-OK gets
                // the warning decoration.
                if (severity == IStatus.ERROR)
                    errorIconLabel.setImage(FieldDecorationRegistry
                            .getDefault().getFieldDecoration(
                                    FieldDecorationRegistry.DEC_ERROR)
                            .getImage());
                else
                    errorIconLabel.setImage(FieldDecorationRegistry
                            .getDefault().getFieldDecoration(
                                    FieldDecorationRegistry.DEC_WARNING)
                            .getImage());
            }
        }
    }

    /**
     * Get the memento used for storing preferences and state for this view.
     *
     * @return the memento, may be null
     */
    protected IMemento getMemento() {
        return mMemento;
    }

    /**
     * Stores the checked state of each of the custom fields in the view
     * (keyed by the field's text in column 1).
     */
    @Override
    public void saveState(IMemento memento) {
        TableItem[] items = getXSWTView().getCustomFieldsTable().getItems();
        for (int i = 0; i < items.length; i++) {
            TableItem item = items[i];
            String key = OrderTicketView.CUSTOM_FIELD_VIEW_SAVED_STATE_KEY_PREFIX
                    + item.getText(1);
            memento.putInteger(key, (item.getChecked() ? 1 : 0));
        }
    }

    /**
     * Set the focus on the Side control (in the case of a new order) or the
     * Quantity control (in the case of a replace order).
     */
    @Override
    public void setFocus() {
        IOrderTicket ticket = getXSWTView();
        // Side is disabled for replace orders (see enableForNewOrderOnly).
        if (ticket.getSideCombo().isEnabled()) {
            ticket.getSideCombo().setFocus();
        } else {
            ticket.getQuantityText().setFocus();
        }
    }

    @Override
    public void dispose() {
        // Detach the focus listener added in finishUI and release all managed
        // observables before the widgets go away.
        getModel().getOrderObservable().removeValueChangeListener(
                mFocusListener);
        mObservablesManager.dispose();
        super.dispose();
    }
}
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2012 The ZAP Development Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.zap.extension.ascanrulesBeta; import java.net.SocketException; import java.util.LinkedHashMap; import java.util.Map; import org.apache.commons.configuration.ConversionException; import org.apache.commons.httpclient.InvalidRedirectLocationException; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.parosproxy.paros.Constant; import org.parosproxy.paros.core.scanner.AbstractAppParamPlugin; import org.parosproxy.paros.core.scanner.Alert; import org.parosproxy.paros.core.scanner.Category; import org.parosproxy.paros.network.HttpMessage; import org.zaproxy.zap.extension.ruleconfig.RuleConfigParam; import org.zaproxy.zap.model.Tech; import org.zaproxy.zap.model.TechSet; /** * TODO: maybe implement a more specific UNION based check for Hypersonic (with table names) * * <p>The SqlInjectionHypersonicScanRule identifies Hypersonic specific SQL Injection * vulnerabilities using Hypersonic specific syntax. If it doesn't use Hypersonic specific syntax, * it belongs in the generic SQLInjection class! 
Note the ordering of checks, for efficiency is : 1) * Error based (N/A) 2) Boolean Based (N/A - uses standard syntax) 3) UNION based (TODO) 4) Stacked * (N/A - uses standard syntax) 5) Blind/Time Based (Yes) * * <p>See the following for some great (non-Hypersonic specific) specific tricks which could be * integrated here http://www.websec.ca/kb/sql_injection * http://pentestmonkey.net/cheat-sheet/sql-injection/oracle-sql-injection-cheat-sheet * * <p>Important Notes for the Hypersonic database (and useful in the code): - takes -- style * comments - requires a table name in normal select statements (like Oracle: cannot just say * "select 1" or "select 2" like in most RDBMSs - requires a table name in "union select" statements * (like Oracle). - allows stacked queries via JDBC driver. - Constants in select must be in single * quotes, not doubles (like Oracle). - supports UDFs in the form of Java code (very interesting!!) * - x second delay select statement: select "java.lang.Thread.sleep"(5000) from * INFORMATION_SCHEMA.SYSTEM_COLUMNS where TABLE_NAME = 'SYSTEM_COLUMNS' and COLUMN_NAME = * 'TABLE_NAME' - metadata select statement: select TABLE_NAME, COLUMN_NAME, TYPE_NAME, COLUMN_SIZE, * DECIMAL_DIGITS, IS_NULLABLE from INFORMATION_SCHEMA.SYSTEM_COLUMNS * * @author 70pointer */ public class SqlInjectionHypersonicScanRule extends AbstractAppParamPlugin { private boolean doUnionBased = false; // TODO: use in Union based, when we implement it private boolean doTimeBased = false; private int doUnionMaxRequests = 0; // TODO: use in Union based, when we implement it private int doTimeMaxRequests = 0; // note this is in milliseconds private int sleepInMs = 15000; /** Hypersonic one-line comment */ public static final String SQL_ONE_LINE_COMMENT = " -- "; private static final String ORIG_VALUE_TOKEN = "<<<<ORIGINALVALUE>>>>"; private static final String SLEEP_TOKEN = "<<<<SLEEP>>>>"; /** * create a map of SQL related error message fragments, and map them back to the RDBMS 
that they * are associated with keep the ordering the same as the order in which the values are inserted, * to allow the more (subjectively judged) common cases to be tested first Note: these should * represent actual (driver level) error messages for things like syntax error, otherwise we are * simply guessing that the string should/might occur. */ private static final Map<String, String> SQL_ERROR_TO_DBMS = new LinkedHashMap<>(); static { SQL_ERROR_TO_DBMS.put("org.hsql", "Hypersonic SQL"); SQL_ERROR_TO_DBMS.put("hSql.", "Hypersonic SQL"); SQL_ERROR_TO_DBMS.put("Unexpected token , requires FROM in statement", "Hypersonic SQL"); SQL_ERROR_TO_DBMS.put("Unexpected end of command in statement", "Hypersonic SQL"); SQL_ERROR_TO_DBMS.put("Column count does not match in statement", "Hypersonic SQL"); SQL_ERROR_TO_DBMS.put("Table not found in statement", "Hypersonic SQL"); SQL_ERROR_TO_DBMS.put("Unexpected token:", "Hypersonic SQL"); // Note: only Hypersonic mappings here. } /** the sleep function in Hypersonic SQL */ private static String SQL_HYPERSONIC_TIME_FUNCTION = "\"java.lang.Thread.sleep\"(" + SLEEP_TOKEN + ")"; /** Hypersonic specific time based injection strings. */ // issue with "+" symbols in here: // we cannot encode them here as %2B, as then the database gets them double encoded as %252Bn // we cannot leave them as unencoded '+' characters either, as then they are NOT encoded by the // HttpMessage.setGetParams (x) or by AbstractPlugin.sendAndReceive (HttpMessage) // and are seen by the database as spaces :( // in short, we cannot use the "+" character in parameters, unless we mean to use it as a space // character!!!! Particularly Nasty. // Workaround: use RDBMS specific functions like "CONCAT(a,b,c)" which mean parsing the original // value into the middle of the parameter value to be passed, // rather than just appending to it // Issue: this technique does not close the open ' or " in the query.. so do not use it.. 
// Note: <<<<ORIGINALVALUE>>>> is replaced with the original parameter value at runtime in these // examples below (see * comment) // TODO: maybe add support for ')' after the original value, before the sleeps private static String[] SQL_HYPERSONIC_TIME_REPLACEMENTS = { "; select " + SQL_HYPERSONIC_TIME_FUNCTION + " from INFORMATION_SCHEMA.SYSTEM_COLUMNS where TABLE_NAME = 'SYSTEM_COLUMNS' and COLUMN_NAME = 'TABLE_NAME'" + SQL_ONE_LINE_COMMENT, "'; select " + SQL_HYPERSONIC_TIME_FUNCTION + " from INFORMATION_SCHEMA.SYSTEM_COLUMNS where TABLE_NAME = 'SYSTEM_COLUMNS' and COLUMN_NAME = 'TABLE_NAME'" + SQL_ONE_LINE_COMMENT, "\"; select " + SQL_HYPERSONIC_TIME_FUNCTION + " from INFORMATION_SCHEMA.SYSTEM_COLUMNS where TABLE_NAME = 'SYSTEM_COLUMNS' and COLUMN_NAME = 'TABLE_NAME'" + SQL_ONE_LINE_COMMENT, "); select " + SQL_HYPERSONIC_TIME_FUNCTION + " from INFORMATION_SCHEMA.SYSTEM_COLUMNS where TABLE_NAME = 'SYSTEM_COLUMNS' and COLUMN_NAME = 'TABLE_NAME'" + SQL_ONE_LINE_COMMENT, SQL_HYPERSONIC_TIME_FUNCTION, ORIG_VALUE_TOKEN + " / " + SQL_HYPERSONIC_TIME_FUNCTION + " ", ORIG_VALUE_TOKEN + "' / " + SQL_HYPERSONIC_TIME_FUNCTION + " / '", ORIG_VALUE_TOKEN + "\" / " + SQL_HYPERSONIC_TIME_FUNCTION + " / \"", ORIG_VALUE_TOKEN + " and exists ( select " + SQL_HYPERSONIC_TIME_FUNCTION + " from INFORMATION_SCHEMA.SYSTEM_COLUMNS where TABLE_NAME = 'SYSTEM_COLUMNS' and COLUMN_NAME = 'TABLE_NAME')" + SQL_ONE_LINE_COMMENT, // Param in WHERE clause somewhere ORIG_VALUE_TOKEN + "' and exists ( select " + SQL_HYPERSONIC_TIME_FUNCTION + " from INFORMATION_SCHEMA.SYSTEM_COLUMNS where TABLE_NAME = 'SYSTEM_COLUMNS' and COLUMN_NAME = 'TABLE_NAME')" + SQL_ONE_LINE_COMMENT, // Param in WHERE clause somewhere ORIG_VALUE_TOKEN + "\" and exists ( select " + SQL_HYPERSONIC_TIME_FUNCTION + " from INFORMATION_SCHEMA.SYSTEM_COLUMNS where TABLE_NAME = 'SYSTEM_COLUMNS' and COLUMN_NAME = 'TABLE_NAME')" + SQL_ONE_LINE_COMMENT, // Param in WHERE clause somewhere ORIG_VALUE_TOKEN + ") and exists ( select " + 
SQL_HYPERSONIC_TIME_FUNCTION + " from INFORMATION_SCHEMA.SYSTEM_COLUMNS where TABLE_NAME = 'SYSTEM_COLUMNS' and COLUMN_NAME = 'TABLE_NAME')" + SQL_ONE_LINE_COMMENT, // Param in WHERE clause somewhere ORIG_VALUE_TOKEN + " or exists ( select " + SQL_HYPERSONIC_TIME_FUNCTION + " from INFORMATION_SCHEMA.SYSTEM_COLUMNS where TABLE_NAME = 'SYSTEM_COLUMNS' and COLUMN_NAME = 'TABLE_NAME')" + SQL_ONE_LINE_COMMENT, // Param in WHERE clause somewhere ORIG_VALUE_TOKEN + "' or exists ( select " + SQL_HYPERSONIC_TIME_FUNCTION + " from INFORMATION_SCHEMA.SYSTEM_COLUMNS where TABLE_NAME = 'SYSTEM_COLUMNS' and COLUMN_NAME = 'TABLE_NAME')" + SQL_ONE_LINE_COMMENT, // Param in WHERE clause somewhere ORIG_VALUE_TOKEN + "\" or exists ( select " + SQL_HYPERSONIC_TIME_FUNCTION + " from INFORMATION_SCHEMA.SYSTEM_COLUMNS where TABLE_NAME = 'SYSTEM_COLUMNS' and COLUMN_NAME = 'TABLE_NAME')" + SQL_ONE_LINE_COMMENT, // Param in WHERE clause somewhere ORIG_VALUE_TOKEN + ") or exists ( select " + SQL_HYPERSONIC_TIME_FUNCTION + " from INFORMATION_SCHEMA.SYSTEM_COLUMNS where TABLE_NAME = 'SYSTEM_COLUMNS' and COLUMN_NAME = 'TABLE_NAME')" + SQL_ONE_LINE_COMMENT, // Param in WHERE clause somewhere }; /** for logging. 
*/ private static Logger log = LogManager.getLogger(SqlInjectionHypersonicScanRule.class); @Override public int getId() { return 40020; } @Override public String getName() { return Constant.messages.getString("ascanbeta.sqlinjection.hypersonic.name"); } @Override public boolean targets(TechSet technologies) { return technologies.includes(Tech.HypersonicSQL); } @Override public String getDescription() { return Constant.messages.getString("ascanbeta.sqlinjection.desc"); } @Override public int getCategory() { return Category.INJECTION; } @Override public String getSolution() { return Constant.messages.getString("ascanbeta.sqlinjection.soln"); } @Override public String getReference() { return Constant.messages.getString("ascanbeta.sqlinjection.refs"); } @Override public void init() { log.debug("Initialising"); // set up what we are allowed to do, depending on the attack strength that was set. if (this.getAttackStrength() == AttackStrength.LOW) { doTimeBased = true; doTimeMaxRequests = 3; doUnionBased = true; doUnionMaxRequests = 3; } else if (this.getAttackStrength() == AttackStrength.MEDIUM) { doTimeBased = true; doTimeMaxRequests = 5; doUnionBased = true; doUnionMaxRequests = 5; } else if (this.getAttackStrength() == AttackStrength.HIGH) { doTimeBased = true; doTimeMaxRequests = 10; doUnionBased = true; doUnionMaxRequests = 10; } else if (this.getAttackStrength() == AttackStrength.INSANE) { doTimeBased = true; doTimeMaxRequests = 100; doUnionBased = true; doUnionMaxRequests = 100; } // Read the sleep value from the configs - note this is in milliseconds try { this.sleepInMs = this.getConfig().getInt(RuleConfigParam.RULE_COMMON_SLEEP_TIME, 15) * 1000; } catch (ConversionException e) { log.debug( "Invalid value for 'rules.common.sleep': {}", this.getConfig().getString(RuleConfigParam.RULE_COMMON_SLEEP_TIME)); } log.debug("Sleep set to {} milliseconds", sleepInMs); } /** * scans for SQL Injection vulnerabilities, using Hypersonic specific syntax. 
If it doesn't use * specifically Hypersonic syntax, it does not belong in here, but in SQLInjection */ @Override public void scan(HttpMessage originalMessage, String paramName, String paramValue) { try { // Timing Baseline check: we need to get the time that it took the original query, to // know if the time based check is working correctly.. HttpMessage msgTimeBaseline = getNewMsg(); try { sendAndReceive(msgTimeBaseline, false); // do not follow redirects } catch (java.net.SocketTimeoutException e) { // to be expected occasionally, if the base query was one that contains some // parameters exploiting time based SQL injection? log.debug( "The Base Time Check timed out on [{}] URL [{}]", msgTimeBaseline.getRequestHeader().getMethod(), msgTimeBaseline.getRequestHeader().getURI()); } catch (SocketException ex) { log.debug( "Caught {} {} when accessing: {} for Base Time Check", ex.getClass().getName(), ex.getMessage(), msgTimeBaseline.getRequestHeader().getURI()); return; // No need to keep going } long originalTimeUsed = msgTimeBaseline.getTimeElapsedMillis(); // end of timing baseline check int countUnionBasedRequests = 0; int countTimeBasedRequests = 0; log.debug( "Scanning URL [{}] [{}], field [{}] with value [{}] for SQL Injection", getBaseMsg().getRequestHeader().getMethod(), getBaseMsg().getRequestHeader().getURI(), paramName, paramValue); // Hypersonic specific time based SQL injection checks for (int timeBasedSQLindex = 0; timeBasedSQLindex < SQL_HYPERSONIC_TIME_REPLACEMENTS.length && doTimeBased && countTimeBasedRequests < doTimeMaxRequests; timeBasedSQLindex++) { HttpMessage msgAttack = getNewMsg(); String newTimeBasedInjectionValue = SQL_HYPERSONIC_TIME_REPLACEMENTS[timeBasedSQLindex] .replace(ORIG_VALUE_TOKEN, paramValue) .replace(SLEEP_TOKEN, Integer.toString(sleepInMs)); setParameter(msgAttack, paramName, newTimeBasedInjectionValue); // send it. 
try { sendAndReceive(msgAttack, false); // do not follow redirects countTimeBasedRequests++; } catch (java.net.SocketTimeoutException e) { // this is to be expected, if we start sending slow queries to the database. // ignore it in this case.. and just get the time. log.debug( "The time check query timed out on [{}] URL [{}] on field: [{}]", msgTimeBaseline.getRequestHeader().getMethod(), msgTimeBaseline.getRequestHeader().getURI(), paramName); } catch (SocketException ex) { log.debug( "Caught {} {} when accessing: {} for time check query", ex.getClass().getName(), ex.getMessage(), msgTimeBaseline.getRequestHeader().getURI()); return; // No need to keep going } long modifiedTimeUsed = msgAttack.getTimeElapsedMillis(); log.debug( "Time Based SQL Injection test: [{}] on field: [{}] with value [{}] took {}ms, where the original took {}ms", newTimeBasedInjectionValue, paramName, newTimeBasedInjectionValue, modifiedTimeUsed, originalTimeUsed); if (modifiedTimeUsed >= (originalTimeUsed + sleepInMs)) { // takes more than 15 (by default) extra seconds => likely time based SQL // injection. 
// But first double check HttpMessage msgc = getNewMsg(); try { sendAndReceive(msgc, false); // do not follow redirects } catch (Exception e) { // Ignore all exceptions } long checkTimeUsed = msgc.getTimeElapsedMillis(); if (checkTimeUsed >= (originalTimeUsed + this.sleepInMs - 200)) { // Looks like the server is overloaded, very unlikely this is a real issue continue; } String extraInfo = Constant.messages.getString( "ascanbeta.sqlinjection.alert.timebased.extrainfo", newTimeBasedInjectionValue, modifiedTimeUsed, paramValue, originalTimeUsed); String attack = Constant.messages.getString( "ascanbeta.sqlinjection.alert.booleanbased.attack", paramName, newTimeBasedInjectionValue); newAlert() .setConfidence(Alert.CONFIDENCE_MEDIUM) .setName(getName() + " - Time Based") .setUri(getBaseMsg().getRequestHeader().getURI().toString()) .setParam(paramName) .setAttack(attack) .setOtherInfo(extraInfo) .setMessage(msgAttack) .raise(); log.debug( "A likely Time Based SQL Injection Vulnerability has been found with [{}] URL [{}] on field: [{}]", msgAttack.getRequestHeader().getMethod(), msgAttack.getRequestHeader().getURI(), paramName); return; } // query took longer than the amount of time we attempted to retard it by } // for each time based SQL index // end of check for time based SQL Injection } catch (InvalidRedirectLocationException e) { // Not an error, just means we probably attacked the redirect location } catch (Exception e) { // Do not try to internationalise this.. we need an error message in any event.. // if it's in English, it's still better than not having it at all. log.error( "An error occurred checking a url for Hypersonic SQL Injection vulnerabilities", e); } } public void setSleepInMs(int sleepInMs) { this.sleepInMs = sleepInMs; } @Override public int getRisk() { return Alert.RISK_HIGH; } @Override public int getCweId() { return 89; } @Override public int getWascId() { return 19; } }
/*
 * Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 */
/*
 * Contains code from GNU Trove having the license below.
 *
 * Copyright (c) 2001, Eric D. Friedman All Rights Reserved.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 */
package com.gemstone.gemfire.internal.cache;

import com.gemstone.gnu.trove.*;

import java.util.Arrays;

/**
 * An open addressed set implementation for int primitives.
 *
 * <p>"Stateless" here refers to the storage scheme inherited from
 * {@code TStatelessIntHash}: slots hold either a stored value or the
 * configured "free" sentinel value, with no per-slot state array.
 * NOTE(review): as a consequence, the set does not support removal
 * (presumably there is no REMOVED sentinel to mark deleted slots) —
 * confirm against TStatelessIntHash before adding remove operations.
 *
 * @author darrel
 */
public class TStatelessIntHashSet extends TStatelessIntHash {

  /**
   * Creates a new <code>TStatelessIntHashSet</code> instance with the default
   * capacity and load factor.
   *
   * @param freeValue the sentinel value representing an empty slot
   */
  public TStatelessIntHashSet(int freeValue) {
    super(freeValue);
  }

  /**
   * Creates a new <code>TStatelessIntHashSet</code> instance with a prime
   * capacity equal to or greater than <tt>initialCapacity</tt> and
   * with the default load factor.
   *
   * @param freeValue the sentinel value representing an empty slot
   * @param initialCapacity an <code>int</code> value
   */
  public TStatelessIntHashSet(int freeValue, int initialCapacity) {
    super(freeValue, initialCapacity);
  }

  /**
   * Creates a new <code>TStatelessIntHashSet</code> instance with a prime
   * capacity equal to or greater than <tt>initialCapacity</tt> and
   * with the specified load factor.
   *
   * @param freeValue the sentinel value representing an empty slot
   * @param initialCapacity an <code>int</code> value
   * @param loadFactor a <code>float</code> value
   */
  public TStatelessIntHashSet(int freeValue, int initialCapacity, float loadFactor) {
    super(freeValue, initialCapacity, loadFactor);
  }

  /**
   * Creates a new <code>TStatelessIntHashSet</code> instance containing the
   * elements of <tt>array</tt>.
   *
   * @param freeValue the sentinel value representing an empty slot
   * @param array an array of <code>int</code> primitives
   */
  public TStatelessIntHashSet(int freeValue, int[] array) {
    this(freeValue, array.length);
    addAll(array);
  }

  /**
   * Creates a new <code>TStatelessIntHashSet</code> instance with the default
   * capacity and load factor.
   *
   * @param freeValue the sentinel value representing an empty slot
   * @param strategy used to compute hash codes and to compare keys.
   */
  public TStatelessIntHashSet(int freeValue, TIntHashingStrategy strategy) {
    super(freeValue, strategy);
  }

  /**
   * Creates a new <code>TStatelessIntHashSet</code> instance whose capacity
   * is the next highest prime above <tt>initialCapacity + 1</tt>
   * unless that value is already prime.
   *
   * @param freeValue the sentinel value representing an empty slot
   * @param initialCapacity an <code>int</code> value
   * @param strategy used to compute hash codes and to compare keys.
   */
  public TStatelessIntHashSet(int freeValue, int initialCapacity,
                              TIntHashingStrategy strategy) {
    super(freeValue, initialCapacity, strategy);
  }

  /**
   * Creates a new <code>TStatelessIntHashSet</code> instance with a prime
   * value at or near the specified capacity and load factor.
   *
   * @param freeValue the sentinel value representing an empty slot
   * @param initialCapacity used to find a prime capacity for the table.
   * @param loadFactor used to calculate the threshold over which
   * rehashing takes place.
   * @param strategy used to compute hash codes and to compare keys.
   */
  public TStatelessIntHashSet(int freeValue, int initialCapacity, float loadFactor,
                              TIntHashingStrategy strategy) {
    super(freeValue, initialCapacity, loadFactor, strategy);
  }

  /**
   * Creates a new <code>TStatelessIntHashSet</code> instance containing the
   * elements of <tt>array</tt>.
   *
   * @param freeValue the sentinel value representing an empty slot
   * @param array an array of <code>int</code> primitives
   * @param strategy used to compute hash codes and to compare keys.
   */
  public TStatelessIntHashSet(int freeValue, int[] array, TIntHashingStrategy strategy) {
    this(freeValue, array.length, strategy);
    addAll(array);
  }

  /**
   * @return a TStatelessIntIterator with access to the values in this set
   */
  public TStatelessIntIterator iterator() {
    return new TStatelessIntIterator(this);
  }

  /**
   * Inserts a value into the set.
   *
   * @param val an <code>int</code> value
   * @return true if the set was modified by the add operation
   */
  public boolean add(int val) {
    int index = insertionIndex(val);

    if (index < 0) {
      return false; // already present in set, nothing to add
    }

    // remember what occupied the slot so postInsertHook knows whether a
    // previously-free slot was consumed
    int previousState = _set[index];
    _set[index] = val;
    postInsertHook(previousState == this._FREE);

    return true; // yes, we added something
  }

  /**
   * Expands the set to accomodate new values.
   *
   * @param newCapacity an <code>int</code> value
   */
  @Override
  protected void rehash(int newCapacity) {
    int oldCapacity = _set.length;
    int[] oldSet = _set;

    _set = new int[newCapacity];
    // a fresh int[] is zero-filled, so filling with the free sentinel is only
    // needed when the sentinel is non-zero
    if (this._FREE != 0) {
      Arrays.fill(_set, this._FREE);
    }

    for (int i = oldCapacity; i-- > 0;) {
      int o = oldSet[i];
      if (o != this._FREE) {
        int index = insertionIndex(o);
        _set[index] = o;
      }
    }
  }

  /**
   * Returns a new array containing the values in the set.
   *
   * @return an <code>int[]</code> value
   */
  public int[] toArray() {
    int[] result = new int[size()];
    int[] set = _set;

    for (int i = set.length, j = 0; i-- > 0;) {
      int o = set[i];
      if (o != this._FREE) {
        result[j++] = o;
      }
    }
    return result;
  }

  /**
   * Empties the set.
   */
  @Override
  public void clear() {
    super.clear();
    Arrays.fill(_set, this._FREE);
  }

  /**
   * Compares this set with another set for equality of their stored
   * entries.
   *
   * @param other an <code>Object</code> value
   * @return a <code>boolean</code> value
   */
  @Override
  public boolean equals(Object other) {
    if (! (other instanceof TStatelessIntHashSet)) {
      return false;
    }
    final TStatelessIntHashSet that = (TStatelessIntHashSet)other;
    if (that.size() != this.size()) {
      return false;
    }
    // equal sizes + full containment of this in that => equal sets
    return forEach(new TIntProcedure() {
      public final boolean execute(int value) {
        return that.contains(value);
      }
    });
  }

  @Override
  public int hashCode() {
    HashProcedure p = new HashProcedure();
    forEach(p);
    return p.getHashCode();
  }

  /** Accumulates an order-independent hash over the set's values. */
  protected final class HashProcedure implements TIntProcedure {
    private int h = 0;

    public int getHashCode() {
      return h;
    }

    public final boolean execute(int key) {
      h += _hashingStrategy.computeHashCode(key);
      return true;
    }
  }

  // NOTE(review): removal operations (remove/removeAll/retainAll) from the
  // original GNU Trove TIntHashSet were deliberately left out of this class
  // (they were present only as commented-out code) — see the class javadoc.

  /**
   * Tests the set to determine if all of the elements in
   * <tt>array</tt> are present.
   *
   * @param array an <code>array</code> of int primitives.
   * @return true if all elements were present in the set.
   */
  public boolean containsAll(int[] array) {
    for (int i = array.length; i-- > 0;) {
      if (! contains(array[i])) {
        return false;
      }
    }
    return true;
  }

  /**
   * Adds all of the elements in <tt>array</tt> to the set.
   *
   * @param array an <code>array</code> of int primitives.
   * @return true if the set was modified by the add all operation.
   */
  public boolean addAll(int[] array) {
    boolean changed = false;
    for (int i = array.length; i-- > 0;) {
      if (add(array[i])) {
        changed = true;
      }
    }
    return changed;
  }
} // TStatelessIntHashSet
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.prestosql.plugin.cassandra;

import com.datastax.driver.core.DataType;
import com.datastax.driver.core.LocalDate;
import com.datastax.driver.core.ProtocolVersion;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.utils.Bytes;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.net.InetAddresses;
import io.airlift.slice.Slice;
import io.prestosql.spi.predicate.NullableValue;
import io.prestosql.spi.type.BigintType;
import io.prestosql.spi.type.BooleanType;
import io.prestosql.spi.type.DateType;
import io.prestosql.spi.type.DoubleType;
import io.prestosql.spi.type.IntegerType;
import io.prestosql.spi.type.RealType;
import io.prestosql.spi.type.SmallintType;
import io.prestosql.spi.type.TimeZoneKey;
import io.prestosql.spi.type.TimestampWithTimeZoneType;
import io.prestosql.spi.type.TinyintType;
import io.prestosql.spi.type.Type;
import io.prestosql.spi.type.VarbinaryType;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.Date;
import java.util.Map;
import java.util.Optional;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.common.net.InetAddresses.toAddrString;
import static io.airlift.slice.Slices.utf8Slice;
import static io.airlift.slice.Slices.wrappedBuffer;
import static io.prestosql.plugin.cassandra.util.CassandraCqlUtils.quoteStringLiteral;
import static io.prestosql.plugin.cassandra.util.CassandraCqlUtils.quoteStringLiteralForJson;
import static io.prestosql.spi.type.DateTimeEncoding.packDateTimeWithZone;
import static io.prestosql.spi.type.DateTimeEncoding.unpackMillisUtc;
import static io.prestosql.spi.type.VarcharType.createUnboundedVarcharType;
import static io.prestosql.spi.type.VarcharType.createVarcharType;
import static io.prestosql.spi.type.Varchars.isVarcharType;
import static java.lang.Float.floatToRawIntBits;
import static java.lang.Float.intBitsToFloat;
import static java.util.Objects.requireNonNull;

/**
 * Enumerates the Cassandra native types supported by this connector. Each constant carries the
 * Presto {@link Type} it maps to, and the enum provides conversions of column values in all the
 * directions used by the connector: driver {@code Row} -> Presto native value
 * ({@link #getColumnValue}), driver {@code Row} -> CQL text ({@link #getColumnValueForCql}),
 * Presto native value -> CQL literal ({@link #toCqlLiteral}) and Presto native value -> Java
 * object ({@link #getJavaValue}).
 *
 * <p>Note: DECIMAL is mapped to Presto DOUBLE (see DECIMAL constant below), so decimal values
 * lose precision on read; LIST/SET/MAP are flattened to JSON text in an unbounded VARCHAR.
 */
public enum CassandraType
{
    BOOLEAN(BooleanType.BOOLEAN),
    TINYINT(TinyintType.TINYINT),
    SMALLINT(SmallintType.SMALLINT),
    INT(IntegerType.INTEGER),
    BIGINT(BigintType.BIGINT),
    FLOAT(RealType.REAL),
    DOUBLE(DoubleType.DOUBLE),
    DECIMAL(DoubleType.DOUBLE),
    DATE(DateType.DATE),
    TIMESTAMP(TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE),
    ASCII(createUnboundedVarcharType()),
    TEXT(createUnboundedVarcharType()),
    VARCHAR(createUnboundedVarcharType()),
    BLOB(VarbinaryType.VARBINARY),
    UUID(createVarcharType(Constants.UUID_STRING_MAX_LENGTH)),
    TIMEUUID(createVarcharType(Constants.UUID_STRING_MAX_LENGTH)),
    COUNTER(BigintType.BIGINT),
    VARINT(createUnboundedVarcharType()),
    INET(createVarcharType(Constants.IP_ADDRESS_STRING_MAX_LENGTH)),
    CUSTOM(VarbinaryType.VARBINARY),
    LIST(createUnboundedVarcharType()),
    SET(createUnboundedVarcharType()),
    MAP(createUnboundedVarcharType()),
    /**/;

    // String-length bounds used to size the VARCHAR mappings above.
    private static final class Constants
    {
        private static final int UUID_STRING_MAX_LENGTH = 36;
        // IPv4: 255.255.255.255 - 15 characters
        // IPv6: FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF - 39 characters
        // IPv4 embedded into IPv6: FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:255.255.255.255 - 45 characters
        private static final int IP_ADDRESS_STRING_MAX_LENGTH = 45;
    }

    // The Presto SPI type this Cassandra type is exposed as.
    private final Type prestoType;

    CassandraType(Type prestoType)
    {
        this.prestoType = requireNonNull(prestoType, "prestoType is null");
    }

    public Type getPrestoType()
    {
        return prestoType;
    }

    /**
     * Maps a driver-reported type name to the corresponding enum constant, or empty when the
     * driver type is not supported by this connector.
     */
    public static Optional<CassandraType> toCassandraType(DataType.Name name)
    {
        switch (name) {
            case ASCII:
                return Optional.of(ASCII);
            case BIGINT:
                return Optional.of(BIGINT);
            case BLOB:
                return Optional.of(BLOB);
            case BOOLEAN:
                return Optional.of(BOOLEAN);
            case COUNTER:
                return Optional.of(COUNTER);
            case CUSTOM:
                return Optional.of(CUSTOM);
            case DATE:
                return Optional.of(DATE);
            case DECIMAL:
                return Optional.of(DECIMAL);
            case DOUBLE:
                return Optional.of(DOUBLE);
            case FLOAT:
                return Optional.of(FLOAT);
            case INET:
                return Optional.of(INET);
            case INT:
                return Optional.of(INT);
            case LIST:
                return Optional.of(LIST);
            case MAP:
                return Optional.of(MAP);
            case SET:
                return Optional.of(SET);
            case SMALLINT:
                return Optional.of(SMALLINT);
            case TEXT:
                return Optional.of(TEXT);
            case TIMESTAMP:
                return Optional.of(TIMESTAMP);
            case TIMEUUID:
                return Optional.of(TIMEUUID);
            case TINYINT:
                return Optional.of(TINYINT);
            case UUID:
                return Optional.of(UUID);
            case VARCHAR:
                return Optional.of(VARCHAR);
            case VARINT:
                return Optional.of(VARINT);
            default:
                return Optional.empty();
        }
    }

    /**
     * Reads the column at {@code position} from a driver {@code Row} and wraps it as a Presto
     * {@link NullableValue} in this type's Presto representation. Integer-like types are widened
     * to long, FLOAT is stored as its raw int bits in a long, DECIMAL is narrowed to double,
     * and UUID/INET/VARINT are rendered as strings; SET/LIST/MAP are rendered as JSON text.
     */
    public NullableValue getColumnValue(Row row, int position)
    {
        if (row.isNull(position)) {
            return NullableValue.asNull(prestoType);
        }

        switch (this) {
            case ASCII:
            case TEXT:
            case VARCHAR:
                return NullableValue.of(prestoType, utf8Slice(row.getString(position)));
            case INT:
                return NullableValue.of(prestoType, (long) row.getInt(position));
            case SMALLINT:
                return NullableValue.of(prestoType, (long) row.getShort(position));
            case TINYINT:
                return NullableValue.of(prestoType, (long) row.getByte(position));
            case BIGINT:
            case COUNTER:
                return NullableValue.of(prestoType, row.getLong(position));
            case BOOLEAN:
                return NullableValue.of(prestoType, row.getBool(position));
            case DOUBLE:
                return NullableValue.of(prestoType, row.getDouble(position));
            case FLOAT:
                return NullableValue.of(prestoType, (long) floatToRawIntBits(row.getFloat(position)));
            case DECIMAL:
                // precision is lost here: Cassandra decimal is narrowed to double
                return NullableValue.of(prestoType, row.getDecimal(position).doubleValue());
            case UUID:
            case TIMEUUID:
                return NullableValue.of(prestoType, utf8Slice(row.getUUID(position).toString()));
            case TIMESTAMP:
                return NullableValue.of(prestoType, packDateTimeWithZone(row.getTimestamp(position).getTime(), TimeZoneKey.UTC_KEY));
            case DATE:
                return NullableValue.of(prestoType, (long) row.getDate(position).getDaysSinceEpoch());
            case INET:
                return NullableValue.of(prestoType, utf8Slice(toAddrString(row.getInet(position))));
            case VARINT:
                return NullableValue.of(prestoType, utf8Slice(row.getVarint(position).toString()));
            case BLOB:
            case CUSTOM:
                return NullableValue.of(prestoType, wrappedBuffer(row.getBytesUnsafe(position)));
            case SET:
            case LIST:
                return NullableValue.of(prestoType, utf8Slice(buildArrayValue(row, position)));
            case MAP:
                return NullableValue.of(prestoType, utf8Slice(buildMapValue(row, position)));
            default:
                throw new IllegalStateException("Handling of type " + this + " is not implemented");
        }
    }

    // Renders a MAP column as a JSON object string, using the driver-reported key/value types.
    private static String buildMapValue(Row row, int position)
    {
        DataType type = row.getColumnDefinitions().getType(position);
        checkArgument(type.getTypeArguments().size() == 2, "Expected two type arguments, got: %s", type.getTypeArguments());
        DataType keyType = type.getTypeArguments().get(0);
        DataType valueType = type.getTypeArguments().get(1);
        return buildMapValue((Map<?, ?>) row.getObject(position), keyType, valueType);
    }

    private static String buildMapValue(Map<?, ?> cassandraMap, DataType keyType, DataType valueType)
    {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        for (Map.Entry<?, ?> entry : cassandraMap.entrySet()) {
            if (sb.length() > 1) {
                sb.append(",");
            }
            sb.append(objectToJson(entry.getKey(), keyType));
            sb.append(":");
            sb.append(objectToJson(entry.getValue(), valueType));
        }
        sb.append("}");
        return sb.toString();
    }

    // Renders a SET/LIST column as a JSON array string, using the driver-reported element type.
    private static String buildArrayValue(Row row, int position)
    {
        DataType type = row.getColumnDefinitions().getType(position);
        DataType elementType = getOnlyElement(type.getTypeArguments());
        return buildArrayValue((Collection<?>) row.getObject(position), elementType);
    }

    @VisibleForTesting
    static String buildArrayValue(Collection<?> cassandraCollection, DataType elementType)
    {
        StringBuilder sb = new StringBuilder();
        sb.append("[");
        for (Object value : cassandraCollection) {
            if (sb.length() > 1) {
                sb.append(",");
            }
            sb.append(objectToJson(value, elementType));
        }
        sb.append("]");
        return sb.toString();
    }

    /**
     * Renders the column at {@code position} as a CQL literal (strings quoted, blobs as hex).
     * Returns {@code null} for a SQL null; SET/LIST/MAP are not supported and hit the default
     * throw.
     */
    // TODO unify with toCqlLiteral
    public String getColumnValueForCql(Row row, int position)
    {
        if (row.isNull(position)) {
            return null;
        }

        switch (this) {
            case ASCII:
            case TEXT:
            case VARCHAR:
                return quoteStringLiteral(row.getString(position));
            case INT:
                return Integer.toString(row.getInt(position));
            case SMALLINT:
                return Short.toString(row.getShort(position));
            case TINYINT:
                return Byte.toString(row.getByte(position));
            case BIGINT:
            case COUNTER:
                return Long.toString(row.getLong(position));
            case BOOLEAN:
                return Boolean.toString(row.getBool(position));
            case DOUBLE:
                return Double.toString(row.getDouble(position));
            case FLOAT:
                return Float.toString(row.getFloat(position));
            case DECIMAL:
                return row.getDecimal(position).toString();
            case UUID:
            case TIMEUUID:
                return row.getUUID(position).toString();
            case TIMESTAMP:
                return Long.toString(row.getTimestamp(position).getTime());
            case DATE:
                return row.getDate(position).toString();
            case INET:
                return quoteStringLiteral(toAddrString(row.getInet(position)));
            case VARINT:
                return row.getVarint(position).toString();
            case BLOB:
            case CUSTOM:
                return Bytes.toHexString(row.getBytesUnsafe(position));
            default:
                throw new IllegalStateException("Handling of type " + this + " is not implemented");
        }
    }

    /**
     * Converts a Presto native value (long/Slice/etc.) into CQL literal text. TIMESTAMP values
     * are unpacked from Presto's packed millis-with-zone encoding.
     */
    // TODO unify with getColumnValueForCql
    public String toCqlLiteral(Object prestoNativeValue)
    {
        if (this == TIMESTAMP) {
            return String.valueOf(unpackMillisUtc((Long) prestoNativeValue));
        }

        String value;
        if (prestoNativeValue instanceof Slice) {
            value = ((Slice) prestoNativeValue).toStringUtf8();
        }
        else {
            value = prestoNativeValue.toString();
        }

        switch (this) {
            case ASCII:
            case TEXT:
            case VARCHAR:
                return quoteStringLiteral(value);
            case INET:
                // remove '/' in the string. e.g. /127.0.0.1
                // NOTE(review): assumes the value carries the InetAddress.toString() style
                // leading '/' — confirm against the producers of INET native values
                return quoteStringLiteral(value.substring(1));
            default:
                return value;
        }
    }

    // Renders a single Cassandra value as a JSON fragment; used by the map/array builders above.
    private static String objectToJson(Object cassandraValue, DataType dataType)
    {
        CassandraType cassandraType = toCassandraType(dataType.getName())
                .orElseThrow(() -> new IllegalStateException("Unsupported type: " + dataType));

        switch (cassandraType) {
            case ASCII:
            case TEXT:
            case VARCHAR:
            case UUID:
            case TIMEUUID:
            case TIMESTAMP:
            case DATE:
            case INET:
            case VARINT:
                return quoteStringLiteralForJson(cassandraValue.toString());

            case BLOB:
            case CUSTOM:
                return quoteStringLiteralForJson(Bytes.toHexString((ByteBuffer) cassandraValue));

            case SMALLINT:
            case TINYINT:
            case INT:
            case BIGINT:
            case COUNTER:
            case BOOLEAN:
            case DOUBLE:
            case FLOAT:
            case DECIMAL:
                return cassandraValue.toString();

            case LIST:
            case SET:
                return buildArrayValue((Collection<?>) cassandraValue, getOnlyElement(dataType.getTypeArguments()));

            case MAP:
                return buildMapValue((Map<?, ?>) cassandraValue, dataType.getTypeArguments().get(0), dataType.getTypeArguments().get(1));

            default:
                throw new IllegalStateException("Unsupported type: " + cassandraType);
        }
    }

    /**
     * Converts a Presto native value back into the Java object the Cassandra driver expects
     * (e.g. for binding partition keys). Collection types are not supported.
     */
    public Object getJavaValue(Object prestoNativeValue)
    {
        switch (this) {
            case ASCII:
            case TEXT:
            case VARCHAR:
                return ((Slice) prestoNativeValue).toStringUtf8();
            case BIGINT:
            case BOOLEAN:
            case DOUBLE:
            case COUNTER:
                return prestoNativeValue;
            case INET:
                return InetAddresses.forString(((Slice) prestoNativeValue).toStringUtf8());
            case INT:
            case SMALLINT:
            case TINYINT:
                return ((Long) prestoNativeValue).intValue();
            case FLOAT:
                // conversion can result in precision lost
                return intBitsToFloat(((Long) prestoNativeValue).intValue());
            case DECIMAL:
                // conversion can result in precision lost
                // Presto uses double for decimal, so to keep the floating point precision, convert it to string.
                // Otherwise partition id doesn't match
                return new BigDecimal(prestoNativeValue.toString());
            case TIMESTAMP:
                return new Date(unpackMillisUtc((Long) prestoNativeValue));
            case DATE:
                return LocalDate.fromDaysSinceEpoch(((Long) prestoNativeValue).intValue());
            case UUID:
            case TIMEUUID:
                return java.util.UUID.fromString(((Slice) prestoNativeValue).toStringUtf8());
            case BLOB:
            case CUSTOM:
                return ((Slice) prestoNativeValue).toStringUtf8();
            case VARINT:
                return new BigInteger(((Slice) prestoNativeValue).toStringUtf8());
            case SET:
            case LIST:
            case MAP:
            default:
                throw new IllegalStateException("Back conversion not implemented for " + this);
        }
    }

    /** Returns whether this type may be used as a partition key by the connector. */
    public boolean isSupportedPartitionKey()
    {
        switch (this) {
            case ASCII:
            case TEXT:
            case VARCHAR:
            case BIGINT:
            case BOOLEAN:
            case DOUBLE:
            case INET:
            case INT:
            case FLOAT:
            case DECIMAL:
            case TIMESTAMP:
            case UUID:
            case TIMEUUID:
                return true;
            case COUNTER:
            case BLOB:
            case CUSTOM:
            case VARINT:
            case SET:
            case LIST:
            case MAP:
            default:
                return false;
        }
    }

    /**
     * Returns whether the driver type — including all of its type arguments, recursively — maps
     * onto a supported {@link CassandraType}.
     */
    public static boolean isFullySupported(DataType dataType)
    {
        if (toCassandraType(dataType.getName()).isEmpty()) {
            return false;
        }

        return dataType.getTypeArguments().stream()
                .allMatch(CassandraType::isFullySupported);
    }

    /**
     * Maps a Presto type to the Cassandra type to write it as. DATE falls back to TEXT on
     * protocol versions before V4, which do not support the native date type.
     */
    public static CassandraType toCassandraType(Type type, ProtocolVersion protocolVersion)
    {
        if (type.equals(BooleanType.BOOLEAN)) {
            return BOOLEAN;
        }
        if (type.equals(BigintType.BIGINT)) {
            return BIGINT;
        }
        if (type.equals(IntegerType.INTEGER)) {
            return INT;
        }
        if (type.equals(SmallintType.SMALLINT)) {
            return SMALLINT;
        }
        if (type.equals(TinyintType.TINYINT)) {
            return TINYINT;
        }
        if (type.equals(DoubleType.DOUBLE)) {
            return DOUBLE;
        }
        if (type.equals(RealType.REAL)) {
            return FLOAT;
        }
        if (isVarcharType(type)) {
            return TEXT;
        }
        if (type.equals(DateType.DATE)) {
            return protocolVersion.toInt() <= ProtocolVersion.V3.toInt() ? TEXT : DATE;
        }
        if (type.equals(VarbinaryType.VARBINARY)) {
            return BLOB;
        }
        if (type.equals(TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE)) {
            return TIMESTAMP;
        }
        throw new IllegalArgumentException("unsupported type: " + type);
    }
}
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package app.view.component.customer;

import app.model.Customer;
import app.service.CustomerService;
import app.view.Main;
import java.awt.event.ActionListener;
import java.util.Date;
import java.util.List;
import javax.swing.JOptionPane;

/**
 * Modal dialog for creating or editing a {@link Customer}. The caller provides the
 * model via {@link #setModel}, chooses create/edit via {@link #setEditMode}, and may
 * register a listener via {@link #setSaveListener} that fires after a successful save.
 *
 * @author Administrator
 */
public class DialogCustomerForm extends javax.swing.JDialog {

    // The customer being created or edited; null until first save in create mode.
    private Customer model;

    public Customer getModel() {
        return model;
    }

    /** Sets the backing customer and refreshes the form fields from it. */
    public void setModel(Customer model) {
        this.model = model;
        displayModel();
    }

    /** Copies model fields into the form widgets (no-op when model is null). */
    private void displayModel() {
        if (model != null) {
            jTextField1.setText(model.getName());
            jTextField2.setText(model.getPhone());
            jTextArea1.setText(model.getAddress());
        }
    }

    /** Copies form widget contents into the model, creating it when absent. */
    private void loadDatatoModel() {
        if (model == null) {
            model = new Customer();
        }
        model.setName(jTextField1.getText());
        model.setPhone(jTextField2.getText());
        model.setAddress(jTextArea1.getText());
    }

    // True when editing an existing customer; false when creating a new one.
    private boolean isEdit = false;

    public void setEditMode(boolean isEdit) {
        this.isEdit = isEdit;
    }

    /** Clears both inline validation message labels. */
    private void clearMes() {
        jLabel4.setText("");
        jLabel5.setText("");
    }

    /** Resets the form back to the current model's values. */
    private void clear() {
        displayModel();
    }

    /**
     * Validates the form: name and phone are required, and the phone must not
     * already belong to another customer. Validation messages are shown inline
     * in jLabel4/jLabel5.
     */
    private boolean isValidForm() {
        boolean valid = true;
        clearMes();
        String cusname = jTextField1.getText();
        String cusphone = jTextField2.getText();
        if (cusname.equals("")) {
            jLabel4.setText("Customer name is required.");
            valid = false;
        }
        if (cusphone.equals("")) {
            jLabel5.setText("Customer Phone is required.");
            valid = false;
        }
        if (!cusphone.equals("")) {
            List<Customer> customers = customerService.executePrepareStmt(customerService.getPStmtFindCustomer(cusphone));
            // NOTE(review): in edit mode this compares the NAME to the model, not the
            // phone — presumably it should be !cusphone.equals(model.getPhone()) so a
            // customer can keep their own number; verify intended behavior.
            if (customers.size() > 0 && (!isEdit || !cusname.equals(model.getName()))) {
                jLabel5.setText("Phone is used.");
                valid = false;
            }
        }
        return valid;
    }

    // Invoked after a successful save so the owner can refresh its view.
    private ActionListener saveListener;

    public void setSaveListener(ActionListener saveListener) {
        this.saveListener = saveListener;
    }

    private CustomerService customerService;

    /**
     * Creates new form DialogCustomerForm
     */
    public DialogCustomerForm(java.awt.Frame parent, boolean modal) {
        super(parent, modal);
        initComponents();
        customerService = CustomerService.getInstance();
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        jPanel1 = new javax.swing.JPanel();
        jLabel1 = new javax.swing.JLabel();
        jTextField1 = new javax.swing.JTextField();
        jTextField2 = new javax.swing.JTextField();
        jLabel2 = new javax.swing.JLabel();
        jLabel3 = new javax.swing.JLabel();
        jButton1 = new javax.swing.JButton();
        jButton2 = new javax.swing.JButton();
        jLabel4 = new javax.swing.JLabel();
        jLabel5 = new javax.swing.JLabel();
        jScrollPane1 = new javax.swing.JScrollPane();
        jTextArea1 = new javax.swing.JTextArea();

        setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);

        jLabel1.setText("Name :");

        jTextField2.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jTextField2ActionPerformed(evt);
            }
        });

        jLabel2.setText("Phone :");

        jLabel3.setText("Address :");

        jButton1.setText("Save");
        jButton1.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButton1ActionPerformed(evt);
            }
        });

        jButton2.setText("Reset");
        jButton2.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButton2ActionPerformed(evt);
            }
        });

        jLabel4.setForeground(new java.awt.Color(255, 0, 0));

        jLabel5.setForeground(java.awt.Color.red);

        jTextArea1.setColumns(20);
        jTextArea1.setRows(5);
        jScrollPane1.setViewportView(jTextArea1);

        javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
        jPanel1.setLayout(jPanel1Layout);
        jPanel1Layout.setHorizontalGroup(
            jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel1Layout.createSequentialGroup()
                .addGap(26, 26, 26)
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(jLabel1)
                    .addComponent(jLabel2)
                    .addComponent(jLabel3)
                    .addComponent(jButton1))
                .addGap(34, 34, 34)
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
                        .addComponent(jTextField1)
                        .addComponent(jTextField2, javax.swing.GroupLayout.DEFAULT_SIZE, 126, Short.MAX_VALUE)
                        .addComponent(jLabel4, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(jLabel5, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
                    .addComponent(jButton2))
                .addContainerGap(32, Short.MAX_VALUE))
        );
        jPanel1Layout.setVerticalGroup(
            jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel1Layout.createSequentialGroup()
                .addGap(69, 69, 69)
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(jLabel1)
                    .addComponent(jTextField1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(jLabel4)
                .addGap(12, 12, 12)
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(jLabel2)
                    .addComponent(jTextField2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addGap(1, 1, 1)
                .addComponent(jLabel5)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(jPanel1Layout.createSequentialGroup()
                        .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addContainerGap(94, Short.MAX_VALUE))
                    .addGroup(jPanel1Layout.createSequentialGroup()
                        .addComponent(jLabel3)
                        .addGap(103, 103, 103)
                        .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                            .addComponent(jButton1)
                            .addComponent(jButton2))
                        .addGap(20, 20, 20))))
        );

        getContentPane().add(jPanel1, java.awt.BorderLayout.CENTER);

        pack();
    }// </editor-fold>//GEN-END:initComponents

    private void jTextField2ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jTextField2ActionPerformed
        // TODO add your handling code here:
    }//GEN-LAST:event_jTextField2ActionPerformed

    // Save button: validate, confirm, persist (update when editing, add when creating),
    // then notify the registered save listener.
    private void jButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton1ActionPerformed
        // TODO add your handling code here:
        if (isValidForm()) {
            int choice = JOptionPane.showConfirmDialog(this, "Are you sure you want to save?", "Confirm Save", JOptionPane.YES_NO_OPTION);
            if (choice == JOptionPane.NO_OPTION) {
                return;
            }
            loadDatatoModel();
            Date date = new Date();
            java.sql.Date inputdate = new java.sql.Date(date.getTime());
            // Modified timestamp is always refreshed; created only on first save.
            model.setModified(inputdate);
            if (isEdit) {
                customerService.update(model);
            } else {
                model.setCreated(inputdate);
                model.setDealerId(Main.activeUser.getId());
                customerService.add(model);
            }
            JOptionPane.showMessageDialog(this, "Save Success.");
            if (saveListener != null) {
                saveListener.actionPerformed(evt);
            }
        }
    }//GEN-LAST:event_jButton1ActionPerformed

    // Reset button: discard edits and re-display the current model.
    private void jButton2ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton2ActionPerformed
        // TODO add your handling code here:
        clear();
    }//GEN-LAST:event_jButton2ActionPerformed

    /**
     * @param args the command line arguments
     */
    public static void main(String args[]) {
        /* Set the Nimbus look and feel */
        //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
        /* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
         * For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
         */
        try {
            for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
                if ("Nimbus".equals(info.getName())) {
                    javax.swing.UIManager.setLookAndFeel(info.getClassName());
                    break;
                }
            }
        } catch (ClassNotFoundException ex) {
            java.util.logging.Logger.getLogger(DialogCustomerForm.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (InstantiationException ex) {
            java.util.logging.Logger.getLogger(DialogCustomerForm.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (IllegalAccessException ex) {
            java.util.logging.Logger.getLogger(DialogCustomerForm.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (javax.swing.UnsupportedLookAndFeelException ex) {
            java.util.logging.Logger.getLogger(DialogCustomerForm.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        }
        //</editor-fold>

        /* Create and display the dialog */
        java.awt.EventQueue.invokeLater(new Runnable() {
            public void run() {
                DialogCustomerForm dialog = new DialogCustomerForm(new javax.swing.JFrame(), true);
                dialog.addWindowListener(new java.awt.event.WindowAdapter() {
                    @Override
                    public void windowClosing(java.awt.event.WindowEvent e) {
                        System.exit(0);
                    }
                });
                dialog.setVisible(true);
            }
        });
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton jButton1;
    private javax.swing.JButton jButton2;
    private javax.swing.JLabel jLabel1;
    private javax.swing.JLabel jLabel2;
    private javax.swing.JLabel jLabel3;
    private javax.swing.JLabel jLabel4;
    private javax.swing.JLabel jLabel5;
    private javax.swing.JPanel jPanel1;
    private javax.swing.JScrollPane jScrollPane1;
    private javax.swing.JTextArea jTextArea1;
    private javax.swing.JTextField jTextField1;
    private javax.swing.JTextField jTextField2;
    // End of variables declaration//GEN-END:variables
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.serde2.avro; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Properties; import org.apache.avro.Schema; import org.apache.hadoop.hive.common.StringInternUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeSpec; import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.io.Writable; /** * Read or write Avro data from Hive. 
*/
@SerDeSpec(schemaProps = {
    serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES,
    AvroSerDe.LIST_COLUMN_COMMENTS, AvroSerDe.TABLE_NAME, AvroSerDe.TABLE_COMMENT,
    AvroSerdeUtils.SCHEMA_LITERAL,
    AvroSerdeUtils.SCHEMA_URL,
    AvroSerdeUtils.SCHEMA_NAMESPACE, AvroSerdeUtils.SCHEMA_NAME, AvroSerdeUtils.SCHEMA_DOC})
public class AvroSerDe extends AbstractSerDe {
  private static final Logger LOG = LoggerFactory.getLogger(AvroSerDe.class);

  // Table property keys consumed by this SerDe.
  public static final String TABLE_NAME = "name";
  public static final String TABLE_COMMENT = "comment";
  public static final String LIST_COLUMN_COMMENTS = "columns.comments";

  // Hive type names with dedicated Avro handling.
  public static final String DECIMAL_TYPE_NAME = "decimal";
  public static final String CHAR_TYPE_NAME = "char";
  public static final String VARCHAR_TYPE_NAME = "varchar";
  public static final String DATE_TYPE_NAME = "date";
  public static final String TIMESTAMP_TYPE_NAME = "timestamp-millis";
  public static final String WRITER_TIME_ZONE = "writer.time.zone";
  public static final String WRITER_PROLEPTIC = "writer.proleptic";

  // Avro schema property names used when generating schemas from Hive types.
  public static final String AVRO_PROP_LOGICAL_TYPE = "logicalType";
  public static final String AVRO_PROP_PRECISION = "precision";
  public static final String AVRO_PROP_SCALE = "scale";
  public static final String AVRO_PROP_MAX_LENGTH = "maxLength";
  public static final String AVRO_STRING_TYPE_NAME = "string";
  public static final String AVRO_INT_TYPE_NAME = "int";
  public static final String AVRO_LONG_TYPE_NAME = "long";

  private ObjectInspector oi;
  private List<String> columnNames;
  private List<TypeInfo> columnTypes;
  private Schema schema;
  private AvroDeserializer avroDeserializer = null;
  private AvroSerializer avroSerializer = null;

  // Set when schema resolution produced the signal "bad schema" sentinel;
  // serialize/deserialize then refuse to run.
  private boolean badSchema = false;

  /**
   * Resolves the Avro schema (from an external schema literal/URL, or generated from
   * the column name/type properties), publishes it back into the table properties,
   * and builds the object inspector plus (de)serializers.
   *
   * @throws SerDeException when schema resolution failed
   */
  @Override
  public void initialize(Configuration configuration, Properties tableProperties,
      Properties partitionProperties) throws SerDeException {
    /*
     * Avro should always use the table properties for initialization (see
     * HIVE-6835). The tableProperties is modified directly by this SerDe when
     * the user supplies a schema file so do not make a copy.
     */
    super.initialize(configuration, tableProperties, null);

    // Reset member variables so we don't get in a half-constructed state
    if (schema != null) {
      LOG.debug("Resetting already initialized AvroSerDe");
    }

    LOG.debug("AvroSerde::initialize(): Preset value of avro.schema.literal == "
        + tableProperties.get(AvroSerdeUtils.AvroTableProperties.SCHEMA_LITERAL.getPropName()));

    schema = null;
    oi = null;
    columnNames = null;
    columnTypes = null;

    final String columnNameProperty = tableProperties.getProperty(serdeConstants.LIST_COLUMNS);
    final String columnTypeProperty = tableProperties.getProperty(serdeConstants.LIST_COLUMN_TYPES);
    final String columnCommentProperty = tableProperties.getProperty(LIST_COLUMN_COMMENTS, "");
    final String columnNameDelimiter = tableProperties.containsKey(serdeConstants.COLUMN_NAME_DELIMITER)
        ? tableProperties.getProperty(serdeConstants.COLUMN_NAME_DELIMITER)
        : String.valueOf(SerDeUtils.COMMA);

    boolean gotColTypesFromColProps = true;
    // Prefer an externally supplied schema; otherwise derive one from the column props.
    if (hasExternalSchema(tableProperties)
        || columnNameProperty == null || columnNameProperty.isEmpty()
        || columnTypeProperty == null || columnTypeProperty.isEmpty()) {
      schema = determineSchemaOrReturnErrorSchema(configuration, tableProperties);
      gotColTypesFromColProps = false;
    } else {
      // Get column names and sort order
      columnNames = StringInternUtils.internStringsInList(
          Arrays.asList(columnNameProperty.split(columnNameDelimiter)));
      columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);

      schema = getSchemaFromCols(tableProperties, columnNames, columnTypes, columnCommentProperty);
    }
    // Publish the resolved schema so downstream consumers see a literal.
    tableProperties.setProperty(AvroSerdeUtils.AvroTableProperties.SCHEMA_LITERAL.getPropName(), schema.toString());

    LOG.debug("Avro schema is: {}", schema);

    if (this.configuration.isPresent()) {
      this.configuration.get().set(AvroSerdeUtils.AvroTableProperties.AVRO_SERDE_SCHEMA.getPropName(),
          schema.toString(false));
    } else {
      LOG.debug("Configuration null, not inserting schema");
    }

    badSchema = (schema == SchemaResolutionProblem.SIGNAL_BAD_SCHEMA);

    AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(schema);
    this.columnNames = StringInternUtils.internStringsInList(aoig.getColumnNames());
    this.columnTypes = aoig.getColumnTypes();
    this.oi = aoig.getObjectInspector();

    // HIVE-22595: Update the column/type properties to reflect the current, since the
    // these properties may be used
    if (!gotColTypesFromColProps) {
      LOG.info("Updating column name/type properties based on current schema");
      tableProperties.setProperty(serdeConstants.LIST_COLUMNS, String.join(",", columnNames));
      tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES,
          String.join(",", TypeInfoUtils.getTypeStringsFromTypeInfo(columnTypes)));
    }

    if (badSchema) {
      throw new SerDeException("Invalid schema reported");
    }

    this.avroSerializer = new AvroSerializer(configuration);
    this.avroDeserializer = new AvroDeserializer(configuration);
  }

  /** True when the table properties carry a schema literal or a schema URL. */
  private boolean hasExternalSchema(Properties properties) {
    return properties.getProperty(AvroSerdeUtils.AvroTableProperties.SCHEMA_LITERAL.getPropName()) != null
        || properties.getProperty(AvroSerdeUtils.AvroTableProperties.SCHEMA_URL.getPropName()) != null;
  }

  /** Map-based overload of {@link #hasExternalSchema(Properties)}. */
  private boolean hasExternalSchema(Map<String, String> tableParams) {
    return tableParams.containsKey(AvroSerdeUtils.AvroTableProperties.SCHEMA_LITERAL.getPropName())
        || tableParams.containsKey(AvroSerdeUtils.AvroTableProperties.SCHEMA_URL.getPropName());
  }

  /**
   * Generates an Avro schema from Hive column names/types, carrying over comments,
   * namespace, record name and doc from the table properties.
   *
   * @throws IllegalArgumentException when name and type counts disagree
   */
  public static Schema getSchemaFromCols(Properties properties,
      List<String> columnNames, List<TypeInfo> columnTypes, String columnCommentProperty) {
    List<String> columnComments;
    if (columnCommentProperty == null || columnCommentProperty.isEmpty()) {
      columnComments = new ArrayList<String>();
    } else {
      //Comments are separated by "\0" in columnCommentProperty, see method getSchema
      //in MetaStoreUtils where this string columns.comments is generated
      columnComments = Arrays.asList(columnCommentProperty.split("\0"));
      LOG.debug("columnComments is {}", columnCommentProperty);
    }
    if (columnNames.size() != columnTypes.size()) {
      throw new IllegalArgumentException("AvroSerde initialization failed. Number of column " +
          "name and column type differs. columnNames = " + columnNames + ", columnTypes = " +
          columnTypes);
    }

    final String tableName = properties.getProperty(TABLE_NAME);
    final String tableComment = properties.getProperty(TABLE_COMMENT);
    TypeInfoToSchema typeInfoToSchema = new TypeInfoToSchema();
    return typeInfoToSchema.convert(columnNames, columnTypes, columnComments,
        properties.getProperty(AvroSerdeUtils.AvroTableProperties.SCHEMA_NAMESPACE.getPropName()),
        properties.getProperty(AvroSerdeUtils.AvroTableProperties.SCHEMA_NAME.getPropName(), tableName),
        properties.getProperty(AvroSerdeUtils.AvroTableProperties.SCHEMA_DOC.getPropName(), tableComment));
  }

  /**
   * Attempt to determine the schema via the usual means, but do not throw
   * an exception if we fail. Instead, signal failure via a special
   * schema. This is used because Hive calls init on the serde during
   * any call, including calls to update the serde properties, meaning
   * if the serde is in a bad state, there is no way to update that state.
   */
  private Schema determineSchemaOrReturnErrorSchema(Configuration conf, Properties props) {
    try {
      return AvroSerdeUtils.determineSchemaOrThrowException(conf, props);
    } catch (AvroSerdeException he) {
      LOG.warn("Encountered AvroSerdeException determining schema. Returning signal schema to indicate problem", he);
      return SchemaResolutionProblem.SIGNAL_BAD_SCHEMA;
    } catch (Exception e) {
      LOG.warn("Encountered exception determining schema. Returning signal schema to indicate problem", e);
      return SchemaResolutionProblem.SIGNAL_BAD_SCHEMA;
    }
  }

  @Override
  public Class<? extends Writable> getSerializedClass() {
    return AvroGenericRecordWritable.class;
  }

  /** Serializes a Hive row to Avro; refuses when the schema failed to resolve. */
  @Override
  public Writable serialize(Object o, ObjectInspector objectInspector) throws SerDeException {
    if(badSchema) {
      throw new BadSchemaException();
    }
    return avroSerializer.serialize(o, objectInspector, columnNames, columnTypes, schema);
  }

  /** Deserializes an Avro record to a Hive row; refuses when the schema failed to resolve. */
  @Override
  public Object deserialize(Writable writable) throws SerDeException {
    if(badSchema) {
      throw new BadSchemaException();
    }
    return avroDeserializer.deserialize(columnNames, columnTypes, writable, schema);
  }

  @Override
  public ObjectInspector getObjectInspector() throws SerDeException {
    return oi;
  }

  // Column metadata lives in the Avro schema when one is supplied externally,
  // so only store fields in the metastore when no external schema exists.
  @Override
  public boolean shouldStoreFieldsInMetastore(Map<String, String> tableParams) {
    return !hasExternalSchema(tableParams);
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.ambari.logfeeder.input;

import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.ambari.logfeeder.input.cache.LRUCache;
import org.apache.ambari.logfeeder.common.ConfigBlock;
import org.apache.ambari.logfeeder.common.LogfeederException;
import org.apache.ambari.logfeeder.filter.Filter;
import org.apache.ambari.logfeeder.metrics.MetricData;
import org.apache.ambari.logfeeder.output.Output;
import org.apache.ambari.logfeeder.output.OutputManager;
import org.apache.ambari.logfeeder.util.LogFeederUtil;
import org.apache.log4j.Logger;

/**
 * Base class for every log input source. An Input runs on its own thread (started by
 * {@link #monitor()}), reads lines from its source and pushes them through a chain of
 * {@link Filter}s toward the configured outputs. Subclasses implement {@link #isReady()},
 * {@link #start()} and the check-in callbacks.
 */
public abstract class Input extends ConfigBlock implements Runnable {
  private static final Logger LOG = Logger.getLogger(Input.class);

  // Defaults used when neither the config block nor logfeeder properties set a value.
  private static final boolean DEFAULT_TAIL = true;
  private static final boolean DEFAULT_USE_EVENT_MD5 = false;
  private static final boolean DEFAULT_GEN_EVENT_MD5 = true;
  private static final boolean DEFAULT_CACHE_ENABLED = false;
  private static final boolean DEFAULT_CACHE_DEDUP_LAST = false;
  private static final int DEFAULT_CACHE_SIZE = 100;
  private static final long DEFAULT_CACHE_DEDUP_INTERVAL = 1000;
  private static final String DEFAULT_CACHE_KEY_FIELD = "log_message";

  // Config keys for the dedup cache.
  private static final String CACHE_ENABLED = "cache_enabled";
  private static final String CACHE_KEY_FIELD = "cache_key_field";
  private static final String CACHE_LAST_DEDUP_ENABLED = "cache_last_dedup_enabled";
  private static final String CACHE_SIZE = "cache_size";
  private static final String CACHE_DEDUP_INTERVAL = "cache_dedup_interval";

  protected InputManager inputManager;
  protected OutputManager outputManager;
  private List<Output> outputList = new ArrayList<Output>();

  private Thread thread;
  private String type;
  protected String filePath;
  private Filter firstFilter;     // head of the filter chain; appended to via addFilter()
  private boolean isClosed;

  protected boolean tail;
  private boolean useEventMD5;
  private boolean genEventMD5;

  private LRUCache cache;
  private String cacheKeyField;

  protected MetricData readBytesMetric = new MetricData(getReadBytesMetricName(), false);

  /** Subclasses may override to name the bytes-read metric; null disables naming. */
  protected String getReadBytesMetricName() {
    return null;
  }

  /**
   * Loads the config block and mirrors the input's "type" into both the context
   * fields and the add_fields map so every emitted event carries it.
   */
  @Override
  public void loadConfig(Map<String, Object> map) {
    super.loadConfig(map);
    String typeValue = getStringValue("type");
    if (typeValue != null) {
      // Explicitly add type and value to field list
      contextFields.put("type", typeValue);
      @SuppressWarnings("unchecked")
      Map<String, Object> addFields = (Map<String, Object>) map.get("add_fields");
      if (addFields == null) {
        addFields = new HashMap<String, Object>();
        map.put("add_fields", addFields);
      }
      addFields.put("type", typeValue);
    }
  }

  public void setType(String type) {
    this.type = type;
  }

  public void setInputManager(InputManager inputManager) {
    this.inputManager = inputManager;
  }

  public void setOutputManager(OutputManager outputManager) {
    this.outputManager = outputManager;
  }

  /** Appends a filter to the end of the filter chain. */
  public void addFilter(Filter filter) {
    if (firstFilter == null) {
      firstFilter = filter;
    } else {
      Filter f = firstFilter;
      while (f.getNextFilter() != null) {
        f = f.getNextFilter();
      }
      f.setNextFilter(filter);
    }
  }

  public void addOutput(Output output) {
    outputList.add(output);
  }

  /** Initializes cache, flags and the filter chain. */
  @Override
  public void init() throws Exception {
    super.init();
    initCache();
    tail = getBooleanValue("tail", DEFAULT_TAIL);
    useEventMD5 = getBooleanValue("use_event_md5_as_id", DEFAULT_USE_EVENT_MD5);
    genEventMD5 = getBooleanValue("gen_event_md5", DEFAULT_GEN_EVENT_MD5);

    if (firstFilter != null) {
      firstFilter.init();
    }
  }

  /**
   * Starts the reader thread if the source is ready.
   *
   * @return true when a thread was started
   */
  boolean monitor() {
    if (isReady()) {
      LOG.info("Starting thread. " + getShortDescription());
      thread = new Thread(this, getNameForThread());
      thread.start();
      return true;
    } else {
      return false;
    }
  }

  public abstract boolean isReady();

  @Override
  public void run() {
    try {
      LOG.info("Started to monitor. " + getShortDescription());
      start();
    } catch (Exception e) {
      LOG.error("Error writing to output.", e);
    }
    LOG.info("Exiting thread. " + getShortDescription());
  }

  /**
   * This method will be called from the thread spawned for the output. This
   * method should only exit after all data are read from the source or the
   * process is exiting
   */
  abstract void start() throws Exception;

  /** Feeds one raw line into the filter chain and updates read metrics. */
  protected void outputLine(String line, InputMarker marker) {
    statMetric.value++;
    readBytesMetric.value += (line.length());

    if (firstFilter != null) {
      try {
        firstFilter.apply(line, marker);
      } catch (LogfeederException e) {
        LOG.error(e.getLocalizedMessage(), e);
      }
    } else {
      // TODO: For now, let's make filter mandatory, so that no one accidently forgets to write filter
      // outputManager.write(line, this);
    }
  }

  protected void flush() {
    if (firstFilter != null) {
      firstFilter.flush();
    }
  }

  /** Requests a drain and interrupts the reader thread so it notices promptly. */
  @Override
  public void setDrain(boolean drain) {
    LOG.info("Request to drain. " + getShortDescription());
    super.setDrain(drain);
    try {
      thread.interrupt();
    } catch (Throwable t) {
      // ignore - thread may not have been started yet
    }
  }

  public void addMetricsContainers(List<MetricData> metricsList) {
    super.addMetricsContainers(metricsList);
    if (firstFilter != null) {
      firstFilter.addMetricsContainers(metricsList);
    }
    metricsList.add(readBytesMetric);
  }

  @Override
  public void logStat() {
    super.logStat();
    logStatForMetric(readBytesMetric, "Stat: Bytes Read");

    if (firstFilter != null) {
      firstFilter.logStat();
    }
  }

  public abstract void checkIn(InputMarker inputMarker);

  public abstract void lastCheckIn();

  /** Closes the filter chain (or the output manager when there is no filter). */
  public void close() {
    LOG.info("Close called. " + getShortDescription());
    try {
      if (firstFilter != null) {
        firstFilter.close();
      } else {
        outputManager.close();
      }
    } catch (Throwable t) {
      // Ignore - best-effort shutdown
    }
    isClosed = true;
  }

  /**
   * Builds the dedup LRU cache when enabled. Every setting falls back from the
   * config block to the global logfeeder property, then to the compile-time default.
   */
  private void initCache() {
    boolean cacheEnabled = getConfigValue(CACHE_ENABLED) != null
      ? getBooleanValue(CACHE_ENABLED, DEFAULT_CACHE_ENABLED)
      : LogFeederUtil.getBooleanProperty("logfeeder.cache.enabled", DEFAULT_CACHE_ENABLED);
    if (cacheEnabled) {
      String cacheKeyField = getConfigValue(CACHE_KEY_FIELD) != null
        ? getStringValue(CACHE_KEY_FIELD)
        : LogFeederUtil.getStringProperty("logfeeder.cache.key.field", DEFAULT_CACHE_KEY_FIELD);

      // BUGFIX: previously this was setCacheKeyField(getStringValue(cacheKeyField)),
      // which re-queried the config using the already-resolved VALUE as a key and
      // therefore set the field to null whenever the cache was enabled.
      setCacheKeyField(cacheKeyField);

      boolean cacheLastDedupEnabled = getConfigValue(CACHE_LAST_DEDUP_ENABLED) != null
        ? getBooleanValue(CACHE_LAST_DEDUP_ENABLED, DEFAULT_CACHE_DEDUP_LAST)
        : LogFeederUtil.getBooleanProperty("logfeeder.cache.last.dedup.enabled", DEFAULT_CACHE_DEDUP_LAST);

      int cacheSize = getConfigValue(CACHE_SIZE) != null
        ? getIntValue(CACHE_SIZE, DEFAULT_CACHE_SIZE)
        : LogFeederUtil.getIntProperty("logfeeder.cache.size", DEFAULT_CACHE_SIZE);

      long cacheDedupInterval = getConfigValue(CACHE_DEDUP_INTERVAL) != null
        ? getLongValue(CACHE_DEDUP_INTERVAL, DEFAULT_CACHE_DEDUP_INTERVAL)
        : Long.parseLong(LogFeederUtil.getStringProperty("logfeeder.cache.dedup.interval", String.valueOf(DEFAULT_CACHE_DEDUP_INTERVAL)));

      setCache(new LRUCache(cacheSize, filePath, cacheDedupInterval, cacheLastDedupEnabled));
    }
  }

  public boolean isTail() {
    return tail;
  }

  public boolean isUseEventMD5() {
    return useEventMD5;
  }

  public boolean isGenEventMD5() {
    return genEventMD5;
  }

  public Filter getFirstFilter() {
    return firstFilter;
  }

  public String getFilePath() {
    return filePath;
  }

  public void setFilePath(String filePath) {
    this.filePath = filePath;
  }

  public void setClosed(boolean isClosed) {
    this.isClosed = isClosed;
  }

  public boolean isClosed() {
    return isClosed;
  }

  public List<Output> getOutputList() {
    return outputList;
  }

  public Thread getThread(){
    return thread;
  }

  public LRUCache getCache() {
    return cache;
  }

  public void setCache(LRUCache cache) {
    this.cache = cache;
  }

  public String getCacheKeyField() {
    return cacheKeyField;
  }

  public void setCacheKeyField(String cacheKeyField) {
    this.cacheKeyField = cacheKeyField;
  }

  /** Thread name: "type=basename(filePath)" when a file is attached, else parent name + type. */
  @Override
  public String getNameForThread() {
    if (filePath != null) {
      try {
        return (type + "=" + (new File(filePath)).getName());
      } catch (Throwable ex) {
        LOG.warn("Couldn't get basename for filePath=" + filePath, ex);
      }
    }
    return super.getNameForThread() + ":" + type;
  }

  @Override
  public String toString() {
    return getShortDescription();
  }
}
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.vcs.log.graph.linearBek;

import com.intellij.openapi.diagnostic.Logger;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.vcs.log.graph.actions.GraphAction;
import com.intellij.vcs.log.graph.api.EdgeFilter;
import com.intellij.vcs.log.graph.api.GraphLayout;
import com.intellij.vcs.log.graph.api.LinearGraph;
import com.intellij.vcs.log.graph.api.elements.GraphEdge;
import com.intellij.vcs.log.graph.api.elements.GraphEdgeType;
import com.intellij.vcs.log.graph.api.elements.GraphElement;
import com.intellij.vcs.log.graph.api.elements.GraphNode;
import com.intellij.vcs.log.graph.api.permanent.PermanentGraphInfo;
import com.intellij.vcs.log.graph.impl.facade.BekBaseController;
import com.intellij.vcs.log.graph.impl.facade.CascadeController;
import com.intellij.vcs.log.graph.impl.facade.GraphChangesUtil;
import com.intellij.vcs.log.graph.impl.facade.bek.BekIntMap;
import com.intellij.vcs.log.graph.utils.LinearGraphUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.*;

/**
 * Controller for the "linear bek" view of the VCS log graph: it wraps a
 * bek-reordered delegate graph in a {@link LinearBekGraph} and collapses merge
 * fragments so that merged branches render as dotted edges instead of full
 * sub-graphs. Mouse clicks toggle collapse/expand of individual fragments;
 * the collapse/expand buttons act on the whole graph.
 */
public class LinearBekController extends CascadeController {
  private static final Logger LOG = Logger.getInstance(LinearBekController.class);
  // The graph exposed to the UI: delegate graph plus hidden/dotted edge overlays.
  @NotNull private final LinearBekGraph myCompiledGraph;
  private final LinearBekGraphBuilder myLinearBekGraphBuilder;
  private final BekGraphLayout myBekGraphLayout;

  public LinearBekController(@NotNull BekBaseController controller, @NotNull PermanentGraphInfo permanentGraphInfo) {
    super(controller, permanentGraphInfo);
    myCompiledGraph = new LinearBekGraph(getDelegateGraph());
    // Layout indices must be translated through the bek permutation, see BekGraphLayout below.
    myBekGraphLayout = new BekGraphLayout(permanentGraphInfo.getPermanentGraphLayout(), controller.getBekIntMap());
    myLinearBekGraphBuilder = new LinearBekGraphBuilder(myCompiledGraph, myBekGraphLayout);

    // Collapse everything up-front; timing is logged since this runs over the whole graph.
    long start = System.currentTimeMillis();
    myLinearBekGraphBuilder.collapseAll();
    LOG.debug("Linear bek took " + (System.currentTimeMillis() - start) / 1000.0 + " sec");
  }

  /** A change in the delegate graph is passed through unmodified. */
  @NotNull
  @Override
  protected LinearGraphAnswer delegateGraphChanged(@NotNull LinearGraphAnswer delegateAnswer) {
    return delegateAnswer;
  }

  /**
   * Dispatches a UI action:
   * <ul>
   *   <li>click on a node — collapse its fragment, or failing that expand an adjacent dotted edge;</li>
   *   <li>click on a dotted edge — expand it;</li>
   *   <li>hover — highlight what a click would affect;</li>
   *   <li>collapse/expand buttons (no affected element) — act on the whole graph.</li>
   * </ul>
   * Returns {@code null} when the action does not apply.
   */
  @Nullable
  @Override
  protected LinearGraphAnswer performAction(@NotNull LinearGraphAction action) {
    if (action.getAffectedElement() != null) {
      if (action.getType() == GraphAction.Type.MOUSE_CLICK) {
        GraphElement graphElement = action.getAffectedElement().getGraphElement();
        if (graphElement instanceof GraphNode) {
          LinearGraphAnswer answer = collapseNode((GraphNode)graphElement);
          if (answer != null) return answer;
          // Nothing to collapse: try expanding a dotted edge touching this node instead.
          for (GraphEdge dottedEdge : getAllAdjacentDottedEdges((GraphNode)graphElement)) {
            LinearGraphAnswer expandedAnswer = expandEdge(dottedEdge);
            if (expandedAnswer != null) return expandedAnswer;
          }
        }
        else if (graphElement instanceof GraphEdge) {
          return expandEdge((GraphEdge)graphElement);
        }
      }
      else if (action.getType() == GraphAction.Type.MOUSE_OVER) {
        GraphElement graphElement = action.getAffectedElement().getGraphElement();
        if (graphElement instanceof GraphNode) {
          LinearGraphAnswer answer = highlightNode((GraphNode)graphElement);
          if (answer != null) return answer;
          for (GraphEdge dottedEdge : getAllAdjacentDottedEdges((GraphNode)graphElement)) {
            LinearGraphAnswer highlightAnswer = highlightEdge(dottedEdge);
            if (highlightAnswer != null) return highlightAnswer;
          }
        }
        else if (graphElement instanceof GraphEdge) {
          return highlightEdge((GraphEdge)graphElement);
        }
      }
    }
    else if (action.getType() == GraphAction.Type.BUTTON_COLLAPSE) {
      return collapseAll();
    }
    else if (action.getType() == GraphAction.Type.BUTTON_EXPAND) {
      return expandAll();
    }
    return null;
  }

  /** All dotted (collapsed-merge) edges incident to the given node, in either direction. */
  @NotNull
  private List<GraphEdge> getAllAdjacentDottedEdges(GraphNode graphElement) {
    return ContainerUtil.filter(myCompiledGraph.getAdjacentEdges(graphElement.getNodeIndex(), EdgeFilter.ALL),
                                graphEdge -> graphEdge.getType() == GraphEdgeType.DOTTED);
  }

  /** Expands every collapsed fragment by dropping all dotted/hidden edge overlays. */
  @NotNull
  private LinearGraphAnswer expandAll() {
    return new LinearGraphAnswer(GraphChangesUtil.SOME_CHANGES) {
      @Nullable
      @Override
      public Runnable getGraphUpdater() {
        return () -> {
          myCompiledGraph.myDottedEdges.removeAll();
          myCompiledGraph.myHiddenEdges.removeAll();
        };
      }
    };
  }

  /**
   * Collapses every collapsible fragment. The collapse is first computed on a working
   * copy so the answer can report the exact edge diff; the copy is applied via the updater.
   */
  @NotNull
  private LinearGraphAnswer collapseAll() {
    final LinearBekGraph.WorkingLinearBekGraph workingGraph = new LinearBekGraph.WorkingLinearBekGraph(myCompiledGraph);
    new LinearBekGraphBuilder(workingGraph, myBekGraphLayout).collapseAll();
    return new LinearGraphAnswer(
      GraphChangesUtil.edgesReplaced(workingGraph.getRemovedEdges(), workingGraph.getAddedEdges(), getDelegateGraph())) {
      @Nullable
      @Override
      public Runnable getGraphUpdater() {
        return () -> workingGraph.applyChanges();
      }
    };
  }

  /** Highlights all nodes that collapsing at {@code node} would affect; null if nothing collapses. */
  @Nullable
  private LinearGraphAnswer highlightNode(GraphNode node) {
    Set<LinearBekGraphBuilder.MergeFragment> toCollapse = collectFragmentsToCollapse(node);
    if (toCollapse.isEmpty()) return null;

    Set<Integer> toHighlight = ContainerUtil.newHashSet();
    for (LinearBekGraphBuilder.MergeFragment fragment : toCollapse) {
      toHighlight.addAll(fragment.getAllNodes());
    }
    return LinearGraphUtils.createSelectedAnswer(myCompiledGraph, toHighlight);
  }

  /** Highlights both endpoints of a dotted edge; other edge types are ignored. */
  @Nullable
  private LinearGraphAnswer highlightEdge(GraphEdge edge) {
    if (edge.getType() == GraphEdgeType.DOTTED) {
      return LinearGraphUtils.createSelectedAnswer(myCompiledGraph,
                                                   ContainerUtil.set(edge.getUpNodeIndex(), edge.getDownNodeIndex()));
    }
    return null;
  }

  /** Collapses every fragment reachable from {@code node}; null when there is nothing to collapse. */
  @Nullable
  private LinearGraphAnswer collapseNode(GraphNode node) {
    SortedSet<Integer> toCollapse = collectNodesToCollapse(node);

    if (toCollapse.isEmpty()) return null;

    for (Integer i : toCollapse) {
      myLinearBekGraphBuilder.collapseFragment(i);
    }

    return new LinearGraphAnswer(GraphChangesUtil.SOME_CHANGES);
  }

  // Collects fragment parent/tail/body node indices, sorted in reverse order so that
  // fragments lower in the graph are collapsed before the ones above them.
  private SortedSet<Integer> collectNodesToCollapse(GraphNode node) {
    SortedSet<Integer> toCollapse = new TreeSet<>(Comparator.reverseOrder());
    for (LinearBekGraphBuilder.MergeFragment f : collectFragmentsToCollapse(node)) {
      toCollapse.add(f.getParent());
      toCollapse.addAll(f.getTailsAndBody());
    }
    return toCollapse;
  }

  /**
   * Breadth-first collection of merge fragments starting at {@code node}, following
   * tails/body of each found fragment. Bounded at 10 fragments to keep the
   * interactive collapse cheap on pathological graphs.
   */
  @NotNull
  private Set<LinearBekGraphBuilder.MergeFragment> collectFragmentsToCollapse(GraphNode node) {
    Set<LinearBekGraphBuilder.MergeFragment> result = ContainerUtil.newHashSet();
    int mergesCount = 0;

    LinkedHashSet<Integer> toProcess = ContainerUtil.newLinkedHashSet();
    toProcess.add(node.getNodeIndex());
    while (!toProcess.isEmpty()) {
      Integer i = ContainerUtil.getFirstItem(toProcess);
      toProcess.remove(i);

      LinearBekGraphBuilder.MergeFragment fragment = myLinearBekGraphBuilder.getFragment(i);
      if (fragment == null) continue;

      result.add(fragment);
      toProcess.addAll(fragment.getTailsAndBody());

      mergesCount++;
      if (mergesCount > 10) break;
    }
    return result;
  }

  /** Expands a single dotted edge back into the hidden sub-graph it replaced. */
  @Nullable
  private LinearGraphAnswer expandEdge(GraphEdge edge) {
    if (edge.getType() == GraphEdgeType.DOTTED) {
      return new LinearGraphAnswer(
        GraphChangesUtil.edgesReplaced(Collections.singleton(edge), myCompiledGraph.expandEdge(edge), getDelegateGraph()));
    }
    return null;
  }

  @NotNull
  private LinearGraph getDelegateGraph() {
    return getDelegateController().getCompiledGraph();
  }

  @NotNull
  @Override
  public LinearGraph getCompiledGraph() {
    return myCompiledGraph;
  }

  /**
   * A {@link GraphLayout} view over the permanent layout in bek index space:
   * node indices are mapped through {@link BekIntMap} to "usual" (permanent)
   * indices before querying, and head indices are mapped back.
   */
  private static class BekGraphLayout implements GraphLayout {
    private final GraphLayout myGraphLayout;
    private final BekIntMap myBekIntMap;

    BekGraphLayout(GraphLayout graphLayout, BekIntMap bekIntMap) {
      myGraphLayout = graphLayout;
      myBekIntMap = bekIntMap;
    }

    @Override
    public int getLayoutIndex(int nodeIndex) {
      return myGraphLayout.getLayoutIndex(myBekIntMap.getUsualIndex(nodeIndex));
    }

    @Override
    public int getOneOfHeadNodeIndex(int nodeIndex) {
      int usualIndex = myGraphLayout.getOneOfHeadNodeIndex(myBekIntMap.getUsualIndex(nodeIndex));
      return myBekIntMap.getBekIndex(usualIndex);
    }

    @NotNull
    @Override
    public List<Integer> getHeadNodeIndex() {
      List<Integer> bekIndexes = new ArrayList<>();
      for (int head : myGraphLayout.getHeadNodeIndex()) {
        bekIndexes.add(myBekIntMap.getBekIndex(head));
      }
      return bekIndexes;
    }
  }
}
package org.jgroups.protocols.jzookeeper;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Scanner;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

import org.jgroups.Address;
import org.jgroups.JChannel;
import org.jgroups.Message;
import org.jgroups.ReceiverAdapter;
import org.jgroups.Version;
import org.jgroups.View;
import org.jgroups.conf.ClassConfigurator;
import org.jgroups.jmx.JmxConfigurator;
import org.jgroups.protocols.jzookeeper.MessageId;
import org.jgroups.protocols.jzookeeper.Zab2PhasesHeader;
import org.jgroups.util.Util;

/**
 * Benchmark client for the Zab2Phases protocol.
 * <p>
 * A {@link Sender} thread pushes {@code REQUEST} messages to a randomly chosen
 * member of {@code zabBox}; {@link #receive(Message)} counts the replies and,
 * during warm-up, notifies the driver ({@link ZabTestThreadss}) once
 * {@code warmUpRequests} responses have arrived. A simple window
 * ({@code load} outstanding requests) throttles the sender.
 */
public class ZabClients extends ReceiverAdapter {

    private String props;
    // NOTE(review): these fields are static, so every new ZabClients instance
    // overwrites them for all instances. Preserved as-is for compatibility.
    private static String ProtocotName = "Zab2Phases";
    private JChannel channel;
    private Address local_addr = null;
    final AtomicInteger actually_sent = new AtomicInteger(0);
    private final CyclicBarrier barrier;
    private AtomicLong local = new AtomicLong(0);
    private final byte[] payload;
    private final long numsMsg;
    private long num_msgsPerThreads;
    private boolean startReset = true;
    private Sender sender;
    private long start, end, startTh, st = 0;
    // Count of responses received for the current run; written by the receive
    // callback, polled by the Sender's flow-control loop.
    private volatile long msgReceived = 0;
    private List<Address> zabBox = new ArrayList<Address>();
    private List<Long> latencies = new ArrayList<Long>();
    private View view;
    private static Scanner read = new Scanner(System.in);
    private static Calendar cal = Calendar.getInstance();
    // Protocol id used to look up the Zab2Phases header on incoming messages.
    private short ID = ClassConfigurator.getProtocolId(Zab2Phases.class);
    private static int load = 1;
    // Used to derive a unique name for each Sender thread.
    private static int count = 0;
    private long numSendMsg = 0;
    private volatile boolean isSend = false;
    private static boolean is_warmUp = false;
    private int msgReceivedWarmUp = 0;
    private long warmUpRequests = 0;
    private long currentLoad = 0;
    private int sendTime = 0;
    private ZabTestThreadss zabTest = new ZabTestThreadss();

    /**
     * @param zabbox              addresses of the Zab servers requests are sent to
     * @param barrier             synchronization barrier shared with the driver (currently unused by run())
     * @param numsMsg             total number of messages for the benchmark
     * @param local               shared sequence counter for message ids
     * @param payload             request payload bytes
     * @param ProtocotName        protocol name label (stored in a static field)
     * @param num_msgsPerThreads  messages each sender thread issues
     * @param propsFile           JGroups configuration for the channel
     * @param load                max outstanding (sent - received) requests (static field)
     * @param warmUpRequests      responses to receive before signalling warm-up completion
     * @param sendTime            per-message sleep in ms outside warm-up
     * @param zabTest             driver notified when warm-up finishes
     */
    public ZabClients(List<Address> zabbox, CyclicBarrier barrier, long numsMsg,
                      AtomicLong local, byte[] payload, String ProtocotName,
                      long num_msgsPerThreads, String propsFile, int load,
                      long warmUpRequests, int sendTime, ZabTestThreadss zabTest) {
        this.barrier = barrier;
        this.local = local;
        this.payload = payload;
        this.numsMsg = numsMsg;
        this.zabBox = zabbox;
        // Static assignments made explicit (the original wrote "this.X = X").
        ZabClients.ProtocotName = ProtocotName;
        this.num_msgsPerThreads = num_msgsPerThreads;
        this.warmUpRequests = warmUpRequests;
        this.props = propsFile;
        ZabClients.load = load;
        this.sendTime = sendTime;
        this.zabTest = zabTest;
        this.ID = ClassConfigurator.getProtocolId(Zab2Phases.class);
    }

    /** Resets all per-run counters and recorded latencies before a new run. */
    public void init() {
        startTh = 0;
        st = 0;
        msgReceived = 0;
        startReset = true;
        numSendMsg = 0;
        msgReceivedWarmUp = 0;
        latencies.clear();
    }

    /** Switches warm-up mode on/off (static: affects all instances). */
    public void setWarmUp(boolean warmUp) {
        is_warmUp = warmUp;
    }

    @Override
    public void viewAccepted(View new_view) {
        System.out.println("** view: " + new_view);
        view = new_view;
        System.out.println("** view: " + new_view);
    }

    /**
     * Prints a run banner, then creates and connects the JGroups channel,
     * registering this object as the receiver.
     *
     * @throws Exception if the channel cannot be created or connected
     */
    public void start() throws Exception {
        StringBuilder sb = new StringBuilder();
        sb.append("\n\n----------------------- ZABPerf -----------------------\n");
        sb.append("Date: ").append(new Date()).append('\n');
        sb.append("Run by: ").append(System.getProperty("user.name")).append("\n");
        sb.append("JGroups version: ").append(Version.description).append('\n');
        System.out.println(sb);
        channel = new JChannel(props);
        channel.setReceiver(this);
        channel.connect("Zab2PAll");
        local_addr = channel.getAddress();
    }

    /** Starts a new {@link Sender} thread that issues {@code numMsgs} requests. */
    public void sendMessages(long numMsgs) {
        msgReceived = 0;
        this.currentLoad = numMsgs;
        this.sender = new Sender(this.barrier, this.local, this.payload, numMsgs, load, sendTime);
        System.out.println("Start sending " + sender.getName());
        sender.start();
    }

    /** @return wall-clock time formatted as HH:mm:ss:SSS (for log lines). */
    private String getCurrentTimeStamp() {
        long timestamp = new Date().getTime();
        cal.setTimeInMillis(timestamp);
        return new SimpleDateFormat("HH:mm:ss:SSS").format(cal.getTime());
    }

    /**
     * Counts protocol responses. During warm-up every non-START_SENDING message
     * counts, and the driver is notified once {@code warmUpRequests} have
     * arrived; otherwise only {@code RESPONSE} messages are counted.
     */
    @Override
    public void receive(Message msg) {
        synchronized (this) {
            final Zab2PhasesHeader testHeader = (Zab2PhasesHeader) msg.getHeader(ID);
            if (testHeader.getType() != Zab2PhasesHeader.START_SENDING) {
                if (is_warmUp) {
                    msgReceived++;
                    if (msgReceived >= warmUpRequests) {
                        try {
                            zabTest.finishedSend();
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                }
                else if (testHeader.getType() == Zab2PhasesHeader.RESPONSE) {
                    msgReceived++;
                }
            }
        }
    }

    /**
     * Worker thread that sends {@code num_msgsPerThreads} REQUEST messages,
     * keeping at most {@code load} requests outstanding and (outside warm-up)
     * sleeping {@code sendTime} ms between sends.
     */
    public class Sender extends Thread {
        private final CyclicBarrier barrier;
        private AtomicLong local = new AtomicLong(0);
        private final byte[] payload;
        private long num_msgsPerThreads;
        private int load = 1;
        private int sendTime = 100;

        protected Sender(CyclicBarrier barrier, AtomicLong local, byte[] payload,
                         long num_msgsPerThreads, int load, int sendTime) {
            super("" + (count++));
            this.barrier = barrier;
            this.payload = payload;
            this.local = local;
            this.num_msgsPerThreads = num_msgsPerThreads;
            this.load = load;
            this.sendTime = sendTime;
        }

        @Override
        public void run() {
            System.out.println("Thread start " + getName());
            Address target;
            st = System.currentTimeMillis();
            startTh = System.currentTimeMillis();
            numSendMsg = 0;
            for (int i = 0; i < num_msgsPerThreads; i++) {
                numSendMsg = i;
                // Flow control: wait until fewer than 'load' requests are outstanding.
                while ((numSendMsg - msgReceived) > load) {
                    try {
                        Thread.sleep(0, 1);
                    } catch (InterruptedException e1) {
                        e1.printStackTrace();
                    }
                }
                try {
                    MessageId messageId = new MessageId(local_addr, local.getAndIncrement(),
                                                       System.currentTimeMillis());
                    Zab2PhasesHeader hdrReq = new Zab2PhasesHeader(Zab2PhasesHeader.REQUEST, messageId);
                    target = Util.pickRandomElement(zabBox);
                    Message msg = new Message(target, payload);
                    msg.putHeader(ID, hdrReq);
                    if (!is_warmUp) {
                        Thread.sleep(sendTime);
                    }
                    channel.send(msg);
                } catch (Exception e) {
                    // Was silently swallowed before; surface send failures so a
                    // misconfigured run does not look like a protocol stall.
                    e.printStackTrace();
                }
            }
        }
    }
}
package weixin.popular.bean.paymch;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;

/**
 * Response bean for the WeChat Pay coupon-query API, unmarshalled from the
 * {@code <xml>} payload via JAXB field access. Field names intentionally match
 * the wire format (snake_case) and must not be renamed — including the
 * {@code coupon_mininumn} spelling, which mirrors the API field.
 * Common result fields (return/result codes etc.) come from {@link MchBase}.
 */
@XmlRootElement(name="xml")
@XmlAccessorType(XmlAccessType.FIELD)
public class QueryCouponResult extends MchBase {

    private String sub_mch_id;          // sub-merchant id
    private String device_info;         // device identifier
    private String coupon_stock_id;     // coupon stock (batch) id
    private Integer coupon_stock_type;  // coupon stock type code
    private String coupon_id;           // individual coupon id
    private Integer coupon_value;       // face value (presumably in fen — verify against API docs)
    private Integer coupon_mininumn;    // minimum order amount to use the coupon (API's spelling)
    private String coupon_name;
    private Integer coupon_state;       // coupon status code
    private Integer coupon_type;        // coupon type code
    private String coupon_desc;
    private Integer coupon_use_value;   // amount already used
    private Integer coupon_remain_value; // remaining amount
    private String begin_time;          // validity start (string-formatted timestamp)
    private String end_time;            // validity end
    private String send_time;           // when the coupon was issued
    private String use_time;            // when the coupon was used
    private String trade_no;            // transaction number of the consuming order
    private String consumer_mch_id;     // merchant that consumed the coupon
    private String consumer_mch_name;
    private String consumer_mch_appid;
    private String send_source;         // issuing channel
    private String is_partial_use;      // whether partial use is allowed (string flag)

    public String getSub_mch_id() { return sub_mch_id; }

    public void setSub_mch_id(String sub_mch_id) { this.sub_mch_id = sub_mch_id; }

    public String getDevice_info() { return device_info; }

    public void setDevice_info(String device_info) { this.device_info = device_info; }

    public String getCoupon_stock_id() { return coupon_stock_id; }

    public void setCoupon_stock_id(String coupon_stock_id) { this.coupon_stock_id = coupon_stock_id; }

    public Integer getCoupon_stock_type() { return coupon_stock_type; }

    public void setCoupon_stock_type(Integer coupon_stock_type) { this.coupon_stock_type = coupon_stock_type; }

    public String getCoupon_id() { return coupon_id; }

    public void setCoupon_id(String coupon_id) { this.coupon_id = coupon_id; }

    public Integer getCoupon_value() { return coupon_value; }

    public void setCoupon_value(Integer coupon_value) { this.coupon_value = coupon_value; }

    public Integer getCoupon_mininumn() { return coupon_mininumn; }

    public void setCoupon_mininumn(Integer coupon_mininumn) { this.coupon_mininumn = coupon_mininumn; }

    public String getCoupon_name() { return coupon_name; }

    public void setCoupon_name(String coupon_name) { this.coupon_name = coupon_name; }

    public Integer getCoupon_state() { return coupon_state; }

    public void setCoupon_state(Integer coupon_state) { this.coupon_state = coupon_state; }

    public Integer getCoupon_type() { return coupon_type; }

    public void setCoupon_type(Integer coupon_type) { this.coupon_type = coupon_type; }

    public String getCoupon_desc() { return coupon_desc; }

    public void setCoupon_desc(String coupon_desc) { this.coupon_desc = coupon_desc; }

    public Integer getCoupon_use_value() { return coupon_use_value; }

    public void setCoupon_use_value(Integer coupon_use_value) { this.coupon_use_value = coupon_use_value; }

    public Integer getCoupon_remain_value() { return coupon_remain_value; }

    public void setCoupon_remain_value(Integer coupon_remain_value) { this.coupon_remain_value = coupon_remain_value; }

    public String getBegin_time() { return begin_time; }

    public void setBegin_time(String begin_time) { this.begin_time = begin_time; }

    public String getEnd_time() { return end_time; }

    public void setEnd_time(String end_time) { this.end_time = end_time; }

    public String getSend_time() { return send_time; }

    public void setSend_time(String send_time) { this.send_time = send_time; }

    public String getUse_time() { return use_time; }

    public void setUse_time(String use_time) { this.use_time = use_time; }

    public String getTrade_no() { return trade_no; }

    public void setTrade_no(String trade_no) { this.trade_no = trade_no; }

    public String getConsumer_mch_id() { return consumer_mch_id; }

    public void setConsumer_mch_id(String consumer_mch_id) { this.consumer_mch_id = consumer_mch_id; }

    public String getConsumer_mch_name() { return consumer_mch_name; }

    public void setConsumer_mch_name(String consumer_mch_name) { this.consumer_mch_name = consumer_mch_name; }

    public String getConsumer_mch_appid() { return consumer_mch_appid; }

    public void setConsumer_mch_appid(String consumer_mch_appid) { this.consumer_mch_appid = consumer_mch_appid; }

    public String getSend_source() { return send_source; }

    public void setSend_source(String send_source) { this.send_source = send_source; }

    public String getIs_partial_use() { return is_partial_use; }

    public void setIs_partial_use(String is_partial_use) { this.is_partial_use = is_partial_use; }
}
// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.net; import android.content.Context; import android.net.http.HttpResponseCache; import android.support.annotation.VisibleForTesting; import android.util.Log; import java.io.IOException; import java.net.URL; import java.net.URLConnection; import java.net.URLStreamHandlerFactory; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.concurrent.Executor; import javax.net.ssl.HttpsURLConnection; /** * An engine to process {@link UrlRequest}s, which uses the best HTTP stack * available on the current platform. An instance of this class can be created * using {@link Builder}. */ public abstract class CronetEngine { private static final String TAG = CronetEngine.class.getSimpleName(); /** * A builder for {@link CronetEngine}s, which allows runtime configuration of * {@code CronetEngine}. Configuration options are set on the builder and * then {@link #build} is called to create the {@code CronetEngine}. */ // NOTE(kapishnikov): In order to avoid breaking the existing API clients, all future methods // added to this class and other API classes must have default implementation. public static class Builder { /** * A class which provides a method for loading the cronet native library. Apps needing to * implement custom library loading logic can inherit from this class and pass an instance * to {@link CronetEngine.Builder#setLibraryLoader}. For example, this might be required * to work around {@code UnsatisfiedLinkError}s caused by flaky installation on certain * older devices. */ public abstract static class LibraryLoader { /** * Loads the native library. 
* @param libName name of the library to load */ public abstract void loadLibrary(String libName); } /** * Reference to the actual builder implementation. * {@hide exclude from JavaDoc}. */ protected final ICronetEngineBuilder mBuilderDelegate; /** * Constructs a {@link Builder} object that facilitates creating a * {@link CronetEngine}. The default configuration enables HTTP/2 and * disables QUIC and the HTTP cache. * * @param context Android {@link Context}, which is used by * {@link Builder} to retrieve the application * context. A reference to only the application * context will be kept, so as to avoid extending * the lifetime of {@code context} unnecessarily. */ public Builder(Context context) { this(createBuilderDelegate(context)); } /** * Constructs {@link Builder} with a given delegate that provides the actual implementation * of the {@code Builder} methods. This constructor is used only by the internal * implementation. * * @param builderDelegate delegate that provides the actual implementation. * * {@hide} */ public Builder(ICronetEngineBuilder builderDelegate) { mBuilderDelegate = builderDelegate; } /** * Constructs a User-Agent string including application name and version, * system build version, model and id, and Cronet version. * * @return User-Agent string. */ public String getDefaultUserAgent() { return mBuilderDelegate.getDefaultUserAgent(); } /** * Overrides the User-Agent header for all requests. An explicitly * set User-Agent header (set using * {@link UrlRequest.Builder#addHeader}) will override a value set * using this function. * * @param userAgent the User-Agent string to use for all requests. * @return the builder to facilitate chaining. */ public Builder setUserAgent(String userAgent) { mBuilderDelegate.setUserAgent(userAgent); return this; } /** * Sets directory for HTTP Cache and Cookie Storage. The directory must * exist. * <p> * <b>NOTE:</b> Do not use the same storage directory with more than one * {@code CronetEngine} at a time. 
Access to the storage directory does * not support concurrent access by multiple {@code CronetEngine}s. * * @param value path to existing directory. * @return the builder to facilitate chaining. */ public Builder setStoragePath(String value) { mBuilderDelegate.setStoragePath(value); return this; } /** * Sets a {@link LibraryLoader} to be used to load the native library. * If not set, the library will be loaded using {@link System#loadLibrary}. * @param loader {@code LibraryLoader} to be used to load the native library. * @return the builder to facilitate chaining. */ public Builder setLibraryLoader(LibraryLoader loader) { mBuilderDelegate.setLibraryLoader(loader); return this; } /** * Sets whether <a href="https://www.chromium.org/quic">QUIC</a> protocol * is enabled. Defaults to disabled. If QUIC is enabled, then QUIC User Agent Id * containing application name and Cronet version is sent to the server. * @param value {@code true} to enable QUIC, {@code false} to disable. * @return the builder to facilitate chaining. */ public Builder enableQuic(boolean value) { mBuilderDelegate.enableQuic(value); return this; } /** * Sets whether <a href="https://tools.ietf.org/html/rfc7540">HTTP/2</a> * protocol is enabled. Defaults to enabled. * @param value {@code true} to enable HTTP/2, {@code false} to disable. * @return the builder to facilitate chaining. */ public Builder enableHttp2(boolean value) { mBuilderDelegate.enableHttp2(value); return this; } /** * @deprecated SDCH is deprecated in Cronet M63. This method is a no-op. * {@hide exclude from JavaDoc}. */ @Deprecated public Builder enableSdch(boolean value) { return this; } /** * Sets whether <a href="https://tools.ietf.org/html/rfc7932">Brotli</a> compression is * enabled. If enabled, Brotli will be advertised in Accept-Encoding request headers. * Defaults to disabled. * @param value {@code true} to enable Brotli, {@code false} to disable. * @return the builder to facilitate chaining. 
*/ public Builder enableBrotli(boolean value) { mBuilderDelegate.enableBrotli(value); return this; } /** * Setting to disable HTTP cache. Some data may still be temporarily stored in memory. * Passed to {@link #enableHttpCache}. */ public static final int HTTP_CACHE_DISABLED = 0; /** * Setting to enable in-memory HTTP cache, including HTTP data. * Passed to {@link #enableHttpCache}. */ public static final int HTTP_CACHE_IN_MEMORY = 1; /** * Setting to enable on-disk cache, excluding HTTP data. * {@link #setStoragePath} must be called prior to passing this constant to * {@link #enableHttpCache}. */ public static final int HTTP_CACHE_DISK_NO_HTTP = 2; /** * Setting to enable on-disk cache, including HTTP data. * {@link #setStoragePath} must be called prior to passing this constant to * {@link #enableHttpCache}. */ public static final int HTTP_CACHE_DISK = 3; /** * Enables or disables caching of HTTP data and other information like QUIC * server information. * @param cacheMode control location and type of cached data. Must be one of * {@link #HTTP_CACHE_DISABLED HTTP_CACHE_*}. * @param maxSize maximum size in bytes used to cache data (advisory and maybe * exceeded at times). * @return the builder to facilitate chaining. */ public Builder enableHttpCache(int cacheMode, long maxSize) { mBuilderDelegate.enableHttpCache(cacheMode, maxSize); return this; } /** * Adds hint that {@code host} supports QUIC. * Note that {@link #enableHttpCache enableHttpCache} * ({@link #HTTP_CACHE_DISK}) is needed to take advantage of 0-RTT * connection establishment between sessions. * * @param host hostname of the server that supports QUIC. * @param port host of the server that supports QUIC. * @param alternatePort alternate port to use for QUIC. * @return the builder to facilitate chaining. */ public Builder addQuicHint(String host, int port, int alternatePort) { mBuilderDelegate.addQuicHint(host, port, alternatePort); return this; } /** * <p> * Pins a set of public keys for a given host. 
By pinning a set of public keys, * {@code pinsSha256}, communication with {@code hostName} is required to * authenticate with a certificate with a public key from the set of pinned ones. * An app can pin the public key of the root certificate, any of the intermediate * certificates or the end-entry certificate. Authentication will fail and secure * communication will not be established if none of the public keys is present in the * host's certificate chain, even if the host attempts to authenticate with a * certificate allowed by the device's trusted store of certificates. * </p> * <p> * Calling this method multiple times with the same host name overrides the previously * set pins for the host. * </p> * <p> * More information about the public key pinning can be found in * <a href="https://tools.ietf.org/html/rfc7469">RFC 7469</a>. * </p> * * @param hostName name of the host to which the public keys should be pinned. A host that * consists only of digits and the dot character is treated as invalid. * @param pinsSha256 a set of pins. Each pin is the SHA-256 cryptographic * hash of the DER-encoded ASN.1 representation of the Subject Public * Key Info (SPKI) of the host's X.509 certificate. Use * {@link java.security.cert.Certificate#getPublicKey() * Certificate.getPublicKey()} and * {@link java.security.Key#getEncoded() Key.getEncoded()} * to obtain DER-encoded ASN.1 representation of the SPKI. * Although, the method does not mandate the presence of the backup pin * that can be used if the control of the primary private key has been * lost, it is highly recommended to supply one. * @param includeSubdomains indicates whether the pinning policy should be applied to * subdomains of {@code hostName}. * @param expirationDate specifies the expiration date for the pins. * @return the builder to facilitate chaining. * @throws NullPointerException if any of the input parameters are {@code null}. 
* @throws IllegalArgumentException if the given host name is invalid or {@code pinsSha256} * contains a byte array that does not represent a valid * SHA-256 hash. */ public Builder addPublicKeyPins(String hostName, Set<byte[]> pinsSha256, boolean includeSubdomains, Date expirationDate) { mBuilderDelegate.addPublicKeyPins( hostName, pinsSha256, includeSubdomains, expirationDate); return this; } /** * Enables or disables public key pinning bypass for local trust anchors. Disabling the * bypass for local trust anchors is highly discouraged since it may prohibit the app * from communicating with the pinned hosts. E.g., a user may want to send all traffic * through an SSL enabled proxy by changing the device proxy settings and adding the * proxy certificate to the list of local trust anchor. Disabling the bypass will most * likly prevent the app from sending any traffic to the pinned hosts. For more * information see 'How does key pinning interact with local proxies and filters?' at * https://www.chromium.org/Home/chromium-security/security-faq * * @param value {@code true} to enable the bypass, {@code false} to disable. * @return the builder to facilitate chaining. */ public Builder enablePublicKeyPinningBypassForLocalTrustAnchors(boolean value) { mBuilderDelegate.enablePublicKeyPinningBypassForLocalTrustAnchors(value); return this; } public Builder setHostResolver(HostResolver hostResolver) { mBuilderDelegate.setHostResolver(hostResolver); return this; } /** * Build a {@link CronetEngine} using this builder's configuration. * @return constructed {@link CronetEngine}. */ public CronetEngine build() { return mBuilderDelegate.build(); } /** * Creates an implementation of {@link ICronetEngineBuilder} that can be used * to delegate the builder calls to. The method uses {@link CronetProvider} * to obtain the list of available providers. * * @param context Android Context to use. * @return the created {@code ICronetEngineBuilder}. 
*/ private static ICronetEngineBuilder createBuilderDelegate(Context context) { List<CronetProvider> providers = new ArrayList<>(CronetProvider.getAllProviders(context)); CronetProvider provider = getEnabledCronetProviders(context, providers).get(0); if (Log.isLoggable(TAG, Log.DEBUG)) { Log.d(TAG, String.format("Using '%s' provider for creating CronetEngine.Builder.", provider)); } return provider.createBuilder().mBuilderDelegate; } /** * Returns the list of available and enabled {@link CronetProvider}. The returned list * is sorted based on the provider versions and types. * * @param context Android Context to use. * @param providers the list of enabled and disabled providers to filter out and sort. * @return the sorted list of enabled providers. The list contains at least one provider. * @throws RuntimeException is the list of providers is empty or all of the providers * are disabled. */ @VisibleForTesting static List<CronetProvider> getEnabledCronetProviders( Context context, List<CronetProvider> providers) { // Check that there is at least one available provider. if (providers.size() == 0) { throw new RuntimeException("Unable to find any Cronet provider." + " Have you included all necessary jars?"); } // Exclude disabled providers from the list. for (Iterator<CronetProvider> i = providers.iterator(); i.hasNext();) { CronetProvider provider = i.next(); if (!provider.isEnabled()) { i.remove(); } } // Check that there is at least one enabled provider. if (providers.size() == 0) { throw new RuntimeException("All available Cronet providers are disabled." + " A provider should be enabled before it can be used."); } // Sort providers based on version and type. Collections.sort(providers, new Comparator<CronetProvider>() { @Override public int compare(CronetProvider p1, CronetProvider p2) { // The fallback provider should always be at the end of the list. 
if (CronetProvider.PROVIDER_NAME_FALLBACK.equals(p1.getName())) { return 1; } if (CronetProvider.PROVIDER_NAME_FALLBACK.equals(p2.getName())) { return -1; } // A provider with higher version should go first. return -compareVersions(p1.getVersion(), p2.getVersion()); } }); return providers; } /** * Compares two strings that contain versions. The string should only contain * dot-separated segments that contain an arbitrary number of digits digits [0-9]. * * @param s1 the first string. * @param s2 the second string. * @return -1 if s1<s2, +1 if s1>s2 and 0 if s1=s2. If two versions are equal, the * version with the higher number of segments is considered to be higher. * * @throws IllegalArgumentException if any of the strings contains an illegal * version number. */ @VisibleForTesting static int compareVersions(String s1, String s2) { if (s1 == null || s2 == null) { throw new IllegalArgumentException("The input values cannot be null"); } String[] s1segments = s1.split("\\."); String[] s2segments = s2.split("\\."); for (int i = 0; i < s1segments.length && i < s2segments.length; i++) { try { int s1segment = Integer.parseInt(s1segments[i]); int s2segment = Integer.parseInt(s2segments[i]); if (s1segment != s2segment) { return Integer.signum(s1segment - s2segment); } } catch (NumberFormatException e) { throw new IllegalArgumentException("Unable to convert version segments into" + " integers: " + s1segments[i] + " & " + s2segments[i], e); } } return Integer.signum(s1segments.length - s2segments.length); } } /** * @return a human-readable version string of the engine. */ public abstract String getVersionString(); /** * Shuts down the {@link CronetEngine} if there are no active requests, * otherwise throws an exception. * * Cannot be called on network thread - the thread Cronet calls into * Executor on (which is different from the thread the Executor invokes * callbacks on). May block until all the {@code CronetEngine}'s * resources have been cleaned up. 
*/ public abstract void shutdown(); /** * Starts NetLog logging to a file. The NetLog will contain events emitted * by all live CronetEngines. The NetLog is useful for debugging. * The file can be viewed using a Chrome browser navigated to * chrome://net-internals/#import * @param fileName the complete file path. It must not be empty. If the file * exists, it is truncated before starting. If actively logging, * this method is ignored. * @param logAll {@code true} to include basic events, user cookies, * credentials and all transferred bytes in the log. This option presents * a privacy risk, since it exposes the user's credentials, and should * only be used with the user's consent and in situations where the log * won't be public. * {@code false} to just include basic events. */ public abstract void startNetLogToFile(String fileName, boolean logAll); /** * Stops NetLog logging and flushes file to disk. If a logging session is * not in progress, this call is ignored. */ public abstract void stopNetLog(); /** * Returns differences in metrics collected by Cronet since the last call to * this method. * <p> * Cronet collects these metrics globally. This means deltas returned by * {@code getGlobalMetricsDeltas()} will include measurements of requests * processed by other {@link CronetEngine} instances. Since this function * returns differences in metrics collected since the last call, and these * metrics are collected globally, a call to any {@code CronetEngine} * instance's {@code getGlobalMetricsDeltas()} method will affect the deltas * returned by any other {@code CronetEngine} instance's * {@code getGlobalMetricsDeltas()}. * <p> * Cronet starts collecting these metrics after the first call to * {@code getGlobalMetricsDeltras()}, so the first call returns no * useful data as no metrics have yet been collected. 
* * @return differences in metrics collected by Cronet, since the last call * to {@code getGlobalMetricsDeltas()}, serialized as a * <a href=https://developers.google.com/protocol-buffers>protobuf * </a>. */ public abstract byte[] getGlobalMetricsDeltas(); /** * Establishes a new connection to the resource specified by the {@link URL} {@code url}. * <p> * <b>Note:</b> Cronet's {@link java.net.HttpURLConnection} implementation is subject to certain * limitations, see {@link #createURLStreamHandlerFactory} for details. * * @param url URL of resource to connect to. * @return an {@link java.net.HttpURLConnection} instance implemented by this CronetEngine. * @throws IOException if an error occurs while opening the connection. */ public abstract URLConnection openConnection(URL url) throws IOException; /** * Creates a {@link URLStreamHandlerFactory} to handle HTTP and HTTPS * traffic. An instance of this class can be installed via * {@link URL#setURLStreamHandlerFactory} thus using this CronetEngine by default for * all requests created via {@link URL#openConnection}. * <p> * Cronet does not use certain HTTP features provided via the system: * <ul> * <li>the HTTP cache installed via * {@link HttpResponseCache#install(java.io.File, long) HttpResponseCache.install()}</li> * <li>the HTTP authentication method installed via * {@link java.net.Authenticator#setDefault}</li> * <li>the HTTP cookie storage installed via {@link java.net.CookieHandler#setDefault}</li> * </ul> * <p> * While Cronet supports and encourages requests using the HTTPS protocol, * Cronet does not provide support for the * {@link HttpsURLConnection} API. 
This lack of support also * includes not using certain HTTPS features provided via the system: * <ul> * <li>the HTTPS hostname verifier installed via {@link * HttpsURLConnection#setDefaultHostnameVerifier(javax.net.ssl.HostnameVerifier) * HttpsURLConnection.setDefaultHostnameVerifier()}</li> * <li>the HTTPS socket factory installed via {@link * HttpsURLConnection#setDefaultSSLSocketFactory(javax.net.ssl.SSLSocketFactory) * HttpsURLConnection.setDefaultSSLSocketFactory()}</li> * </ul> * * @return an {@link URLStreamHandlerFactory} instance implemented by this * CronetEngine. */ public abstract URLStreamHandlerFactory createURLStreamHandlerFactory(); /** * Creates a builder for {@link UrlRequest}. All callbacks for * generated {@link UrlRequest} objects will be invoked on * {@code executor}'s threads. {@code executor} must not run tasks on the * thread calling {@link Executor#execute} to prevent blocking networking * operations and causing exceptions during shutdown. * * @param url URL for the generated requests. * @param callback callback object that gets invoked on different events. * @param executor {@link Executor} on which all callbacks will be invoked. */ public abstract UrlRequest.Builder newUrlRequestBuilder( String url, UrlRequest.Callback callback, Executor executor); }
package org.osmdroid.samples;

import org.osmdroid.R;
import org.osmdroid.ResourceProxy;
import org.osmdroid.ResourceProxyImpl;
import org.osmdroid.api.IMapController;
import org.osmdroid.constants.OpenStreetMapConstants;
import org.osmdroid.tileprovider.tilesource.ITileSource;
import org.osmdroid.tileprovider.tilesource.TileSourceFactory;
import org.osmdroid.util.GeoPoint;
import org.osmdroid.views.MapView;
import org.osmdroid.views.overlay.MinimapOverlay;
import org.osmdroid.views.overlay.ScaleBarOverlay;
import org.osmdroid.views.overlay.SimpleLocationOverlay;

import android.location.Location;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.SubMenu;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import android.widget.RelativeLayout.LayoutParams;

/**
 * Sample activity showing a fully decorated osmdroid map: scale bar, single-location
 * overlay, custom zoom buttons, a minimap overlay, and an options menu for zooming,
 * switching tile sources and toggling the minimap.
 *
 * @author Nicolas Gramlich
 */
public class SampleExtensive extends SampleMapActivity implements OpenStreetMapConstants {

	// ===========================================================
	// Constants
	// ===========================================================

	// Menu item ids are allocated sequentially from Menu.FIRST; tile-source
	// entries later use ids of 1000 + source ordinal (see onCreateOptionsMenu).
	private static final int MENU_ZOOMIN_ID = Menu.FIRST;
	private static final int MENU_ZOOMOUT_ID = MENU_ZOOMIN_ID + 1;
	private static final int MENU_TILE_SOURCE_ID = MENU_ZOOMOUT_ID + 1;
	private static final int MENU_ANIMATION_ID = MENU_TILE_SOURCE_ID + 1;
	private static final int MENU_MINIMAP_ID = MENU_ANIMATION_ID + 1;

	// ===========================================================
	// Fields
	// ===========================================================

	private MapView mOsmv;
	private IMapController mOsmvController;
	private SimpleLocationOverlay mMyLocationOverlay;
	private ResourceProxy mResourceProxy;
	private ScaleBarOverlay mScaleBarOverlay;
	private MinimapOverlay mMiniMapOverlay;

	// ===========================================================
	// Constructors
	// ===========================================================

	/** Called when the activity is first created. */
	@Override
	public void onCreate(final Bundle savedInstanceState) {
		super.onCreate(savedInstanceState, false); // Pass true here to actually contribute to OSM!

		mResourceProxy = new ResourceProxyImpl(getApplicationContext());

		// Build the view hierarchy programmatically: a RelativeLayout holding the
		// map plus the zoom buttons anchored to its corners.
		final RelativeLayout rl = new RelativeLayout(this);

		// 256 is the tile size in pixels.
		this.mOsmv = new MapView(this, 256);
		this.mOsmvController = this.mOsmv.getController();
		rl.addView(this.mOsmv, new RelativeLayout.LayoutParams(LayoutParams.FILL_PARENT,
				LayoutParams.FILL_PARENT));

		/* Scale Bar Overlay */
		{
			this.mScaleBarOverlay = new ScaleBarOverlay(this, mResourceProxy);
			this.mOsmv.getOverlays().add(mScaleBarOverlay);
			// Scale bar tries to draw as 1-inch, so to put it in the top center, set x offset to
			// half screen width, minus half an inch.
			this.mScaleBarOverlay.setScaleBarOffset(
					(int) (getResources().getDisplayMetrics().widthPixels / 2 - getResources()
							.getDisplayMetrics().xdpi / 2), 10);
		}

		/* SingleLocation-Overlay */
		{
			/*
			 * Create a static Overlay showing a single location. (Gets updated in
			 * onLocationChanged(Location loc)!
			 */
			this.mMyLocationOverlay = new SimpleLocationOverlay(this, mResourceProxy);
			this.mOsmv.getOverlays().add(mMyLocationOverlay);
		}

		/* ZoomControls */
		{
			/* Create a ImageView with a zoomIn-Icon. */
			final ImageView ivZoomIn = new ImageView(this);
			ivZoomIn.setImageResource(R.drawable.zoom_in);
			/* Create RelativeLayoutParams, that position it in the top right corner. */
			final RelativeLayout.LayoutParams zoominParams = new RelativeLayout.LayoutParams(
					RelativeLayout.LayoutParams.WRAP_CONTENT,
					RelativeLayout.LayoutParams.WRAP_CONTENT);
			zoominParams.addRule(RelativeLayout.ALIGN_PARENT_RIGHT);
			zoominParams.addRule(RelativeLayout.ALIGN_PARENT_TOP);
			rl.addView(ivZoomIn, zoominParams);

			ivZoomIn.setOnClickListener(new OnClickListener() {
				@Override
				public void onClick(final View v) {
					SampleExtensive.this.mOsmvController.zoomIn();
				}
			});

			/* Create a ImageView with a zoomOut-Icon. */
			final ImageView ivZoomOut = new ImageView(this);
			ivZoomOut.setImageResource(R.drawable.zoom_out);
			/* Create RelativeLayoutParams, that position it in the top left corner. */
			final RelativeLayout.LayoutParams zoomoutParams = new RelativeLayout.LayoutParams(
					RelativeLayout.LayoutParams.WRAP_CONTENT,
					RelativeLayout.LayoutParams.WRAP_CONTENT);
			zoomoutParams.addRule(RelativeLayout.ALIGN_PARENT_LEFT);
			zoomoutParams.addRule(RelativeLayout.ALIGN_PARENT_TOP);
			rl.addView(ivZoomOut, zoomoutParams);

			ivZoomOut.setOnClickListener(new OnClickListener() {
				@Override
				public void onClick(final View v) {
					SampleExtensive.this.mOsmvController.zoomOut();
				}
			});
		}

		/* MiniMap */
		{
			mMiniMapOverlay = new MinimapOverlay(this, mOsmv.getTileRequestCompleteHandler());
			this.mOsmv.getOverlays().add(mMiniMapOverlay);
		}

		// PathOverlay pathOverlay = new PathOverlay(Color.RED, this);
		// pathOverlay.addPoint(new GeoPoint(40.714623, -74.006605));
		// pathOverlay.addPoint(new GeoPoint(38.8951118, -77.0363658));
		// pathOverlay.addPoint(new GeoPoint(34.052186, -118.243932));
		// pathOverlay.getPaint().setStrokeWidth(50.0f);
		// pathOverlay.setAlpha(100);
		// this.mOsmv.getOverlays().add(pathOverlay);

		this.setContentView(rl);
	}

	// ===========================================================
	// Getter & Setter
	// ===========================================================

	// ===========================================================
	// Methods from SuperClass/Interfaces
	// ===========================================================

	/** Moves the single-location overlay marker to the new fix. */
	@Override
	public void onLocationChanged(final Location pLoc) {
		this.mMyLocationOverlay.setLocation(new GeoPoint(pLoc));
	}

	@Override
	public void onLocationLost() {
		// We'll do nothing here.
	}

	/**
	 * Builds the options menu: zoom in/out, a submenu of all registered tile
	 * sources (item ids offset by 1000 from the source ordinal), an animation
	 * demo entry, and a minimap toggle.
	 */
	@Override
	public boolean onCreateOptionsMenu(final Menu pMenu) {
		pMenu.add(0, MENU_ZOOMIN_ID, Menu.NONE, "ZoomIn");
		pMenu.add(0, MENU_ZOOMOUT_ID, Menu.NONE, "ZoomOut");

		final SubMenu subMenu = pMenu.addSubMenu(0, MENU_TILE_SOURCE_ID, Menu.NONE,
				"Choose Tile Source");
		{
			for (final ITileSource tileSource : TileSourceFactory.getTileSources()) {
				subMenu.add(0, 1000 + tileSource.ordinal(), Menu.NONE,
						tileSource.localizedName(mResourceProxy));
			}
		}

		pMenu.add(0, MENU_ANIMATION_ID, Menu.NONE, "Run Animation");
		pMenu.add(0, MENU_MINIMAP_ID, Menu.NONE, "Toggle Minimap");

		return true;
	}

	/**
	 * Dispatches menu selections. The default branch handles the tile-source
	 * submenu items by reversing the 1000-id offset applied in
	 * onCreateOptionsMenu; note that branch falls through to {@code return false}.
	 */
	@Override
	public boolean onMenuItemSelected(final int featureId, final MenuItem item) {
		switch (item.getItemId()) {
		case MENU_ZOOMIN_ID:
			this.mOsmvController.zoomIn();
			return true;

		case MENU_ZOOMOUT_ID:
			this.mOsmvController.zoomOut();
			return true;

		case MENU_TILE_SOURCE_ID:
			this.mOsmv.invalidate();
			return true;

		case MENU_MINIMAP_ID:
			mMiniMapOverlay.setEnabled(!mMiniMapOverlay.isEnabled());
			this.mOsmv.invalidate();
			return true;

		case MENU_ANIMATION_ID:
			// this.mOsmv.getController().animateTo(52370816, 9735936,
			// MapControllerOld.AnimationType.MIDDLEPEAKSPEED,
			// MapControllerOld.ANIMATION_SMOOTHNESS_HIGH,
			// MapControllerOld.ANIMATION_DURATION_DEFAULT); // Hannover
			// Stop the Animation after 500ms (just to show that it works)
			// new Handler().postDelayed(new Runnable(){
			// @Override
			// public void run() {
			// SampleExtensive.this.mOsmv.getController().stopAnimation(false);
			// }
			// }, 500);
			return true;

		default:
			ITileSource tileSource = TileSourceFactory.getTileSource(item.getItemId() - 1000);
			mOsmv.setTileSource(tileSource);
			mMiniMapOverlay.setTileSource(tileSource);
		}

		return false;
	}

	// ===========================================================
	// Methods
	// ===========================================================

	// ===========================================================
	// Inner and Anonymous Classes
	// ===========================================================
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.jdbc2;

import java.io.Serializable;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteJdbcDriver;
import org.apache.ignite.IgniteSystemProperties;
import org.apache.ignite.cache.query.QueryCursor;
import org.apache.ignite.cache.query.SqlFieldsQuery;
import org.apache.ignite.internal.IgniteKernal;
import org.apache.ignite.internal.processors.cache.QueryCursorImpl;
import org.apache.ignite.internal.processors.query.GridQueryFieldMetadata;
import org.apache.ignite.internal.util.typedef.CAX;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteCallable;
import org.apache.ignite.resources.IgniteInstanceResource;

/**
 * Task for SQL queries execution through {@link IgniteJdbcDriver}.
 * <p>
 * Not closed cursors will be removed after {@link #RMV_DELAY} milliseconds.
 * This parameter can be configured via {@link IgniteSystemProperties#IGNITE_JDBC_DRIVER_CURSOR_REMOVE_DELAY}
 * system property.
 *
 * Deprecated due to introduction of DML features - see {@link JdbcQueryTaskV2}.
 */
@Deprecated
class JdbcQueryTask implements IgniteCallable<JdbcQueryTask.QueryResult> {
    /** Serial version uid. */
    private static final long serialVersionUID = 0L;

    /** How long to store open cursor. */
    private static final long RMV_DELAY = IgniteSystemProperties.getLong(
        IgniteSystemProperties.IGNITE_JDBC_DRIVER_CURSOR_REMOVE_DELAY, 600000);

    /** Scheduler. Single shared thread that expires idle cursors. */
    private static final ScheduledExecutorService SCHEDULER = Executors.newScheduledThreadPool(1);

    /**
     * Open cursors, keyed by the JDBC statement's UUID. Shared (static) across all
     * task instances executing in this JVM so that follow-up fetch tasks for the
     * same statement can find the cursor created by the first execution.
     */
    private static final ConcurrentMap<UUID, Cursor> CURSORS = new ConcurrentHashMap<>();

    /** Ignite. Injected on the executing node; not serialized. */
    @IgniteInstanceResource
    private Ignite ignite;

    /** Uuid identifying the originating JDBC statement / cursor. */
    private final UUID uuid;

    /** Cache name. */
    private final String cacheName;

    /** Sql. */
    private final String sql;

    /** Args. */
    private final Object[] args;

    /** Fetch size. A value of 0 means "no page limit" (see {@link #call()}). */
    private final int fetchSize;

    /** Local execution flag. */
    private final boolean loc;

    /** Local query flag. */
    private final boolean locQry;

    /** Collocated query flag. */
    private final boolean collocatedQry;

    /** Distributed joins flag. */
    private final boolean distributedJoins;

    /**
     * @param ignite Ignite.
     * @param cacheName Cache name.
     * @param sql Sql query.
     * @param loc Local execution flag.
     * @param args Args.
     * @param fetchSize Fetch size.
     * @param uuid UUID.
     * @param locQry Local query flag.
     * @param collocatedQry Collocated query flag.
     * @param distributedJoins Distributed joins flag.
     */
    public JdbcQueryTask(Ignite ignite, String cacheName, String sql,
        boolean loc, Object[] args, int fetchSize, UUID uuid,
        boolean locQry, boolean collocatedQry, boolean distributedJoins) {
        this.ignite = ignite;
        this.args = args;
        this.uuid = uuid;
        this.cacheName = cacheName;
        this.sql = sql;
        this.fetchSize = fetchSize;
        this.loc = loc;
        this.locQry = locQry;
        this.collocatedQry = collocatedQry;
        this.distributedJoins = distributedJoins;
    }

    /**
     * {@inheritDoc}
     * <p>
     * First invocation for a given {@link #uuid} runs the query and caches the
     * cursor in {@link #CURSORS}; subsequent invocations resume fetching from the
     * cached cursor. Column metadata is only populated on the first invocation.
     */
    @Override public JdbcQueryTask.QueryResult call() throws Exception {
        Cursor cursor = CURSORS.get(uuid);

        List<String> tbls = null;
        List<String> cols = null;
        List<String> types = null;

        boolean first;

        // Intentional assignment-in-condition: 'first' records whether this is
        // the initial execution (no cached cursor yet) for this statement.
        if (first = (cursor == null)) {
            IgniteCache<?, ?> cache = ignite.cache(cacheName);

            // Don't create caches on server nodes in order to avoid of data rebalancing.
            boolean start = ignite.configuration().isClientMode();

            if (cache == null && cacheName == null)
                cache = ((IgniteKernal)ignite).context().cache().getOrStartPublicCache(start, !loc && locQry);

            if (cache == null) {
                if (cacheName == null)
                    throw new SQLException("Failed to execute query. No suitable caches found.");
                else
                    throw new SQLException("Cache not found [cacheName=" + cacheName + ']');
            }

            SqlFieldsQuery qry = new SqlFieldsQuery(sql).setArgs(args);

            qry.setPageSize(fetchSize);
            qry.setLocal(locQry);
            qry.setCollocated(collocatedQry);
            qry.setDistributedJoins(distributedJoins);

            QueryCursor<List<?>> qryCursor = cache.withKeepBinary().query(qry);

            Collection<GridQueryFieldMetadata> meta = ((QueryCursorImpl<List<?>>)qryCursor).fieldsMeta();

            tbls = new ArrayList<>(meta.size());
            cols = new ArrayList<>(meta.size());
            types = new ArrayList<>(meta.size());

            for (GridQueryFieldMetadata desc : meta) {
                tbls.add(desc.typeName());
                cols.add(desc.fieldName().toUpperCase());
                types.add(desc.fieldTypeName());
            }

            CURSORS.put(uuid, cursor = new Cursor(qryCursor, qryCursor.iterator()));
        }

        List<List<?>> rows = new ArrayList<>();

        // Drain up to fetchSize rows; values that are not plain SQL types are
        // sent back as their string representation.
        for (List<?> row : cursor) {
            List<Object> row0 = new ArrayList<>(row.size());

            for (Object val : row)
                row0.add(val == null || JdbcUtils.isSqlType(val.getClass()) ? val : val.toString());

            rows.add(row0);

            if (rows.size() == fetchSize) // If fetchSize is 0 then unlimited
                break;
        }

        boolean finished = !cursor.hasNext();

        if (finished)
            remove(uuid, cursor);
        else if (first) {
            if (!loc)
                scheduleRemoval(uuid, RMV_DELAY);
        }
        // Replacing the entry with a fresh Cursor refreshes lastAccessTime via
        // CAS; if the CAS fails the cursor must have been removed concurrently.
        else if (!loc && !CURSORS.replace(uuid, cursor, new Cursor(cursor.cursor, cursor.iter)))
            assert !CURSORS.containsKey(uuid) : "Concurrent cursor modification.";

        return new QueryResult(uuid, finished, rows, cols, tbls, types);
    }

    /**
     * Schedules removal of stored cursor in case of remote query execution.
     *
     * @param uuid Cursor UUID.
     * @param delay Delay in milliseconds.
     */
    private void scheduleRemoval(final UUID uuid, long delay) {
        assert !loc;

        SCHEDULER.schedule(new CAX() {
            @Override public void applyx() {
                while (true) {
                    Cursor c = CURSORS.get(uuid);

                    if (c == null)
                        break;

                    // If the cursor was accessed since last scheduling then reschedule.
                    long untouchedTime = U.currentTimeMillis() - c.lastAccessTime;

                    if (untouchedTime < RMV_DELAY) {
                        scheduleRemoval(uuid, RMV_DELAY - untouchedTime);

                        break;
                    }
                    else if (remove(uuid, c))
                        break;
                }
            }
        }, delay, TimeUnit.MILLISECONDS);
    }

    /**
     * Removes the given cursor instance (CAS on both key and value) and closes
     * it if this call won the removal.
     *
     * @param uuid Cursor UUID.
     * @param c Cursor.
     * @return {@code true} If succeeded.
     */
    private static boolean remove(UUID uuid, Cursor c) {
        boolean rmv = CURSORS.remove(uuid, c);

        if (rmv)
            c.cursor.close();

        return rmv;
    }

    /**
     * Closes and removes cursor.
     *
     * @param uuid Cursor UUID.
     */
    static void remove(UUID uuid) {
        Cursor c = CURSORS.remove(uuid);

        if (c != null)
            c.cursor.close();
    }

    /**
     * Result of query execution.
     */
    static class QueryResult implements Serializable {
        /** Serial version uid. */
        private static final long serialVersionUID = 0L;

        /** Uuid. */
        private final UUID uuid;

        /** Finished. */
        private final boolean finished;

        /** Rows. */
        private final List<List<?>> rows;

        /** Tables. */
        private final List<String> tbls;

        /** Columns. */
        private final List<String> cols;

        /** Types. */
        private final List<String> types;

        /**
         * @param uuid UUID..
         * @param finished Finished.
         * @param rows Rows.
         * @param cols Columns.
         * @param tbls Tables.
         * @param types Types.
         */
        public QueryResult(UUID uuid, boolean finished, List<List<?>> rows, List<String> cols,
            List<String> tbls, List<String> types) {
            this.cols = cols;
            this.uuid = uuid;
            this.finished = finished;
            this.rows = rows;
            this.tbls = tbls;
            this.types = types;
        }

        /**
         * @return Query result rows.
         */
        public List<List<?>> getRows() {
            return rows;
        }

        /**
         * @return Tables metadata.
         */
        public List<String> getTbls() {
            return tbls;
        }

        /**
         * @return Columns metadata.
         */
        public List<String> getCols() {
            return cols;
        }

        /**
         * @return Types metadata.
         */
        public List<String> getTypes() {
            return types;
        }

        /**
         * @return Query UUID.
         */
        public UUID getUuid() {
            return uuid;
        }

        /**
         * @return {@code True} if it is finished query.
         */
        public boolean isFinished() {
            return finished;
        }
    }

    /**
     * Cursor. Pairs a query cursor with its (one-shot) iterator and stamps its
     * creation time, which doubles as the last-access time for expiry checks.
     */
    private static final class Cursor implements Iterable<List<?>> {
        /** Cursor. */
        final QueryCursor<List<?>> cursor;

        /** Iterator. */
        final Iterator<List<?>> iter;

        /** Last access time. */
        final long lastAccessTime;

        /**
         * @param cursor Cursor.
         * @param iter Iterator.
         */
        private Cursor(QueryCursor<List<?>> cursor, Iterator<List<?>> iter) {
            this.cursor = cursor;
            this.iter = iter;
            this.lastAccessTime = U.currentTimeMillis();
        }

        /** {@inheritDoc} Returns the shared iterator, not a fresh one. */
        @Override public Iterator<List<?>> iterator() {
            return iter;
        }

        /**
         * @return {@code True} if cursor has next element.
         */
        public boolean hasNext() {
            return iter.hasNext();
        }
    }
}
/*
 * Copyright 2014 The Skfiy Open Association.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.skfiy.typhon.action;

import java.util.List;
import javax.inject.Inject;
import org.skfiy.typhon.annotation.Action;
import org.skfiy.typhon.domain.Bag;
import org.skfiy.typhon.domain.Bag.Node;
import org.skfiy.typhon.domain.ITroop;
import org.skfiy.typhon.domain.Player;
import org.skfiy.typhon.domain.Troop;
import org.skfiy.typhon.domain.item.TroopItem;
import org.skfiy.typhon.packet.MultipleValue;
import org.skfiy.typhon.packet.Namespaces;
import org.skfiy.typhon.packet.Packet;
import org.skfiy.typhon.packet.PacketError;
import org.skfiy.typhon.packet.SingleValue;
import org.skfiy.typhon.packet.TroopPacket;
import org.skfiy.typhon.packet.TroopStrengPacket;
import org.skfiy.typhon.session.SessionUtils;
import org.skfiy.typhon.spi.troop.TroopProvider;

/**
 * Action handler for troop-related packets: strengthening, hardening,
 * resolving, and equipping/unequipping troop items into the five slots of a
 * casern's troop. Validation failures are reported to the client as
 * {@link PacketError}s; on success the work is delegated to
 * {@link TroopProvider}.
 *
 * @author Kevin Zou <kevinz@skfiy.org>
 */
public class TroopAction {

    @Inject
    private TroopProvider troopProvider;

    /**
     * Strengthens troops. Validates that every bag position referenced by the
     * packet resolves to an item before delegating to the provider; on the
     * first missing position a conflict error is written and processing stops.
     */
    @Action(Namespaces.TROOP_STRENG)
    public void troopStreng(TroopStrengPacket packet) {
        Player player = SessionUtils.getPlayer();

        // NOTE(review): 'intensities' appears to hold bag positions, not
        // intensity values - confirm against TroopStrengPacket.
        List<Integer> intensities = packet.getIntensities();
        for (int pos : intensities) {
            Node node = player.getBag().findNode(pos);
            if (node == null) {
                PacketError error = PacketError.createResult(packet,
                        PacketError.Condition.conflict);
                error.setText("Not found item[" + pos + "]/count is not enough");
                player.getSession().write(error);
                return;
            }
        }

        troopProvider.rise(packet);
    }

    /** Delegates a harden request directly to the provider. */
    @Action(Namespaces.TROOP_HARDEN)
    public void harden(SingleValue packet) {
        troopProvider.harden(packet);
    }

    /** Delegates a resolve (dismantle) request directly to the provider. */
    @Action(Namespaces.TROOP_RESOLVE)
    public void resolve(MultipleValue packet) {
        troopProvider.resolve(packet);
    }

    /**
     * Equips the troop item at the packet's bag position into the requested
     * slot (target 1-5) of the given casern's troop. Rejects the request if the
     * item is missing, already active, the troop is full, an item of the same
     * primary type is already equipped in any slot, or the target slot is
     * occupied. On success the item is marked active and troop properties are
     * recalculated.
     */
    @Action(Namespaces.TROOP_EQUIP)
    public void equip(TroopPacket packet) {
        Player player = SessionUtils.getPlayer();
        Node node = player.getBag().findNode(packet.getPos());
        if (node == null) {
            PacketError error = PacketError.createResult(packet,
                    PacketError.Condition.item_not_found);
            player.getSession().write(error);
            return;
        }

        TroopItem troopItem = node.getItem();
        // A non-null activeType means the item is already equipped somewhere.
        if (troopItem.getActiveType() != null) {
            PacketError error = PacketError.createResult(packet,
                    PacketError.Condition.conflict);
            error.setText("Actived");
            player.getSession().write(error);
            return;
        }

        ITroop.Type troopType = ITroop.Type.valueOf(packet.getCasernType());
        Troop troop = player.getNormal().getTroop(troopType);
        if (troop.isFull()) {
            PacketError error = PacketError.createResult(packet,
                    PacketError.Condition.conflict);
            error.setText("IsFull");
            player.getSession().write(error);
            return;
        }

        // Only one item of a given primary type may be equipped across the
        // five slots; unset slots resolve to no node and count as not-same.
        boolean sameType = isSameType(troopItem, player.getBag(), troop.getFirst())
                || isSameType(troopItem, player.getBag(), troop.getSecond())
                || isSameType(troopItem, player.getBag(), troop.getThird())
                || isSameType(troopItem, player.getBag(), troop.getFour())
                || isSameType(troopItem, player.getBag(), troop.getFive());
        if (sameType) {
            PacketError error = PacketError.createResult(packet,
                    PacketError.Condition.conflict);
            error.setText("Same Type");
            player.getSession().write(error);
            return;
        }

        //==========================================================================================
        // A slot value <= 0 means the slot is free; store the bag position.
        boolean success = false;
        switch (packet.getTarget()) {
            case 1:
                if (troop.getFirst() <= 0) {
                    troop.setFirst(packet.getPos());
                    success = true;
                }
                break;
            case 2:
                if (troop.getSecond() <= 0) {
                    troop.setSecond(packet.getPos());
                    success = true;
                }
                break;
            case 3:
                if (troop.getThird() <= 0) {
                    troop.setThird(packet.getPos());
                    success = true;
                }
                break;
            case 4:
                if (troop.getFour() <= 0) {
                    troop.setFour(packet.getPos());
                    success = true;
                }
                break;
            case 5:
                if (troop.getFive() <= 0) {
                    troop.setFive(packet.getPos());
                    success = true;
                }
                break;
        }

        if (!success) {
            PacketError error = PacketError.createResult(packet,
                    PacketError.Condition.conflict);
            error.setText("Not success");
            player.getSession().write(error);
            return;
        }

        troopItem.setActiveType(troopType);
        player.getSession().write(Packet.createResult(packet));

        troopProvider.calculateTroopProps(player);
    }

    /**
     * Unequips the item held in the requested slot (target 1-5) of the given
     * casern's troop, clearing the slot and the item's active type, then
     * recalculates troop properties. Writes a conflict error if the slot is
     * already empty or the target is out of range.
     *
     * @param packet the unequip request
     */
    @Action(Namespaces.TROOP_UNEQUIP)
    public void unequip(TroopPacket packet) {
        Player player = SessionUtils.getPlayer();
        Troop troop = player.getNormal().getTroop(ITroop.Type.valueOf(packet.getCasernType()));

        int pos = 0;
        boolean success = false;
        switch (packet.getTarget()) {
            case 1:
                if (troop.getFirst() > 0) {
                    pos = troop.getFirst();
                    troop.setFirst(0);
                    success = true;
                }
                break;
            case 2:
                if (troop.getSecond() > 0) {
                    pos = troop.getSecond();
                    troop.setSecond(0);
                    success = true;
                }
                break;
            case 3:
                if (troop.getThird() > 0) {
                    pos = troop.getThird();
                    troop.setThird(0);
                    success = true;
                }
                break;
            case 4:
                if (troop.getFour() > 0) {
                    pos = troop.getFour();
                    troop.setFour(0);
                    success = true;
                }
                break;
            case 5:
                if (troop.getFive() > 0) {
                    pos = troop.getFive();
                    troop.setFive(0);
                    success = true;
                }
                break;
        }

        if (!success) {
            PacketError error = PacketError.createResult(packet,
                    PacketError.Condition.conflict);
            error.setText("Not success");
            player.getSession().write(error);
            return;
        }

        // NOTE(review): assumes the bag node for a previously equipped position
        // always exists; a missing node here would NPE - confirm invariants.
        Node node = player.getBag().findNode(pos);
        TroopItem troopItem = node.getItem();
        troopItem.setActiveType(null);

        player.getSession().write(Packet.createResult(packet));
        troopProvider.calculateTroopProps(player);
    }

    /**
     * Returns whether the item at bag position {@code pos} shares the same
     * primary type as {@code troopItem}. An unresolvable position (e.g. an
     * empty slot value) yields {@code false}.
     */
    private boolean isSameType(TroopItem troopItem, Bag bag, int pos) {
        Node n = bag.findNode(pos);
        return (n != null && troopItem.getItemDobj().getPrimary()
                == ((TroopItem) n.getItem()).getItemDobj().getPrimary());
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ambari.server.controller.internal; import java.util.ArrayList; import java.util.Arrays; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.StaticallyInject; import org.apache.ambari.server.actionmanager.HostRoleStatus; import org.apache.ambari.server.controller.AmbariManagementController; import org.apache.ambari.server.controller.spi.NoSuchParentResourceException; import org.apache.ambari.server.controller.spi.NoSuchResourceException; import org.apache.ambari.server.controller.spi.Predicate; import org.apache.ambari.server.controller.spi.Request; import org.apache.ambari.server.controller.spi.RequestStatus; import org.apache.ambari.server.controller.spi.Resource; import org.apache.ambari.server.controller.spi.SystemException; import org.apache.ambari.server.controller.spi.UnsupportedPropertyException; import org.apache.ambari.server.controller.utilities.PropertyHelper; import org.apache.ambari.server.orm.dao.HostRoleCommandDAO; import 
org.apache.ambari.server.orm.dao.HostRoleCommandStatusSummaryDTO; import org.apache.ambari.server.orm.dao.StageDAO; import org.apache.ambari.server.orm.dao.UpgradeDAO; import org.apache.ambari.server.orm.entities.StageEntity; import org.apache.ambari.server.orm.entities.StageEntityPK; import org.apache.ambari.server.orm.entities.UpgradeEntity; import org.apache.ambari.server.orm.entities.UpgradeGroupEntity; import org.apache.ambari.server.orm.entities.UpgradeItemEntity; import org.apache.ambari.server.security.authorization.AuthorizationException; import org.apache.ambari.server.security.authorization.AuthorizationHelper; import org.apache.ambari.server.security.authorization.ResourceType; import org.apache.ambari.server.security.authorization.RoleAuthorization; import org.apache.ambari.server.state.Cluster; import com.google.inject.Inject; import org.apache.ambari.server.utils.SecretReference; import org.apache.commons.lang.StringUtils; /** * Manages the ability to get the status of upgrades. 
*/ @StaticallyInject public class UpgradeItemResourceProvider extends ReadOnlyResourceProvider { public static final String UPGRADE_CLUSTER_NAME = "UpgradeItem/cluster_name"; public static final String UPGRADE_REQUEST_ID = "UpgradeItem/request_id"; public static final String UPGRADE_GROUP_ID = "UpgradeItem/group_id"; public static final String UPGRADE_ITEM_STAGE_ID = "UpgradeItem/stage_id"; public static final String UPGRADE_ITEM_TEXT = "UpgradeItem/text"; private static final Set<String> PK_PROPERTY_IDS = new HashSet<String>( Arrays.asList(UPGRADE_REQUEST_ID, UPGRADE_ITEM_STAGE_ID)); private static final Set<String> PROPERTY_IDS = new HashSet<String>(); private static final Map<Resource.Type, String> KEY_PROPERTY_IDS = new HashMap<Resource.Type, String>(); private static Map<String, String> STAGE_MAPPED_IDS = new HashMap<String, String>(); @Inject private static UpgradeDAO s_dao; @Inject private static StageDAO s_stageDao; @Inject private static HostRoleCommandDAO s_hostRoleCommandDAO; static { // properties PROPERTY_IDS.add(UPGRADE_ITEM_STAGE_ID); PROPERTY_IDS.add(UPGRADE_GROUP_ID); PROPERTY_IDS.add(UPGRADE_REQUEST_ID); PROPERTY_IDS.add(UPGRADE_ITEM_TEXT); // !!! boo for (String p : StageResourceProvider.PROPERTY_IDS) { STAGE_MAPPED_IDS.put(p, p.replace("Stage/", "UpgradeItem/")); } PROPERTY_IDS.addAll(STAGE_MAPPED_IDS.values()); // keys KEY_PROPERTY_IDS.put(Resource.Type.UpgradeItem, UPGRADE_ITEM_STAGE_ID); KEY_PROPERTY_IDS.put(Resource.Type.UpgradeGroup, UPGRADE_GROUP_ID); KEY_PROPERTY_IDS.put(Resource.Type.Upgrade, UPGRADE_REQUEST_ID); KEY_PROPERTY_IDS.put(Resource.Type.Cluster, UPGRADE_CLUSTER_NAME); } /** * Constructor. 
* * @param controller the controller */ UpgradeItemResourceProvider(AmbariManagementController controller) { super(PROPERTY_IDS, KEY_PROPERTY_IDS, controller); } @Override public RequestStatus updateResources(Request request, Predicate predicate) throws SystemException, UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException { // the request should contain a single map of update properties... Iterator<Map<String,Object>> iterator = request.getProperties().iterator(); if (iterator.hasNext()) { Map<String,Object> updateProperties = iterator.next(); String statusPropertyId = STAGE_MAPPED_IDS.get(StageResourceProvider.STAGE_STATUS); String stageStatus = (String) updateProperties.get(statusPropertyId); if (null == stageStatus) { throw new IllegalArgumentException("Upgrade items can only have their status changed."); } HostRoleStatus desiredStatus = HostRoleStatus.valueOf(stageStatus); Set<Resource> resources = getResources(PropertyHelper.getReadRequest(), predicate); for (Resource resource : resources) { final String clusterName = (String)resource.getPropertyValue(UPGRADE_CLUSTER_NAME); final Cluster cluster; try { cluster = getManagementController().getClusters().getCluster(clusterName); } catch (AmbariException e) { throw new NoSuchParentResourceException( String.format("Cluster %s could not be loaded", clusterName)); } if (!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, cluster.getResourceId(), EnumSet.of(RoleAuthorization.CLUSTER_UPGRADE_DOWNGRADE_STACK))) { throw new AuthorizationException("The authenticated user does not have authorization to " + "manage upgrade and downgrade"); } // Set the desired status on the underlying stage. 
Long requestId = (Long) resource.getPropertyValue(UPGRADE_REQUEST_ID); Long stageId = (Long) resource.getPropertyValue(UPGRADE_ITEM_STAGE_ID); StageEntityPK primaryKey = new StageEntityPK(); primaryKey.setRequestId(requestId); primaryKey.setStageId(stageId); StageEntity stageEntity = s_stageDao.findByPK(primaryKey); if (null == stageEntity) { LOG.warn( "Unable to change the status of request {} and stage {} to {} because it does not exist", requestId, stageId, desiredStatus); return getRequestStatus(null); } s_stageDao.updateStageStatus(stageEntity, desiredStatus, getManagementController().getActionManager()); } } notifyUpdate(Resource.Type.UpgradeItem, request, predicate); return getRequestStatus(null); } @Override public Set<Resource> getResources(Request request, Predicate predicate) throws SystemException, UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException { Set<Resource> results = new LinkedHashSet<Resource>(); Set<String> requestPropertyIds = getRequestPropertyIds(request, predicate); for (Map<String, Object> propertyMap : getPropertyMaps(predicate)) { String clusterName = (String) propertyMap.get(UPGRADE_CLUSTER_NAME); String requestIdStr = (String) propertyMap.get(UPGRADE_REQUEST_ID); String groupIdStr = (String) propertyMap.get(UPGRADE_GROUP_ID); String stageIdStr = (String) propertyMap.get(UPGRADE_ITEM_STAGE_ID); if (null == requestIdStr || requestIdStr.isEmpty()) { throw new IllegalArgumentException("The upgrade id is required when querying for upgrades"); } if (null == groupIdStr || groupIdStr.isEmpty()) { throw new IllegalArgumentException("The upgrade group id is required when querying for upgrades"); } Long requestId = Long.valueOf(requestIdStr); Long groupId = Long.valueOf(groupIdStr); Long stageId = null; if (null != stageIdStr) { stageId = Long.valueOf(stageIdStr); } List<UpgradeItemEntity> entities = new ArrayList<UpgradeItemEntity>(); if (null == stageId) { UpgradeGroupEntity group = 
s_dao.findUpgradeGroup(groupId); if (null == group || null == group.getItems()) { throw new NoSuchResourceException(String.format("Cannot load upgrade for %s", requestIdStr)); } entities = group.getItems(); } else { UpgradeItemEntity entity = s_dao.findUpgradeItemByRequestAndStage(requestId, stageId); if (null != entity) { entities.add(entity); } } Map<Long, HostRoleCommandStatusSummaryDTO> requestAggregateCounts = s_hostRoleCommandDAO.findAggregateCounts(requestId); Map<Long, Map<Long, HostRoleCommandStatusSummaryDTO>> cache = new HashMap<>(); cache.put(requestId, requestAggregateCounts); // !!! need to do some lookup for stages, so use a stageid -> resource for // when that happens for (UpgradeItemEntity entity : entities) { Resource upgradeItemResource = toResource(entity, requestPropertyIds); StageEntityPK stagePrimaryKey = new StageEntityPK(); stagePrimaryKey.setRequestId(requestId); stagePrimaryKey.setStageId(entity.getStageId()); StageEntity stageEntity = s_stageDao.findByPK(stagePrimaryKey); Resource stageResource = StageResourceProvider.toResource(cache, stageEntity, StageResourceProvider.PROPERTY_IDS); for (String propertyId : StageResourceProvider.PROPERTY_IDS) { // Attempt to mask any passwords in fields that are property maps. 
Object value = stageResource.getPropertyValue(propertyId); if (StageResourceProvider.PROPERTIES_TO_MASK_PASSWORD_IN.contains(propertyId) && value.getClass().equals(String.class) && !StringUtils.isBlank((String) value)) { value = SecretReference.maskPasswordInPropertyMap((String) value); } setResourceProperty(upgradeItemResource, STAGE_MAPPED_IDS.get(propertyId), value, requestPropertyIds); } results.add(upgradeItemResource); } } return results; } @Override protected Set<String> getPKPropertyIds() { return PK_PROPERTY_IDS; } private Resource toResource(UpgradeItemEntity item, Set<String> requestedIds) { ResourceImpl resource = new ResourceImpl(Resource.Type.UpgradeItem); UpgradeGroupEntity group = item.getGroupEntity(); UpgradeEntity upgrade = group.getUpgradeEntity(); setResourceProperty(resource, UPGRADE_REQUEST_ID, upgrade.getRequestId(), requestedIds); setResourceProperty(resource, UPGRADE_GROUP_ID, group.getId(), requestedIds); setResourceProperty(resource, UPGRADE_ITEM_STAGE_ID, item.getStageId(), requestedIds); setResourceProperty(resource, UPGRADE_ITEM_TEXT, item.getText(), requestedIds); return resource; } }
/*******************************************************************************
 * Copyright SemanticBits, Northwestern University and Akaza Research
 *
 * Distributed under the OSI-approved BSD 3-Clause License.
 * See http://ncip.github.com/caaers/LICENSE.txt for details.
 ******************************************************************************/
package gov.nih.nci.cabig.caaers.api;

import gov.nih.nci.cabig.caaers.CaaersDbNoSecurityTestCase;
import gov.nih.nci.cabig.caaers.api.impl.DefaultResearchStaffMigratorService;
import gov.nih.nci.cabig.caaers.dao.query.ResearchStaffQuery;
import gov.nih.nci.cabig.caaers.domain.Identifier;
import gov.nih.nci.cabig.caaers.domain.Organization;
import gov.nih.nci.cabig.caaers.domain.ResearchStaff;
import gov.nih.nci.cabig.caaers.domain.SiteResearchStaff;
import gov.nih.nci.cabig.caaers.domain.repository.ResearchStaffRepository;
import gov.nih.nci.cabig.caaers.integration.schema.researchstaff.ResearchStaffType;
import gov.nih.nci.cabig.caaers.integration.schema.researchstaff.SiteResearchStaffRoleType;
import gov.nih.nci.cabig.caaers.integration.schema.researchstaff.SiteResearchStaffType;

import java.io.File;
import java.io.IOException;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.List;

import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import javax.xml.datatype.DatatypeConstants;
import javax.xml.datatype.DatatypeFactory;
import javax.xml.datatype.XMLGregorianCalendar;

import org.apache.commons.lang.StringUtils;
import org.junit.Test;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.core.io.support.ResourcePatternResolver;

/**
 * Database integration tests for {@link DefaultResearchStaffMigratorService}:
 * each test unmarshals a staff XML fixture, saves it through the migrator
 * service, then re-fetches the staff and asserts the persisted state.
 *
 * NOTE(review): the "skipTest*" prefix means these tests are not picked up by a
 * "test*"-pattern runner despite the @Test annotations — presumably disabled on
 * purpose; confirm before renaming.
 */
public class ResearchStaffMigratorServiceTest extends CaaersDbNoSecurityTestCase {

    // Service under test, pulled from the deployed Spring context in setUp().
    private DefaultResearchStaffMigratorService svc = null;
    private JAXBContext jaxbContext = null;
    private Unmarshaller unmarshaller = null;
    // Last staff payload unmarshalled from a fixture file.
    private gov.nih.nci.cabig.caaers.integration.schema.researchstaff.Staff staff = null;
    private File xmlFile = null;
    private ResearchStaffRepository researchStaffRepository= null;
    Identifier identifier = null;
    Organization organization = null;
    ResearchStaff updatedResearchStaff = null;

    /**
     * Builds the JAXB unmarshaller and resolves the migrator service and the
     * research-staff repository from the deployed application context.
     */
    @Override
    protected void setUp() throws Exception {
        super.setUp();
        jaxbContext = JAXBContext.newInstance("gov.nih.nci.cabig.caaers.integration.schema.researchstaff");
        unmarshaller = jaxbContext.createUnmarshaller();
        svc = (DefaultResearchStaffMigratorService)getDeployedApplicationContext().getBean("researchStaffMigratorService");
        researchStaffRepository = (ResearchStaffRepository)getDeployedApplicationContext().getBean("researchStaffRepository");
    }

    /**
     * Creates a staff from one fixture, updates it from a second, then verifies
     * the updated contact details and address fetched by login id "jchapman".
     */
    @Test
    public void skipTestFetchReseachStaffByLoginId() throws Exception{
        try {
            //Create or update , whatever it is new data will be populated ..
            xmlFile = getResources("classpath*:gov/nih/nci/cabig/caaers/api/testdata/CreateResearchStaffTest.xml")[0].getFile();
            staff = (gov.nih.nci.cabig.caaers.integration.schema.researchstaff.Staff)unmarshaller.unmarshal(xmlFile);
            modifyDates(staff);
            svc.saveResearchStaff(staff);
            interruptSession();

            //update with modified data ..
            xmlFile = getResources("classpath*:gov/nih/nci/cabig/caaers/api/testdata/UpdateResearchStaffTest.xml")[0].getFile();
            staff = (gov.nih.nci.cabig.caaers.integration.schema.researchstaff.Staff)unmarshaller.unmarshal(xmlFile);
            modifyDates(staff);
            svc.saveResearchStaff(staff);
            interruptSession();

            updatedResearchStaff = fetchResearchStaff("jchapman");
            assertNotNull("The updated reseach staff should not be null.", updatedResearchStaff);
            assertEquals("111-345-0983", updatedResearchStaff.getFaxNumber());
            assertEquals("111-678-0098", updatedResearchStaff.getPhoneNumber());
            assertEquals("caaers.app2@gmail.com",updatedResearchStaff.getEmailAddress());
            assertNotNull("Research Staff should have an address.", updatedResearchStaff.getAddress());
            assertEquals("13921 Park Center Road", updatedResearchStaff.getAddress().getStreet());
            assertEquals("Herndon", updatedResearchStaff.getAddress().getCity());
            assertNotNull(updatedResearchStaff.getSiteResearchStaffs());
            assertEquals(1,updatedResearchStaff.getSiteResearchStaffs().size());
        } catch (IOException e) {
            e.printStackTrace();
            fail("Error running test: " + e.getMessage());
        } catch (JAXBException e) {
            e.printStackTrace();
            fail("Error running test: " + e.getMessage());
        }
    }

    /**
     * Create-then-update round trip keyed by email; verifies the updated fax
     * and phone numbers for "caaers.rock@gmail.com".
     */
    @Test
    public void skipTestResearchStaffByEmailSave() throws Exception{
        try {
            //Create or update , whatever it is new data will be populated ..
            xmlFile = getResources("classpath*:gov/nih/nci/cabig/caaers/api/testdata/CreateResearchStaffTest2.xml")[0].getFile();
            staff = (gov.nih.nci.cabig.caaers.integration.schema.researchstaff.Staff)unmarshaller.unmarshal(xmlFile);
            modifyDates(staff);
            svc.saveResearchStaff(staff);

            //update with modified data ..
            xmlFile = getResources("classpath*:gov/nih/nci/cabig/caaers/api/testdata/UpdateResearchStaffTest2.xml")[0].getFile();
            staff = (gov.nih.nci.cabig.caaers.integration.schema.researchstaff.Staff)unmarshaller.unmarshal(xmlFile);
            modifyDates(staff);
            svc.saveResearchStaff(staff);
            interruptSession();

            updatedResearchStaff = fetchResearchStaff("caaers.rock@gmail.com");
            assertNotNull("The updated reseach staff should not be null.", updatedResearchStaff);
            assertEquals("980-090-0983", updatedResearchStaff.getFaxNumber());
            assertEquals("657-093-0098", updatedResearchStaff.getPhoneNumber());
        } catch (IOException e) {
            e.printStackTrace();
            fail("Error running test: " + e.getMessage());
        } catch (JAXBException e) {
            e.printStackTrace();
            fail("Error running test: " + e.getMessage());
        }
    }

    /**
     * Verifies that an update adding a site research staff leaves the staff
     * with a non-null site collection.
     */
    @Test
    public void skipTestSiteRsAdd() throws Exception{
        try {
            //Create or update , whatever it is new data will be populated ..
            xmlFile = getResources("classpath*:gov/nih/nci/cabig/caaers/api/testdata/CreateResearchStaffTest.xml")[0].getFile();
            staff = (gov.nih.nci.cabig.caaers.integration.schema.researchstaff.Staff)unmarshaller.unmarshal(xmlFile);
            modifyDates(staff);
            svc.saveResearchStaff(staff);
            interruptSession();

            //update with modified data ..
            xmlFile = getResources("classpath*:gov/nih/nci/cabig/caaers/api/testdata/UpdateResearchStaffSiteRsAdd.xml")[0].getFile();
            staff = (gov.nih.nci.cabig.caaers.integration.schema.researchstaff.Staff)unmarshaller.unmarshal(xmlFile);
            modifyDates(staff);
            svc.saveResearchStaff(staff);
            interruptSession();

            updatedResearchStaff = fetchResearchStaff("jchapman");
            assertNotNull("The updated reseach staff should not be null.", updatedResearchStaff);
            assertNotNull(updatedResearchStaff.getSiteResearchStaffs());
//            assertEquals(1,updatedResearchStaff.getSiteResearchStaffs().size()); //for some reason this fails in oracle.
        } catch (IOException e) {
            e.printStackTrace();
            fail("Error running test: " + e.getMessage());
        } catch (JAXBException e) {
            e.printStackTrace();
            fail("Error running test: " + e.getMessage());
        }
    }

    /**
     * Disabled (prefixed _BROKEN_): exercises removal of a site research staff
     * and asserts the surviving site/role counts.
     */
    public void _BROKEN_testSiteRsRemove() throws Exception{
        try {
            //Create or update , whatever it is new data will be populated ..
            xmlFile = getResources("classpath*:gov/nih/nci/cabig/caaers/api/testdata/CreateResearchStaffWithTwoSiteRs.xml")[0].getFile();
            staff = (gov.nih.nci.cabig.caaers.integration.schema.researchstaff.Staff)unmarshaller.unmarshal(xmlFile);
            modifyDates(staff);
            svc.saveResearchStaff(staff);

            //update with modified data ..
            xmlFile = getResources("classpath*:gov/nih/nci/cabig/caaers/api/testdata/UpdateResearchStaffRemoveSiteRs.xml")[0].getFile();
            staff = (gov.nih.nci.cabig.caaers.integration.schema.researchstaff.Staff)unmarshaller.unmarshal(xmlFile);
            modifyDates(staff);
            svc.saveResearchStaff(staff);

            updatedResearchStaff = fetchResearchStaff("jchapman");
            assertNotNull("The updated staff should not be null.", updatedResearchStaff);
            assertNotNull("The site research staff should not be null.", updatedResearchStaff.getSiteResearchStaffs());
            assertEquals("There should be two site research staff.", 2,updatedResearchStaff.getSiteResearchStaffs().size());
            for(SiteResearchStaff siteResearchStaff : updatedResearchStaff.getSiteResearchStaffs()){
                assertNotNull("Site Research Staff should have a role.", siteResearchStaff.getSiteResearchStaffRoles());
                assertEquals("Site Research staff should have 2 roles.", 2, siteResearchStaff.getSiteResearchStaffRoles().size());
            }
        } catch (IOException e) {
            e.printStackTrace();
            fail("Error running test: " + e.getMessage());
        } catch (JAXBException e) {
            e.printStackTrace();
            fail("Error running test: " + e.getMessage());
        }
    }

    /**
     * Fetches the research staff from the DB
     * @return the first staff whose login id exactly matches, or null if none
     */
    private ResearchStaff fetchResearchStaff(String loginId) {//String nciIdentifier) {
        ResearchStaffQuery rsQuery = new ResearchStaffQuery();
        if (StringUtils.isNotEmpty(loginId)) {
            //rsQuery.filterByNciIdentifier(nciIdentifier);
            rsQuery.filterByExactLoginId(loginId);
        }
        List<ResearchStaff> rsList = researchStaffRepository.searchResearchStaff(rsQuery);
        if (rsList == null || rsList.isEmpty()) {
            return null;
        }
        return rsList.get(0);
    }

    // Resolves classpath fixture files via Spring's pattern resolver.
    private static Resource[] getResources(String pattern) throws IOException {
        ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
        Resource[] resources = resolver.getResources(pattern);
        return resources;
    }

    /**
     * Rewrites every site-staff role's start date to today and end date to one
     * year from today, so fixture date ranges are always currently active.
     */
    private void modifyDates(gov.nih.nci.cabig.caaers.integration.schema.researchstaff.Staff staff) throws Exception{
        DatatypeFactory df = DatatypeFactory.newInstance();
        Calendar gcNow = GregorianCalendar.getInstance();
        int year = gcNow.get(Calendar.YEAR);
        int month = gcNow.get(Calendar.MONTH)+1;
        int day = gcNow.get(Calendar.DAY_OF_MONTH);
        int tz = DatatypeConstants.FIELD_UNDEFINED;
        XMLGregorianCalendar currXmlCal = df.newXMLGregorianCalendarDate(year, month, day, tz);
        XMLGregorianCalendar furXmlCal = df.newXMLGregorianCalendarDate(year+1, month, day, tz);

        List<ResearchStaffType> researchStaffList = staff.getResearchStaff();
        List<SiteResearchStaffType> siteRsTypeList;
        List<SiteResearchStaffRoleType> siteRsRoleTypeList;
        for (ResearchStaffType researchStaffType:researchStaffList) {
            siteRsTypeList = researchStaffType.getSiteResearchStaffs().getSiteResearchStaff();
            for(SiteResearchStaffType sRsType : siteRsTypeList){
                siteRsRoleTypeList = sRsType.getSiteResearchStaffRoles().getSiteResearchStaffRole();
                for(SiteResearchStaffRoleType sRsRoleType : siteRsRoleTypeList){
                    sRsRoleType.setStartDate(currXmlCal);
                    sRsRoleType.setEndDate(furXmlCal);
                }
            }
        }
    }

    //TODO:fix the test cases and remove this
    public void test(){
    }
}
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.net;

import android.content.Context;
import android.os.Build;
import android.os.ConditionVariable;
import android.os.Handler;
import android.os.Looper;
import android.os.Process;
import android.util.Log;

import org.chromium.base.VisibleForTesting;
import org.chromium.base.annotations.CalledByNative;
import org.chromium.base.annotations.JNINamespace;
import org.chromium.base.annotations.NativeClassQualifiedName;
import org.chromium.base.annotations.UsedByReflection;

import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * UrlRequestContext using Chromium HTTP stack implementation.
 *
 * Owns a native CronetURLRequestContextAdapter; native init completes
 * asynchronously on the UI and network threads, and {@link #shutdown()} must
 * wait for that before destroying the adapter.
 */
@JNINamespace("cronet")
@UsedByReflection("UrlRequestContext.java")
class CronetUrlRequestContext extends UrlRequestContext {
    private static final int LOG_NONE = 3;  // LOG(FATAL), no VLOG.
    private static final int LOG_DEBUG = -1;  // LOG(FATAL...INFO), VLOG(1)
    private static final int LOG_VERBOSE = -2;  // LOG(FATAL...INFO), VLOG(2)
    static final String LOG_TAG = "ChromiumNetwork";

    /**
     * Synchronize access to mUrlRequestContextAdapter and shutdown routine.
     */
    private final Object mLock = new Object();
    // Opened by initNetworkThread() once native init finished on both threads.
    private final ConditionVariable mInitCompleted = new ConditionVariable(false);
    // Count of in-flight requests; shutdown() refuses while non-zero.
    private final AtomicInteger mActiveRequestCount = new AtomicInteger(0);

    // Native adapter pointer; 0 once destroyed. Guarded by mLock.
    private long mUrlRequestContextAdapter = 0;
    private Thread mNetworkThread;

    @UsedByReflection("UrlRequestContext.java")
    public CronetUrlRequestContext(Context context,
                                   UrlRequestContextConfig config) {
        CronetLibraryLoader.ensureInitialized(context, config);
        nativeSetMinLogLevel(getLoggingLevel());
        mUrlRequestContextAdapter = nativeCreateRequestContextAdapter(config.toString());
        if (mUrlRequestContextAdapter == 0) {
            throw new NullPointerException("Context Adapter creation failed.");
        }

        // Init native Chromium URLRequestContext on main UI thread.
        Runnable task = new Runnable() {
            @Override
            public void run() {
                synchronized (mLock) {
                    // mUrlRequestContextAdapter is guaranteed to exist until
                    // initialization on main and network threads completes and
                    // initNetworkThread is called back on network thread.
                    nativeInitRequestContextOnMainThread(mUrlRequestContextAdapter);
                }
            }
        };
        // Run task immediately or post it to the UI thread.
        if (Looper.getMainLooper() == Looper.myLooper()) {
            task.run();
        } else {
            new Handler(Looper.getMainLooper()).post(task);
        }
    }

    // Creates a request at default (MEDIUM) priority.
    @Override
    public UrlRequest createRequest(String url, UrlRequestListener listener,
                                    Executor executor) {
        synchronized (mLock) {
            checkHaveAdapter();
            return new CronetUrlRequest(this, mUrlRequestContextAdapter, url,
                                        UrlRequest.REQUEST_PRIORITY_MEDIUM, listener, executor);
        }
    }

    // Creates a request at the caller-supplied priority.
    @Override
    public UrlRequest createRequest(String url, UrlRequestListener listener,
                                    Executor executor, int priority) {
        synchronized (mLock) {
            checkHaveAdapter();
            return new CronetUrlRequest(this, mUrlRequestContextAdapter, url, priority, listener,
                                        executor);
        }
    }

    @Override
    public boolean isEnabled() {
        // Requires at least ICS (API level 14).
        return Build.VERSION.SDK_INT >= 14;
    }

    @Override
    public String getVersionString() {
        return "Cronet/" + Version.getVersion();
    }

    /**
     * Destroys the native adapter. Must not be called with active requests or
     * from the network thread; blocks until native init completed first.
     */
    @Override
    public void shutdown() {
        synchronized (mLock) {
            checkHaveAdapter();
            if (mActiveRequestCount.get() != 0) {
                throw new IllegalStateException(
                        "Cannot shutdown with active requests.");
            }
            // Destroying adapter stops the network thread, so it cannot be
            // called on network thread.
            if (Thread.currentThread() == mNetworkThread) {
                throw new IllegalThreadStateException(
                        "Cannot shutdown from network thread.");
            }
        }
        // Wait for init to complete on main and network thread (without lock,
        // so other thread could access it).
        mInitCompleted.block();
        synchronized (mLock) {
            // It is possible that adapter is already destroyed on another thread.
            if (!haveRequestContextAdapter()) {
                return;
            }
            nativeDestroy(mUrlRequestContextAdapter);
            mUrlRequestContextAdapter = 0;
        }
    }

    @Override
    public void startNetLogToFile(String fileName, boolean logAll) {
        synchronized (mLock) {
            checkHaveAdapter();
            nativeStartNetLogToFile(mUrlRequestContextAdapter, fileName, logAll);
        }
    }

    @Override
    public void stopNetLog() {
        synchronized (mLock) {
            checkHaveAdapter();
            nativeStopNetLog(mUrlRequestContextAdapter);
        }
    }

    /**
     * Mark request as started to prevent shutdown when there are active
     * requests.
     */
    void onRequestStarted(UrlRequest urlRequest) {
        mActiveRequestCount.incrementAndGet();
    }

    /**
     * Mark request as completed to allow shutdown when there are no active
     * requests.
     */
    void onRequestDestroyed(UrlRequest urlRequest) {
        mActiveRequestCount.decrementAndGet();
    }

    @VisibleForTesting
    long getUrlRequestContextAdapter() {
        synchronized (mLock) {
            checkHaveAdapter();
            return mUrlRequestContextAdapter;
        }
    }

    // Throws if the native adapter has already been destroyed.
    private void checkHaveAdapter() throws IllegalStateException {
        if (!haveRequestContextAdapter()) {
            throw new IllegalStateException("Context is shut down.");
        }
    }

    private boolean haveRequestContextAdapter() {
        return mUrlRequestContextAdapter != 0;
    }

    /**
     * @return loggingLevel see {@link #LOG_NONE}, {@link #LOG_DEBUG} and
     *         {@link #LOG_VERBOSE}.
     */
    private int getLoggingLevel() {
        int loggingLevel;
        if (Log.isLoggable(LOG_TAG, Log.VERBOSE)) {
            loggingLevel = LOG_VERBOSE;
        } else if (Log.isLoggable(LOG_TAG, Log.DEBUG)) {
            loggingLevel = LOG_DEBUG;
        } else {
            loggingLevel = LOG_NONE;
        }
        return loggingLevel;
    }

    // Called back by native code on the network thread once init completes;
    // records the thread and releases waiters in shutdown().
    @SuppressWarnings("unused")
    @CalledByNative
    private void initNetworkThread() {
        synchronized (mLock) {
            mNetworkThread = Thread.currentThread();
            mInitCompleted.open();
        }
        Thread.currentThread().setName("ChromiumNet");
        Process.setThreadPriority(Process.THREAD_PRIORITY_BACKGROUND);
    }

    // Native methods are implemented in cronet_url_request_context.cc.
    private static native long nativeCreateRequestContextAdapter(String config);

    private static native int nativeSetMinLogLevel(int loggingLevel);

    @NativeClassQualifiedName("CronetURLRequestContextAdapter")
    private native void nativeDestroy(long nativePtr);

    @NativeClassQualifiedName("CronetURLRequestContextAdapter")
    private native void nativeStartNetLogToFile(long nativePtr, String fileName, boolean logAll);

    @NativeClassQualifiedName("CronetURLRequestContextAdapter")
    private native void nativeStopNetLog(long nativePtr);

    @NativeClassQualifiedName("CronetURLRequestContextAdapter")
    private native void nativeInitRequestContextOnMainThread(long nativePtr);
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.chukwa.hicc;

import java.io.*;
import java.nio.charset.Charset;
import java.util.*;
import javax.servlet.*;
import javax.servlet.http.*;
import java.sql.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.chukwa.util.XssFilter;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import org.apache.hadoop.chukwa.util.ExceptionUtil;

/**
 * HICC workspace servlet: serves, saves, clones and deletes dashboard "views"
 * and lists widget descriptors, all backed by flat files under
 * $CHUKWA_DATA_DIR/views and $CHUKWA_DATA_DIR/descriptors, with generated
 * *.cache files as listings.
 *
 * NOTE(review): view/descriptor names come straight from request parameters and
 * are joined into file paths; XssFilter presumably sanitizes them, but path
 * traversal should be verified against that filter.
 */
public class Workspace extends HttpServlet {
  public static final long serialVersionUID = 101L;
  private static final Log log = LogFactory.getLog(Workspace.class);
  // Root of the flat-file workspace storage.
  private String path = System.getenv("CHUKWA_DATA_DIR");
  // Category tree accumulated by addToHash(); shared across requests.
  // NOTE(review): servlet instances are shared between request threads, so this
  // field is not thread-safe — confirm whether concurrent widget-list requests
  // can interleave here.
  private JSONObject hash = new JSONObject();
  transient private XssFilter xf;

  @Override
  protected void doTrace(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    resp.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
  }

  /**
   * Dispatches on the "method" request parameter to the matching handler.
   */
  public void doGet(HttpServletRequest request, HttpServletResponse response)
      throws IOException, ServletException {
    xf = new XssFilter(request);
    response.setContentType("text/plain");
    String method = xf.getParameter("method");
    // FIX: getParameter returns null when the parameter is absent; the previous
    // method.equals(...) chain then threw an NPE (HTTP 500). Answer 400 instead.
    if (method == null) {
      response.sendError(HttpServletResponse.SC_BAD_REQUEST);
      return;
    }
    if (method.equals("get_views_list")) {
      getViewsList(request, response);
    }
    if (method.equals("get_view")) {
      getView(request, response);
    }
    if (method.equals("save_view")) {
      saveView(request, response);
    }
    if (method.equals("change_view_info")) {
      changeViewInfo(request, response);
    }
    if (method.equals("get_widget_list")) {
      getWidgetList(request, response);
    }
    if (method.equals("clone_view")) {
      cloneView(request, response);
    }
    if (method.equals("delete_view")) {
      deleteView(request, response);
    }
  }

  public void doPost(HttpServletRequest request, HttpServletResponse response)
      throws IOException, ServletException {
    doGet(request, response);
  }

  /**
   * Reads a whole file as UTF-8 text, preserving line breaks as the platform
   * line separator. Returns the empty string on I/O failure.
   */
  static public String getContents(File aFile) {
    // ...checks on aFile are elided
    StringBuffer contents = new StringBuffer();
    try {
      // use buffering, reading one line at a time
      // FileReader always assumes default encoding is OK!
      BufferedReader input = new BufferedReader(new InputStreamReader(new FileInputStream(aFile.getAbsolutePath()), Charset.forName("UTF-8")));
      try {
        String line = null; // not declared within while loop
        /*
         * readLine is a bit quirky : it returns the content of a line MINUS the
         * newline. it returns null only for the END of the stream. it returns
         * an empty String if two newlines appear in a row.
         */
        while ((line = input.readLine()) != null) {
          contents.append(line);
          contents.append(System.getProperty("line.separator"));
        }
      } finally {
        input.close();
      }
    } catch (IOException ex) {
      ex.printStackTrace();
    }
    return contents.toString();
  }

  /**
   * Overwrites the named file with the given text, encoded as UTF-8.
   */
  public void setContents(String fName, String buffer) {
    try {
      BufferedWriter out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fName), Charset.forName("UTF-8")));
      out.write(buffer);
      out.close();
    } catch (Exception e) {
      System.err.println("Error: " + e.getMessage());
    }
  }

  /**
   * Copies an existing view to a new name (suffixing a counter to avoid
   * collisions), invalidates the view-list cache and returns the rebuilt list.
   */
  public void cloneView(HttpServletRequest request, HttpServletResponse response)
      throws IOException, ServletException {
    PrintWriter out = response.getWriter();
    String name = xf.getParameter("name");
    String template = xf.getParameter("clone_name");
    File aFile = new File(path + "/views/" + template);
    String config = getContents(aFile);
    int i = 0;
    boolean check = true;
    while (check) {
      String tmpName = name;
      if (i > 0) {
        tmpName = name + i;
      }
      File checkFile = new File(path + "/views/" + tmpName + ".view");
      check = checkFile.exists();
      if (!check) {
        name = tmpName;
      }
      i = i + 1;
    }
    setContents(path + "/views/" + name + ".view", config);
    File deleteCache = new File(path + "/views/workspace_view_list.cache");
    if(!deleteCache.delete()) {
      log.warn("Can not delete "+path + "/views/workspace_view_list.cache");
    }
    genViewCache(path + "/views");
    aFile = new File(path + "/views/workspace_view_list.cache");
    String viewsCache = getContents(aFile);
    out.println(viewsCache);
  }

  /**
   * Deletes the named view file and invalidates (then regenerates) the
   * view-list cache.
   */
  public void deleteView(HttpServletRequest request, HttpServletResponse response)
      throws IOException, ServletException {
    String name = xf.getParameter("name");
    File aFile = new File(path + "/views/" + name + ".view");
    if(!aFile.delete()) {
      log.warn("Can not delete " + path + "/views/" + name + ".view");
    }
    File deleteCache = new File(path + "/views/workspace_view_list.cache");
    if(!deleteCache.delete()) {
      log.warn("Can not delete "+path + "/views/workspace_view_list.cache");
    }
    genViewCache(path + "/views");
  }

  // Writes the (possibly regenerated) view-list cache to the response.
  public void getViewsList(HttpServletRequest request, HttpServletResponse response)
      throws IOException, ServletException {
    PrintWriter out = response.getWriter();
    genViewCache(path + "/views");
    File aFile = new File(path + "/views/workspace_view_list.cache");
    String viewsCache = getContents(aFile);
    out.println(viewsCache);
  }

  // Writes a single view's JSON to the response.
  public void getView(HttpServletRequest request, HttpServletResponse response)
      throws IOException, ServletException {
    PrintWriter out = response.getWriter();
    String id = xf.getParameter("id");
    genViewCache(path + "/views");
    File aFile = new File(path + "/views/" + id + ".view");
    String view = getContents(aFile);
    out.println(view);
  }

  /**
   * Updates a view's description (renaming the backing file to match) and
   * invalidates the view-list cache.
   */
  public void changeViewInfo(HttpServletRequest request,
      HttpServletResponse response) throws IOException, ServletException {
    PrintWriter out = response.getWriter();
    String id = xf.getParameter("name");
    String config = request.getParameter("config");
    try {
      JSONObject jt = (JSONObject) JSONValue.parse(config);
      File aFile = new File(path + "/views/" + id + ".view");
      String original = getContents(aFile);
      JSONObject updateObject = (JSONObject) JSONValue.parse(original);
      updateObject.put("description", jt.get("description"));
      setContents(path + "/views/" + id + ".view", updateObject.toString());
      if (!rename(id, jt.get("description").toString())) {
        throw new Exception("Rename view file failed");
      }
      File deleteCache = new File(path + "/views/workspace_view_list.cache");
      if(!deleteCache.delete()) {
        log.warn("Can not delete "+path + "/views/workspace_view_list.cache");
      }
      genViewCache(path + "/views");
      out.println("Workspace is stored successfully.");
    } catch (Exception e) {
      out.println("Workspace store failed.");
    }
  }

  // Stores the posted "config" payload as the named view file.
  public void saveView(HttpServletRequest request, HttpServletResponse response)
      throws IOException, ServletException {
    PrintWriter out = response.getWriter();
    String id = xf.getParameter("name");
    String config = request.getParameter("config");
    setContents(path + "/views/" + id + ".view", config);
    out.println("Workspace is stored successfully.");
  }

  // Writes the (possibly regenerated) widget descriptor cache to the response.
  public void getWidgetList(HttpServletRequest request,
      HttpServletResponse response) throws IOException, ServletException {
    PrintWriter out = response.getWriter();
    genWidgetCache(path + "/descriptors");
    File aFile = new File(path + "/descriptors/workspace_plugin.cache");
    String viewsCache = getContents(aFile);
    out.println(viewsCache);
  }

  /**
   * Rebuilds workspace_view_list.cache from the *.view files in the given
   * directory, if the cache file does not exist.
   */
  private void genViewCache(String source) {
    File cacheFile = new File(source + "/workspace_view_list.cache");
    if (!cacheFile.exists()) {
      File dir = new File(source);
      File[] filesWanted = dir.listFiles(new FilenameFilter() {
        public boolean accept(File dir, String name) {
          return name.endsWith(".view");
        }
      });
      if(filesWanted!=null) {
        JSONObject[] cacheGroup = new JSONObject[filesWanted.length];
        for (int i = 0; i < filesWanted.length; i++) {
          String buffer = getContents(filesWanted[i]);
          try {
            JSONObject jt = (JSONObject) JSONValue.parse(buffer);
            String fn = filesWanted[i].getName();
            // Key is the file name without the ".view" extension.
            jt.put("key", fn.substring(0, (fn.length() - 5)));
            cacheGroup[i] = jt;
          } catch (Exception e) {
            log.debug(ExceptionUtil.getStackTrace(e));
          }
        }
        String viewList = convertObjectsToViewList(cacheGroup);
        setContents(source + "/workspace_view_list.cache", viewList);
      }
    }
  }

  /**
   * Serializes the view objects into the JSON array format the UI expects,
   * attaching a permissive read/modify permission block to each entry.
   */
  public String convertObjectsToViewList(JSONObject[] objArray) {
    JSONArray jsonArr = new JSONArray();
    JSONObject permission = new JSONObject();
    JSONObject user = new JSONObject();
    try {
      permission.put("read", 1);
      permission.put("modify", 1);
      user.put("all", permission);
    } catch (Exception e) {
      System.err.println("JSON Exception: " + e.getMessage());
    }
    for (int i = 0; i < objArray.length; i++) {
      try {
        JSONObject jsonObj = new JSONObject();
        jsonObj.put("key", objArray[i].get("key"));
        jsonObj.put("description", objArray[i].get("description"));
        jsonObj.put("owner", "");
        jsonObj.put("permission", user);
        jsonArr.add(jsonObj);
      } catch (Exception e) {
        System.err.println("JSON Exception: " + e.getMessage());
      }
    }
    return jsonArr.toString();
  }

  /**
   * Rebuilds workspace_plugin.cache from the *.descriptor files when the cache
   * is missing or older than the directory.
   */
  private void genWidgetCache(String source) {
    File cacheFile = new File(source + "/workspace_plugin.cache");
    File cacheDir = new File(source);
    if (!cacheFile.exists()
        || cacheFile.lastModified() < cacheDir.lastModified()) {
      File dir = new File(source);
      File[] filesWanted = dir.listFiles(new FilenameFilter() {
        public boolean accept(File dir, String name) {
          return name.endsWith(".descriptor");
        }
      });
      if(filesWanted!=null) {
        JSONObject[] cacheGroup = new JSONObject[filesWanted.length];
        for (int i = 0; i < filesWanted.length; i++) {
          String buffer = getContents(filesWanted[i]);
          try {
            JSONObject jt = (JSONObject) JSONValue.parse(buffer);
            cacheGroup[i] = jt;
          } catch (Exception e) {
            log.debug(ExceptionUtil.getStackTrace(e));
          }
        }
        String widgetList = convertObjectsToWidgetList(cacheGroup);
        setContents(source + "/workspace_plugin.cache", widgetList);
      }
    }
  }

  /**
   * Builds the widget-list JSON: a flat "detail" array of all descriptors plus
   * a "children" tree grouped by each descriptor's comma-separated categories.
   */
  public String convertObjectsToWidgetList(JSONObject[] objArray) {
    JSONObject jsonObj = new JSONObject();
    JSONArray jsonArr = new JSONArray();
    for (int i = 0; i < objArray.length; i++) {
      jsonArr.add(objArray[i]);
    }
    try {
      jsonObj.put("detail", jsonArr);
    } catch (Exception e) {
      System.err.println("JSON Exception: " + e.getMessage());
    }
    for (int i = 0; i < objArray.length; i++) {
      try {
        String[] categoriesArray = objArray[i].get("categories").toString()
            .split(",");
        hash = addToHash(hash, categoriesArray, objArray[i]);
      } catch (Exception e) {
        System.err.println("JSON Exception: " + e.getMessage());
      }
    }
    try {
      jsonObj.put("children", hash);
    } catch (Exception e) {
      System.err.println("JSON Exception: " + e.getMessage());
    }
    return jsonObj.toString();
  }

  /**
   * Inserts a widget into the category tree: intermediate categories become
   * "node:<id>" objects (created on demand), the last level a
   * "leaf:<title>" -> id entry.
   */
  public JSONObject addToHash(JSONObject hash, String[] categoriesArray,
      JSONObject obj) {
    JSONObject subHash = hash;
    for (int i = 0; i < categoriesArray.length; i++) {
      String id = categoriesArray[i];
      if (i >= categoriesArray.length - 1) {
        try {
          subHash.put("leaf:" + obj.get("title"), obj.get("id"));
        } catch (Exception e) {
          System.err.println("JSON Exception: " + e.getMessage());
        }
      } else {
        try {
          subHash = (JSONObject) subHash.get("node:" + id);
        } catch (Exception e) {
          // Node does not exist (or is not a JSONObject) yet; create it.
          try {
            JSONObject tmpHash = new JSONObject();
            subHash.put("node:" + id, tmpHash);
            subHash = tmpHash;
          } catch (Exception ex) {
            // FIX: previously logged the outer exception 'e' here, hiding the
            // failure actually caught by this handler.
            log.debug(ExceptionUtil.getStackTrace(ex));
          }
        }
      }
    }
    return hash;
  }

  /**
   * Renames a view file from <id>.view to <desc>.view.
   *
   * @return true unless an exception occurred (a failed rename is only logged)
   */
  private boolean rename(String id, String desc) {
    try {
      File view = new File(path + "/views/" + id + ".view");
      File newFile = new File(path + File.separator + "views" + File.separator
          + desc + ".view");
      if(!view.renameTo(newFile)) {
        log.warn("Can not rename " + path + "/views/" + id + ".view to " +
            path + File.separator + "views" + File.separator + desc + ".view");
      }
    } catch (Exception e) {
      return false;
    }
    return true;
  }
}
package org.jabref.gui.entryeditor; import java.io.File; import java.nio.file.Path; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import javax.inject.Inject; import javafx.fxml.FXML; import javafx.geometry.Side; import javafx.scene.control.Button; import javafx.scene.control.ContextMenu; import javafx.scene.control.Label; import javafx.scene.control.MenuItem; import javafx.scene.control.Tab; import javafx.scene.control.TabPane; import javafx.scene.input.DataFormat; import javafx.scene.input.KeyEvent; import javafx.scene.input.TransferMode; import javafx.scene.layout.BorderPane; import org.jabref.gui.BasePanel; import org.jabref.gui.DialogService; import org.jabref.gui.GUIGlobals; import org.jabref.gui.StateManager; import org.jabref.gui.bibtexkeypattern.GenerateBibtexKeySingleAction; import org.jabref.gui.entryeditor.fileannotationtab.FileAnnotationTab; import org.jabref.gui.externalfiles.ExternalFilesEntryLinker; import org.jabref.gui.externalfiletype.ExternalFileTypes; import org.jabref.gui.help.HelpAction; import org.jabref.gui.keyboard.KeyBinding; import org.jabref.gui.menus.ChangeEntryTypeMenu; import org.jabref.gui.mergeentries.FetchAndMergeEntry; import org.jabref.gui.undo.CountingUndoManager; import org.jabref.gui.util.ColorUtil; import org.jabref.gui.util.DefaultTaskExecutor; import org.jabref.gui.util.TaskExecutor; import org.jabref.logic.TypedBibEntry; import org.jabref.logic.help.HelpFile; import org.jabref.logic.importer.EntryBasedFetcher; import org.jabref.logic.importer.WebFetchers; import org.jabref.model.database.BibDatabaseContext; import org.jabref.model.entry.BibEntry; import org.jabref.model.entry.field.Field; import org.jabref.model.util.FileUpdateMonitor; import org.jabref.preferences.PreferencesService; import org.jabref.preferences.PreviewPreferences; import com.airhacks.afterburner.views.ViewLoader; 
import org.fxmisc.easybind.EasyBind;
import org.fxmisc.easybind.Subscription;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * GUI component that allows editing of the fields of a BibEntry (i.e. the one that shows up, when you double click on
 * an entry in the table)
 * <p>
 * It hosts the tabs (required, general, optional) and the buttons to the left.
 * <p>
 * EntryEditor also registers itself to the event bus, receiving events whenever a field of the entry changes, enabling
 * the text fields to update themselves if the change is made from somewhere else.
 */
public class EntryEditor extends BorderPane {

    private static final Logger LOGGER = LoggerFactory.getLogger(EntryEditor.class);

    private final BasePanel panel;
    private final BibDatabaseContext databaseContext;
    private final EntryEditorPreferences entryEditorPreferences;
    private final ExternalFilesEntryLinker fileLinker;
    // All tabs the editor can show; visibility per entry is decided in recalculateVisibleTabs().
    private final List<EntryEditorTab> tabs;
    // Subscription to the current entry's type property; replaced on every setEntry().
    private Subscription typeSubscription;
    private BibEntry entry; // A reference to the entry this editor works on.
    private SourceTab sourceTab;

    @FXML private TabPane tabbed;
    @FXML private Button typeChangeButton;
    @FXML private Button fetcherButton;
    @FXML private Label typeLabel;

    @Inject private DialogService dialogService;
    @Inject private TaskExecutor taskExecutor;
    @Inject private PreferencesService preferencesService;
    @Inject private StateManager stateManager;
    @Inject private FileUpdateMonitor fileMonitor;
    @Inject private CountingUndoManager undoManager;

    // Backing list filled once by createTabs(); `tabs` aliases it afterwards.
    private final List<EntryEditorTab> entryEditorTabs = new LinkedList<>();

    public EntryEditor(BasePanel panel, ExternalFileTypes externalFileTypes) {
        this.panel = panel;
        this.databaseContext = panel.getBibDatabaseContext();

        // Loads the FXML and injects the @FXML/@Inject members above.
        ViewLoader.view(this)
                  .root(this)
                  .load();

        this.entryEditorPreferences = preferencesService.getEntryEditorPreferences();
        this.fileLinker = new ExternalFilesEntryLinker(externalFileTypes, preferencesService.getFilePreferences(), databaseContext);

        // NOTE(review): guards on currentFont but applies color settings — presumably
        // "custom theme configured" is signalled by a non-null font; confirm.
        if (GUIGlobals.currentFont != null) {
            setStyle(String.format("text-area-background: %s;text-area-foreground: %s;text-area-highlight: %s;",
                    ColorUtil.toHex(GUIGlobals.validFieldBackgroundColor),
                    ColorUtil.toHex(GUIGlobals.editorTextColor),
                    ColorUtil.toHex(GUIGlobals.activeBackgroundColor)));
        }

        // Whenever the selected tab changes, let the newly active tab bind to the entry.
        EasyBind.subscribe(tabbed.getSelectionModel().selectedItemProperty(), tab -> {
            EntryEditorTab activeTab = (EntryEditorTab) tab;
            if (activeTab != null) {
                activeTab.notifyAboutFocus(entry);
            }
        });

        setupKeyBindings();

        this.tabs = createTabs();

        this.setOnDragOver(event -> {
            if (event.getDragboard().hasFiles()) {
                event.acceptTransferModes(TransferMode.COPY, TransferMode.MOVE, TransferMode.LINK);
            }
            event.consume();
        });

        // Dropped files are linked/copied/moved according to the user preference;
        // the actual transfer mode (modifier key) can override the default action.
        this.setOnDragDropped(event -> {
            BibEntry entry = this.getEntry();
            boolean success = false;
            if (event.getDragboard().hasContent(DataFormat.FILES)) {
                List<Path> files = event.getDragboard().getFiles().stream().map(File::toPath).collect(Collectors.toList());
                FileDragDropPreferenceType dragDropPreferencesType = preferencesService.getEntryEditorFileLinkPreference();

                if (dragDropPreferencesType == FileDragDropPreferenceType.MOVE) {
                    if (event.getTransferMode() == TransferMode.LINK) {
                        // Alt on Windows
                        LOGGER.debug("Mode LINK");
                        fileLinker.addFilesToEntry(entry, files);
                    } else if (event.getTransferMode() == TransferMode.COPY) {
                        // Ctrl on Windows, no modifier on Xubuntu
                        LOGGER.debug("Mode COPY");
                        fileLinker.copyFilesToFileDirAndAddToEntry(entry, files);
                    } else {
                        // Shift on Windows or no modifier
                        LOGGER.debug("Mode MOVE");
                        fileLinker.moveFilesToFileDirAndAddToEntry(entry, files);
                    }
                    success = true;
                }
                if (dragDropPreferencesType == FileDragDropPreferenceType.COPY) {
                    // NOTE(review): under the COPY preference the COPY transfer mode
                    // deliberately moves (modifier inverts the default) — confirm.
                    if (event.getTransferMode() == TransferMode.COPY) {
                        // Ctrl on Windows, no modifier on Xubuntu
                        LOGGER.debug("Mode MOVE");
                        fileLinker.moveFilesToFileDirAndAddToEntry(entry, files);
                    } else if (event.getTransferMode() == TransferMode.LINK) {
                        // Alt on Windows
                        LOGGER.debug("Mode LINK");
                        fileLinker.addFilesToEntry(entry, files);
                    } else {
                        // Shift on Windows or no modifier
                        LOGGER.debug("Mode COPY");
                        fileLinker.copyFilesToFileDirAndAddToEntry(entry, files);
                    }
                    success = true;
                }
                if (dragDropPreferencesType == FileDragDropPreferenceType.LINK) {
                    if (event.getTransferMode() == TransferMode.COPY) {
                        // Ctrl on Windows, no modifier on Xubuntu
                        LOGGER.debug("Mode COPY");
                        fileLinker.copyFilesToFileDirAndAddToEntry(entry, files);
                    } else if (event.getTransferMode() == TransferMode.LINK) {
                        // Alt on Windows
                        LOGGER.debug("Mode MOVE");
                        fileLinker.moveFilesToFileDirAndAddToEntry(entry, files);
                    } else {
                        // Shift on Windows or no modifier
                        LOGGER.debug("Mode LINK");
                        fileLinker.addFilesToEntry(entry, files);
                    }
                    success = true;
                }
            }
            event.setDropCompleted(success);
            event.consume();
        });
    }

    /**
     * Set-up key bindings specific for the entry editor.
     */
    private void setupKeyBindings() {
        this.addEventHandler(KeyEvent.KEY_PRESSED, event -> {
            Optional<KeyBinding> keyBinding = entryEditorPreferences.getKeyBindings().mapToKeyBinding(event);
            if (keyBinding.isPresent()) {
                switch (keyBinding.get()) {
                    case ENTRY_EDITOR_NEXT_PANEL:
                    case ENTRY_EDITOR_NEXT_PANEL_2:
                        tabbed.getSelectionModel().selectNext();
                        event.consume();
                        break;
                    case ENTRY_EDITOR_PREVIOUS_PANEL:
                    case ENTRY_EDITOR_PREVIOUS_PANEL_2:
                        tabbed.getSelectionModel().selectPrevious();
                        event.consume();
                        break;
                    case ENTRY_EDITOR_NEXT_ENTRY:
                        panel.selectNextEntry();
                        event.consume();
                        break;
                    case ENTRY_EDITOR_PREVIOUS_ENTRY:
                        panel.selectPreviousEntry();
                        event.consume();
                        break;
                    case HELP:
                        HelpAction.openHelpPage(HelpFile.ENTRY_EDITOR);
                        event.consume();
                        break;
                    case CLOSE:
                    case CLOSE_ENTRY:
                        close();
                        event.consume();
                        break;
                    default:
                        // Pass other keys to parent
                }
            }
        });
    }

    @FXML
    public void close() {
        panel.entryEditorClosing();
    }

    @FXML
    private void deleteEntry() {
        panel.delete(entry);
    }

    @FXML
    void generateCiteKeyButton() {
        GenerateBibtexKeySingleAction action = new GenerateBibtexKeySingleAction(getEntry(), databaseContext,
                dialogService, entryEditorPreferences, undoManager);
        action.execute();
    }

    @FXML
    private void navigateToPreviousEntry() {
        panel.selectPreviousEntry();
    }

    @FXML
    private void navigateToNextEntry() {
        panel.selectNextEntry();
    }

    /**
     * Creates the full, ordered list of tabs this editor can ever show.
     * Called once from the constructor; visibility is filtered later per entry.
     */
    private List<EntryEditorTab> createTabs() {
        // Required fields
        entryEditorTabs.add(new RequiredFieldsTab(databaseContext, panel.getSuggestionProviders(), undoManager, dialogService));
        // Optional fields
        entryEditorTabs.add(new OptionalFieldsTab(databaseContext, panel.getSuggestionProviders(), undoManager, dialogService));
        entryEditorTabs.add(new OptionalFields2Tab(databaseContext, panel.getSuggestionProviders(), undoManager, dialogService));
        entryEditorTabs.add(new DeprecatedFieldsTab(databaseContext, panel.getSuggestionProviders(), undoManager, dialogService));
        // Other fields
        entryEditorTabs.add(new OtherFieldsTab(databaseContext, panel.getSuggestionProviders(), undoManager, entryEditorPreferences.getCustomTabFieldNames(), dialogService));
        // General fields from preferences
        for (Map.Entry<String, Set<Field>> tab : entryEditorPreferences.getEntryEditorTabList().entrySet()) {
            entryEditorTabs.add(new UserDefinedFieldsTab(tab.getKey(), tab.getValue(), databaseContext, panel.getSuggestionProviders(), undoManager, dialogService));
        }
        // Special tabs
        entryEditorTabs.add(new MathSciNetTab());
        entryEditorTabs.add(new FileAnnotationTab(panel.getAnnotationCache()));
        entryEditorTabs.add(new RelatedArticlesTab(this, entryEditorPreferences, dialogService));
        // Source tab
        sourceTab = new SourceTab(databaseContext, undoManager, entryEditorPreferences.getLatexFieldFormatterPreferences(), entryEditorPreferences.getImportFormatPreferences(), fileMonitor, dialogService, stateManager);
        entryEditorTabs.add(sourceTab);
        // LaTeX citations tab
        entryEditorTabs.add(new LatexCitationsTab(databaseContext, preferencesService, taskExecutor, dialogService));
        return entryEditorTabs;
    }

    /**
     * Adds/removes tabs so only those applicable to the current entry are shown,
     * while preserving tab order and avoiding a full clear-and-readd.
     */
    private void recalculateVisibleTabs() {
        List<Tab> visibleTabs = tabs.stream().filter(tab -> tab.shouldShow(entry)).collect(Collectors.toList());

        // Start of ugly hack:
        // We need to find out, which tabs will be shown and which not and remove and re-add the appropriate tabs
        // to the editor. We don't want to simply remove all and re-add the complete list of visible tabs, because
        // the tabs give an ugly animation the looks like all tabs are shifting in from the right.
        // This hack is required since tabbed.getTabs().setAll(visibleTabs) changes the order of the tabs in the editor

        // First, remove tabs that we do not want to show
        List<EntryEditorTab> toBeRemoved = tabs.stream().filter(tab -> !tab.shouldShow(entry)).collect(Collectors.toList());
        tabbed.getTabs().removeAll(toBeRemoved);

        // Next add all the visible tabs (if not already present) at the right position
        for (int i = 0; i < visibleTabs.size(); i++) {
            Tab toBeAdded = visibleTabs.get(i);
            Tab shown = null;
            if (i < tabbed.getTabs().size()) {
                shown = tabbed.getTabs().get(i);
            }
            if (!toBeAdded.equals(shown)) {
                tabbed.getTabs().add(i, toBeAdded);
            }
        }
    }

    /**
     * @return the currently edited entry
     */
    public BibEntry getEntry() {
        return entry;
    }

    /**
     * Sets the entry to edit.
     */
    public void setEntry(BibEntry entry) {
        Objects.requireNonNull(entry);

        // Remove subscription for old entry if existing
        if (typeSubscription != null) {
            typeSubscription.unsubscribe();
        }

        this.entry = entry;

        recalculateVisibleTabs();
        if (entryEditorPreferences.showSourceTabByDefault()) {
            tabbed.getSelectionModel().select(sourceTab);
        }

        // Notify current tab about new entry
        getSelectedTab().notifyAboutFocus(entry);

        setupToolBar();

        // Subscribe to type changes for rebuilding the currently visible tab
        typeSubscription = EasyBind.subscribe(this.entry.typeProperty(), type -> {
            typeLabel.setText(new TypedBibEntry(entry, databaseContext.getMode()).getTypeForDisplay());
            recalculateVisibleTabs();
            getSelectedTab().notifyAboutFocus(entry);
        });
    }

    private EntryEditorTab getSelectedTab() {
        return (EntryEditorTab) tabbed.getSelectionModel().getSelectedItem();
    }

    /**
     * Refreshes the type label and (re)wires the type-change and fetcher menus
     * for the current entry. Called from setEntry().
     */
    private void setupToolBar() {
        // Update type label
        TypedBibEntry typedEntry = new TypedBibEntry(entry, databaseContext.getMode());
        typeLabel.setText(typedEntry.getTypeForDisplay());

        // Add type change menu
        ContextMenu typeMenu = new ChangeEntryTypeMenu().getChangeEntryTypePopupMenu(entry, databaseContext, undoManager);
        typeLabel.setOnMouseClicked(event -> typeMenu.show(typeLabel, Side.RIGHT, 0, 0));
        typeChangeButton.setOnMouseClicked(event -> typeMenu.show(typeChangeButton, Side.RIGHT, 0, 0));

        // Add menu for fetching bibliographic information
        ContextMenu fetcherMenu = new ContextMenu();
        for (EntryBasedFetcher fetcher : WebFetchers.getEntryBasedFetchers(entryEditorPreferences.getImportFormatPreferences())) {
            MenuItem fetcherMenuItem = new MenuItem(fetcher.getName());
            fetcherMenuItem.setOnAction(event -> fetchAndMerge(fetcher));
            fetcherMenu.getItems().add(fetcherMenuItem);
        }
        fetcherButton.setOnMouseClicked(event -> fetcherMenu.show(fetcherButton, Side.RIGHT, 0, 0));
    }

    private void fetchAndMerge(EntryBasedFetcher fetcher) {
        new FetchAndMergeEntry(panel, taskExecutor).fetchAndMerge(entry, fetcher);
    }

    /**
     * Selects the first fields tab showing the given field and focuses it.
     * Runs on the JavaFX application thread.
     */
    public void setFocusToField(Field field) {
        DefaultTaskExecutor.runInJavaFXThread(() -> {
            for (Tab tab : tabbed.getTabs()) {
                if ((tab instanceof FieldsEditorTab) && ((FieldsEditorTab) tab).getShownFields().contains(field)) {
                    FieldsEditorTab fieldsEditorTab = (FieldsEditorTab) tab;
                    tabbed.getSelectionModel().select(tab);
                    fieldsEditorTab.requestFocus(field);
                }
            }
        });
    }

    /**
     * Pushes updated preview preferences to every fields tab's preview panel.
     */
    public void updatePreviewInTabs(PreviewPreferences previewPreferences) {
        for (Tab tab : this.entryEditorTabs) {
            if (tab instanceof FieldsEditorTab) {
                ((FieldsEditorTab) tab).previewPanel.updateLayout(previewPreferences);
            }
        }
    }
}
/*
 * Copyright 2012 GitHub Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.zion;

import static org.eclipse.egit.github.core.RepositoryId.createFromUrl;
import android.content.Intent;

import java.io.Serializable;
import java.util.ArrayList;

import org.eclipse.egit.github.core.Gist;
import org.eclipse.egit.github.core.GistFile;
import org.eclipse.egit.github.core.Issue;
import org.eclipse.egit.github.core.Repository;
import org.eclipse.egit.github.core.RepositoryId;
import org.eclipse.egit.github.core.User;

/**
 * Helper for creating intents
 */
public class Intents {

    /**
     * Prefix for all intents created
     */
    public static final String INTENT_PREFIX = "jp.forkhub.mobile.";

    /**
     * Prefix for all extra data added to intents
     */
    public static final String INTENT_EXTRA_PREFIX = INTENT_PREFIX + "extra.";

    /**
     * Repository handle
     */
    public static final String EXTRA_REPOSITORY = INTENT_EXTRA_PREFIX + "REPOSITORY";

    /**
     * Repository ids collection handle
     */
    public static final String EXTRA_REPOSITORIES = INTENT_EXTRA_PREFIX + "REPOSITORIES";

    /**
     * Repository name
     */
    public static final String EXTRA_REPOSITORY_NAME = INTENT_EXTRA_PREFIX + "REPOSITORY_NAME";

    /**
     * Repository owner
     */
    public static final String EXTRA_REPOSITORY_OWNER = INTENT_EXTRA_PREFIX + "REPOSITORY_OWNER";

    /**
     * Issue number
     */
    public static final String EXTRA_ISSUE_NUMBER = INTENT_EXTRA_PREFIX + "ISSUE_NUMBER";

    /**
     * Issue handle
     */
    public static final String EXTRA_ISSUE = INTENT_EXTRA_PREFIX + "ISSUE";

    /**
     * Issue number collection handle
     */
    public static final String EXTRA_ISSUE_NUMBERS = INTENT_EXTRA_PREFIX + "ISSUE_NUMBERS";

    /**
     * Gist id
     */
    public static final String EXTRA_GIST_ID = INTENT_EXTRA_PREFIX + "GIST_ID";

    /**
     * List of Gist ids
     */
    public static final String EXTRA_GIST_IDS = INTENT_EXTRA_PREFIX + "GIST_IDS";

    /**
     * Gist handle
     */
    public static final String EXTRA_GIST = INTENT_EXTRA_PREFIX + "GIST";

    /**
     * Gist file handle
     */
    public static final String EXTRA_GIST_FILE = INTENT_EXTRA_PREFIX + "GIST_FILE";

    /**
     * User handle
     */
    public static final String EXTRA_USER = INTENT_EXTRA_PREFIX + "USER";

    /**
     * {@link ArrayList} handle of {@link User} objects
     */
    public static final String EXTRA_USERS = INTENT_EXTRA_PREFIX + "USERS";

    /**
     * Boolean value which indicates if a user is a collaborator on the repo
     */
    public static final String EXTRA_IS_COLLABORATOR = INTENT_EXTRA_PREFIX + "IS_COLLABORATOR";

    /**
     * Issue filter handle
     */
    public static final String EXTRA_ISSUE_FILTER = INTENT_EXTRA_PREFIX + "ISSUE_FILTER";

    /**
     * Comment body
     */
    public static final String EXTRA_COMMENT_BODY = INTENT_EXTRA_PREFIX + "COMMENT_BODY";

    /**
     * Comments handle
     */
    public static final String EXTRA_COMMENTS = INTENT_EXTRA_PREFIX + "COMMENTS";

    /**
     * Comment handle
     */
    public static final String EXTRA_COMMENT = INTENT_EXTRA_PREFIX + "COMMENT";

    /**
     * Integer position
     */
    public static final String EXTRA_POSITION = INTENT_EXTRA_PREFIX + "POSITION";

    /**
     * Base commit name
     */
    public static final String EXTRA_BASE = INTENT_EXTRA_PREFIX + "BASE";

    /**
     * Base commit names
     */
    public static final String EXTRA_BASES = INTENT_EXTRA_PREFIX + "BASES";

    /**
     * Head commit name
     */
    public static final String EXTRA_HEAD = INTENT_EXTRA_PREFIX + "HEAD";

    /**
     * Handle to a {@link String} path
     */
    public static final String EXTRA_PATH = INTENT_EXTRA_PREFIX + "PATH";

    /**
     * Resolve the {@link RepositoryId} referenced by the given intent
     *
     * @param intent intent carrying EXTRA_REPOSITORY_NAME and EXTRA_REPOSITORY_OWNER
     * @return repository id
     */
    public static RepositoryId repoFrom(Intent intent) {
        String repoName = intent.getStringExtra(EXTRA_REPOSITORY_NAME);
        String repoOwner = intent.getStringExtra(EXTRA_REPOSITORY_OWNER);
        return RepositoryId.create(repoOwner, repoName);
    }

    /**
     * Builder for generating an intent configured with extra data such as an
     * issue, repository, or gist
     */
    public static class Builder {

        private final Intent intent;

        /**
         * Create builder with suffix
         *
         * @param actionSuffix appended to {@link #INTENT_PREFIX} to form the action,
         *            e.g. "repos.VIEW"
         */
        public Builder(String actionSuffix) {
            // actionSuffix = e.g. "repos.VIEW"
            intent = new Intent(INTENT_PREFIX + actionSuffix);
        }

        /**
         * Add repository id to intent being built up
         *
         * @param repositoryId
         * @return this builder
         */
        public Builder repo(RepositoryId repositoryId) {
            return add(EXTRA_REPOSITORY_NAME, repositoryId.getName()).add(
                    EXTRA_REPOSITORY_OWNER, repositoryId.getOwner());
        }

        /**
         * Add repository to intent being built up
         *
         * @param repository
         * @return this builder
         */
        public Builder repo(Repository repository) {
            return add(EXTRA_REPOSITORY, repository);
        }

        /**
         * Add issue to intent being built up; also stores the issue's
         * repository (parsed from its HTML URL) and number
         *
         * @param issue
         * @return this builder
         */
        public Builder issue(Issue issue) {
            return repo(createFromUrl(issue.getHtmlUrl())).add(EXTRA_ISSUE,
                    issue).add(EXTRA_ISSUE_NUMBER, issue.getNumber());
        }

        /**
         * Add gist to intent being built up
         *
         * @param gist
         * @return this builder
         */
        public Builder gist(Gist gist) {
            return add(EXTRA_GIST, gist);
        }

        /**
         * Add gist id to intent being built up
         *
         * @param gist the gist id
         * @return this builder
         */
        public Builder gist(String gist) {
            return add(EXTRA_GIST_ID, gist);
        }

        /**
         * Add gist file to intent being built up
         *
         * @param file
         * @return this builder
         */
        public Builder gistFile(GistFile file) {
            return add(EXTRA_GIST_FILE, file);
        }

        /**
         * Add user to intent being built up
         *
         * @param user
         * @return this builder
         */
        public Builder user(User user) {
            return add(EXTRA_USER, user);
        }

        /**
         * Add extra field data value to intent being built up
         *
         * @param fieldName
         * @param value
         * @return this builder
         */
        public Builder add(String fieldName, String value) {
            intent.putExtra(fieldName, value);
            return this;
        }

        /**
         * Add extra field data values to intent being built up
         *
         * @param fieldName
         * @param values
         * @return this builder
         */
        public Builder add(String fieldName, CharSequence[] values) {
            intent.putExtra(fieldName, values);
            return this;
        }

        /**
         * Add extra field data value to intent being built up
         *
         * @param fieldName
         * @param value
         * @return this builder
         */
        public Builder add(String fieldName, int value) {
            intent.putExtra(fieldName, value);
            return this;
        }

        /**
         * Add extra field data value to intent being built up
         *
         * @param fieldName
         * @param values
         * @return this builder
         */
        public Builder add(String fieldName, int[] values) {
            intent.putExtra(fieldName, values);
            return this;
        }

        /**
         * Add extra field data value to intent being built up
         *
         * @param fieldName
         * @param values
         * @return this builder
         */
        public Builder add(String fieldName, boolean[] values) {
            intent.putExtra(fieldName, values);
            return this;
        }

        /**
         * Add extra field data value to intent being built up
         *
         * @param fieldName
         * @param value
         * @return this builder
         */
        public Builder add(String fieldName, Serializable value) {
            intent.putExtra(fieldName, value);
            return this;
        }

        /**
         * Get built intent
         *
         * @return intent
         */
        public Intent toIntent() {
            return intent;
        }
    }
}
/*
 * Copyright (C) 2015 University of Oregon
 *
 * You may distribute under the terms of either the GNU General Public
 * License or the Apache License, as specified in the LICENSE file.
 *
 * For more information, see the LICENSE file.
 */
package vnmr.bo;

import java.awt.*;
import java.util.*;
import java.beans.*;
import java.awt.dnd.*;
import java.awt.event.*;
import java.awt.datatransfer.*;
import java.io.*;
import javax.swing.*;
import javax.swing.table.TableColumn;

import vnmr.util.*;
import vnmr.ui.*;
import vnmr.part11.*;
import vnmr.templates.*;

/**
 * A combo-box + file-table VnmrJ panel widget. The combo box selects a path
 * (or history type) and the table shows the corresponding file entries;
 * clicking a row prepares a Vnmr command, double-clicking sends it.
 */
public class VComboFileTable extends ComboFileTable
    implements VObjIF, VObjDef, DropTargetListener, ExpListenerIF, PropertyChangeListener {

    private String type = null;
    private String fileName = null;       // table source (path or $VALUE/if expression)
    private String fileType = "file";     // one of m_types, e.g. "cmdHistory", "records"
    private String fg = null;
    private String bg = null;
    private String selVars = null;
    private String vnmrCmd = null;        // command sent on double-click
    private String vnmrCmd2 = null;
    private String showVal = null;
    private String setVal = null;
    private String fontName = null;
    private String fontStyle = null;
    private String fontSize = null;
    private Color fgColor = null;
    private Color bgColor, orgBg;
    private Font font = null;
    private String keyStr = null;
    private MouseAdapter ml;
    private boolean isEditing = false;
    private boolean inEditMode = false;
    private boolean isFocused = false;
    private boolean inChangeMode = false;
    private boolean inAddMode = false;
    private int isActive = 1;             // >0 active, 0 idle, <0 disabled
    private ButtonIF vnmrIf;
    private SessionShare sshare;
    private boolean inModalMode = false;
    private boolean fileExpr = false;     // true if fileName is a Vnmr expression
    private boolean tableValid = false;
    private boolean tableUpdate = false;
    private boolean debug = false;
    private File m_objFile;
    private long m_lTime = 0;             // last-modified stamp of m_objFile
    private FileMenu m_menu;
    private FileTable m_table;
    private FileTableModel m_tableModel;

    public VComboFileTable(SessionShare ss, ButtonIF vif, String typ) {
        this.sshare = ss;
        this.type = typ;
        this.vnmrIf = vif;
        orgBg = getBackground();
        bgColor = Util.getBgColor();
        setOpaque(false);
        setBackground(bgColor);

        m_menu = getFileMenu();
        m_table = getTable();
        m_table.setBackground(Color.white);

        // Single click selects a row (prepares vnmrCmd); double click sends it.
        ml = new MouseAdapter() {
            public void mouseClicked(MouseEvent evt) {
                if (debug)
                    System.out.println("VComboFileTable.mouseClicked " + vnmrCmd);
                int clicks = evt.getClickCount();
                if (inModalMode || vnmrIf == null)
                    return;
                if (inAddMode || inChangeMode || inEditMode)
                    return;
                if (isActive < 0)
                    return;
                if (clicks >= 2) {
                    sendValueCmd();
                } else if (clicks == 1) {
                    setValueCmd(m_table.getSelectedRow());
                }
            }
        };
        m_table.addMouseListener(ml);

        m_menu.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                JComboBox cb = (JComboBox) e.getSource();
                // Selected path is re-read by updateFileTable() itself.
                String path = (String) cb.getSelectedItem();
                updateFileTable();
            }
        });
        new DropTarget(this, this);
        DisplayOptions.addChangeListener(this);
    }

    /** Returns the value stored for the given table row. */
    public String getRowValue(int row) {
        return ((String) m_tableModel.getRowValue(row));
    }

    public Vector getOutputTypes() {
        return (m_tableModel.getOutputTypes());
    }

    /** Table cells are read-only. */
    public boolean isCellEditable(int row, int column) {
        return false;
    }

    /** Derives the Vnmr command for the selected row from the table type. */
    private void setValueCmd(int row) {
        if (fileType.equals("cmdHistory"))
            vnmrCmd = getRowValue(row);
        else if (fileType.equals("records"))
            vnmrCmd = "rt('" + getRowValue(row) + "')";
        else
            vnmrCmd = null;
    }

    /**
     * (Re)builds the combo/table from the given file path.
     *
     * @param file absolute path, or a name resolved under USER/PERSISTENCE/
     * @return true unless suppressed by inAddMode
     */
    private boolean getComboTable(String file) {
        if (inAddMode)
            return false;
        // assuming if file starts with "/", it is a full path.
        // otherwise append file to USER/PERSISTENCE/
        String str = file;
        if (!file.startsWith("/"))
            str = FileUtil.savePath("USER/PERSISTENCE/" + file);
        String label = null;
        // BUGFIX: was fileType.equals("comdHistory") — a typo that never matched
        // the "cmdHistory" type declared in m_types and used by setValueCmd().
        if (fileType.equals("cmdHistory"))
            label = "Select a type";
        else
            label = "Select a path";
        Vector paths = new Vector();
        paths.addElement(str);
        removeAll();
        makeComboFileTable(paths, label, 0);
        return (true);
    }

    // PropertyChangeListener interface: re-read display options (colors, font).
    public void propertyChange(PropertyChangeEvent evt) {
        fgColor = DisplayOptions.getColor(fg);
        setForeground(fgColor);
        changeFont();
        bgColor = Util.getBgColor();
        setBackground(bgColor);
    }

    // VObjIF interface
    public void setDefLabel(String s) {
    }

    public void setDefColor(String c) {
        this.fg = c;
        fgColor = VnmrRgb.getColorByName(c);
        setForeground(fgColor);
    }

    public void setEditStatus(boolean s) {
        isEditing = s;
        repaint();
    }

    public void setEditMode(boolean s) {
    }

    public void changeFont() {
        font = DisplayOptions.getFont(fontName, fontStyle, fontSize);
        setFont(font);
        repaint();
    }

    public void changeFocus(boolean s) {
        isFocused = s;
        repaint();
    }

    public String getAttribute(int attr) {
        int k;
        String s;
        switch (attr) {
        case TYPE:
            return type;
        case KEYSTR:
            return keyStr;
        case PANEL_FILE:
            return fileName;
        case PANEL_TYPE:
            return fileType;
        case FGCOLOR:
            return fg;
        case BGCOLOR:
            return bg;
        case SHOW:
            return showVal;
        case FONT_NAME:
            return fontName;
        case FONT_STYLE:
            return fontStyle;
        case FONT_SIZE:
            return fontSize;
        case CMD:
            return vnmrCmd;
        case SETVAL:
            return setVal;
        case VARIABLE:
            return selVars;
        case VAR2:
            k = m_table.getSelectedRow();
            return (String) getRowValue(k);
        case VALUE:
            k = m_table.getSelectedRow();
            return (String) getRowValue(k);
        default:
            return null;
        }
    }

    public void setAttribute(int attr, String c) {
        Vector v;
        switch (attr) {
        case TYPE:
            type = c;
            break;
        case PANEL_FILE:
            fileName = c;
            if (c != null) {
                // $VALUE/if prefixes mean the source is a Vnmr expression,
                // resolved via an async query instead of a direct path.
                if (c.startsWith("$VALUE") || c.startsWith("if"))
                    fileExpr = true;
            }
            tableValid = false;
            break;
        case PANEL_TYPE:
            fileType = c;
            tableValid = false;
            break;
        case FGCOLOR:
            fg = c;
            fgColor = DisplayOptions.getColor(fg);
            setForeground(fgColor);
            repaint();
            break;
        case BGCOLOR:
            bg = c;
            if (c == null || c.length() == 0 || c.equals("default"))
                bgColor = Util.getBgColor();
            else
                bgColor = DisplayOptions.getColor(c);
            setBackground(bgColor);
            repaint();
            break;
        case SHOW:
            showVal = c;
            break;
        case FONT_NAME:
            fontName = c;
            break;
        case FONT_STYLE:
            fontStyle = c;
            break;
        case FONT_SIZE:
            fontSize = c;
            break;
        case VARIABLE:
            selVars = c;
            break;
        case VAR2:
            tableUpdate = true;
            int k = m_table.getSelectedRow();
            setValueCmd(k);
            break;
        case SETVAL:
            setVal = c;
            break;
        case CMD:
            vnmrCmd = c;
            break;
        case VALUE:
            //inChangeMode = true;
            k = m_table.getSelectedRow();
            setValueCmd(k);
            //inChangeMode = false;
            break;
        }
    }

    public ButtonIF getVnmrIF() {
        return vnmrIf;
    }

    public void setVnmrIF(ButtonIF vif) {
        vnmrIf = vif;
    }

    /** Asks Vnmr to evaluate fileName and deliver it as the "content" param. */
    private void sendContentQuery() {
        if (debug)
            System.out.println("VComboFileTable.sendContentQuery " + fileName);
        if (fileName != null)
            vnmrIf.asyncQueryParamNamed("content", this, fileName);
        else {
            setVisible(false);
        }
    }

    private void sendValueQuery() {
        if (debug)
            System.out.println("VComboFileTable.sendValueQuery");
        if (setVal != null)
            vnmrIf.asyncQueryParam(this, setVal);
    }

    private void sendShowQuery() {
        if (debug)
            System.out.println("VComboFileTable.sendShowQuery");
        if (showVal != null)
            vnmrIf.asyncQueryShow(this, showVal);
    }

    private void sendValueCmd() {
        if (debug)
            System.out.println("VComboFileTable.sendValueCmd " + vnmrCmd);
        if (vnmrCmd != null)
            vnmrIf.sendVnmrCmd(this, vnmrCmd);
    }

    // ExpListenerIF interface: a watched Vnmr variable changed.
    public void updateValue(Vector params) {
        if (vnmrIf == null)
            return;
        if (debug)
            System.out.println("VComboFileTable.updateValue " + params);
        if (tableUpdate)
            updateContent(params);
        String vars = getAttribute(VARIABLE);
        if (vars == null)
            return;
        StringTokenizer tok = new StringTokenizer(vars, " ,\n");
        while (tok.hasMoreTokens()) {
            String var = tok.nextToken();
            for (int k = 0; k < params.size(); k++) {
                if (var.equals(params.elementAt(k))) {
                    if (debug)
                        System.out.println("VComboFileTable.updateValue " + var);
                    if (showVal != null)
                        sendShowQuery();
                    else if (setVal != null)
                        sendValueQuery();
                    return;
                }
            }
        }
    }

    /** Refreshes the table content when one of the VAR2 variables changed. */
    private void updateContent(Vector params) {
        if (vnmrIf == null || fileName == null)
            return;
        String vars = getAttribute(VAR2);
        if (vars == null)
            return;
        StringTokenizer tok = new StringTokenizer(vars, " ,\n");
        while (tok.hasMoreTokens()) {
            String var = tok.nextToken();
            for (int k = 0; k < params.size(); k++) {
                if (var.equals(params.elementAt(k))) {
                    if (debug)
                        System.out.println("VComboFileTable.updateContent " + var);
                    tableValid = false;
                    if (fileExpr)
                        sendContentQuery();
                    else {
                        getComboTable(fileName);
                    }
                }
            }
        }
    }

    public void updateValue() {
        if (vnmrIf == null || inAddMode || inEditMode)
            return;
        if (debug)
            System.out.println("VComboFileTable.updateValue");
        if (fileName != null && !tableValid) {
            if (fileExpr)
                sendContentQuery();
            else {
                getComboTable(fileName);
            }
        } else if (!inEditMode) {
            // Hide the widget entirely when there is nothing to show.
            if (m_table.getRowCount() <= 0)
                setVisible(false);
            else
                setVisible(true);
        }
        if (showVal != null)
            sendShowQuery();
        else if (setVal != null)
            sendValueQuery();
    }

    /** Callback for async queries; "content" carries the resolved table source. */
    public void setValue(ParamIF pf) {
        if (pf == null || pf.value == null)
            return;
        if (debug)
            System.out.println("VComboFileTable.setValue " + pf.value + " " + pf.name);
        if (pf.name.equals("content")) {
            tableValid = false;
            getComboTable(pf.value);
        }
    }

    /** Callback for the show query; toggles enabled state and background. */
    public void setShowValue(ParamIF pf) {
        if (pf != null && pf.value != null) {
            String s = pf.value.trim();
            isActive = Integer.parseInt(s);
            if (debug)
                System.out.println("VComboFileTable.setShowValue " + pf.value + " " + pf.name);
            if (isActive > 0)
                setBackground(bgColor);
            else {
                if (isActive == 0)
                    setBackground(Global.IDLECOLOR);
                else
                    setBackground(Global.NPCOLOR);
            }
            if (isActive >= 0) {
                setEnabled(true);
                if (setVal != null)
                    sendValueQuery();
            } else
                setEnabled(false);
        }
    }

    public void paint(Graphics g) {
        super.paint(g);
        // Check if the file has been modified, and if so then get the current version.
        getCurrentFile();
        if (!isEditing)
            return;
        if (!tableValid && !tableUpdate && !fileExpr) {
            getComboTable(fileName);
        }
        // In edit mode, draw a focus border (yellow focused, green otherwise).
        Dimension psize = getPreferredSize();
        if (isFocused)
            g.setColor(Color.yellow);
        else
            g.setColor(Color.green);
        g.drawLine(0, 0, psize.width, 0);
        g.drawLine(0, 0, 0, psize.height);
        g.drawLine(0, psize.height - 1, psize.width - 1, psize.height - 1);
        g.drawLine(psize.width - 1, 0, psize.width - 1, psize.height - 1);
    }

    /**
     * Checks if the file has been modified by comparing the modify stamp.
     */
    private void getCurrentFile() {
        if (m_objFile != null) {
            long lCurrTime = m_objFile.lastModified();
            if (m_lTime != lCurrTime) {
                tableValid = false;
                fileExpr = false;
                if (fileName != null) {
                    if (fileName.startsWith("$VALUE") || fileName.startsWith("if"))
                        fileExpr = true;
                }
                updateValue();
            }
        }
    }

    public void refresh() {
    }

    public void destroy() {
        DisplayOptions.removeChangeListener(this);
    }

    public void addDefChoice(String c) {
    }

    public void addDefValue(String c) {
    }

    public void itemStateChanged(ItemEvent e) {
    }

    public void setDefLoc(int x, int y) {
    }

    public void dragEnter(DropTargetDragEvent e) {
    }

    public void dragExit(DropTargetEvent e) {
    }

    public void dragOver(DropTargetDragEvent e) {
    }

    public void dropActionChanged(DropTargetDragEvent e) {
    }

    public void drop(DropTargetDropEvent e) {
        VObjDropHandler.processDrop(e, this, inEditMode);
    }

    public void setModalMode(boolean s) {
        inModalMode = s;
    }

    public void sendVnmrCmd() {
        if (vnmrCmd == null || vnmrIf == null)
            return;
        if (debug)
            System.out.println("VComboFileTable.sendVnmrCmd");
        sendValueCmd();
    }

    // Table types selectable in the panel editor.
    private final static String[] m_types = {"records", "s_auditTrailFiles",
        "cmdHistory", "s_auditTrail", "d_auditTrail" };

    // Editable attributes shown in the panel editor.
    private final static Object[][] attributes = {
        {new Integer(VARIABLE), "Selection variables:"},
        {new Integer(VAR2), "Content variables:"},
        {new Integer(SETVAL), "Value of item:"},
        {new Integer(SHOW), "Enable condition:"},
        {new Integer(CMD), "Vnmr command:"},
        {new Integer(PANEL_FILE), "Table source:"},
        {new Integer(PANEL_TYPE), "Table type:", m_types},
    };

    public Object[][] getAttributes() {
        return attributes;
    }

    public void setSizeRatio(double w, double h) {
    }

    public Point getDefLoc() {
        return getLocation();
    }
}