gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package org.apache.helix.manager.zk;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.I0Itec.zkclient.DataUpdater;
import org.apache.helix.AccessOption;
import org.apache.helix.BaseDataAccessor;
import org.apache.helix.PropertyPathConfig;
import org.apache.helix.PropertyType;
import org.apache.helix.TestHelper;
import org.apache.helix.ZNRecord;
import org.apache.helix.ZNRecordUpdater;
import org.apache.helix.ZkUnitTestBase;
import org.apache.helix.manager.zk.ZNRecordSerializer;
import org.apache.helix.manager.zk.ZkBaseDataAccessor;
import org.apache.helix.manager.zk.ZkBaseDataAccessor.AccessResult;
import org.apache.helix.manager.zk.ZkBaseDataAccessor.RetCode;
import org.apache.helix.manager.zk.ZkClient;
import org.apache.zookeeper.data.Stat;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Tests for {@link ZkBaseDataAccessor}: synchronous CRUD operations plus the
 * asynchronous (batched) child operations.
 *
 * <p>Each sync test writes under a path derived from the test class/method name
 * so tests do not collide on the shared {@code _gZkClient} connection supplied
 * by {@link ZkUnitTestBase}.
 */
public class TestZkBaseDataAccessor extends ZkUnitTestBase {

  /** set() on a non-existent path should create the node and write the record. */
  @Test
  public void testSyncSet() {
    String className = TestHelper.getTestClassName();
    String methodName = TestHelper.getTestMethodName();
    String testName = className + "_" + methodName;
    System.out.println("START " + testName + " at " + new Date(System.currentTimeMillis()));

    String path = String.format("/%s/%s", testName, "msg_0");
    ZNRecord record = new ZNRecord("msg_0");
    BaseDataAccessor<ZNRecord> accessor = new ZkBaseDataAccessor<ZNRecord>(_gZkClient);

    boolean success = accessor.set(path, record, AccessOption.PERSISTENT);
    Assert.assertTrue(success);

    // Read back through the raw client to verify the write really happened.
    ZNRecord getRecord = _gZkClient.readData(path);
    Assert.assertNotNull(getRecord);
    Assert.assertEquals(getRecord.getId(), "msg_0");

    System.out.println("END " + testName + " at " + new Date(System.currentTimeMillis()));
  }

  /**
   * set() with an explicit expected version: a non-matching version (0 on a
   * missing node) must fail without creating the node; -1 means "any version"
   * and must succeed. Also verifies that the create-mode flag is ignored when
   * the node already exists.
   */
  @Test
  public void testSyncSetWithVersion() {
    String className = TestHelper.getTestClassName();
    String methodName = TestHelper.getTestMethodName();
    String testName = className + "_" + methodName;
    System.out.println("START " + testName + " at " + new Date(System.currentTimeMillis()));

    String path = String.format("/%s/%s", testName, "msg_0");
    ZNRecord record = new ZNRecord("msg_0");
    BaseDataAccessor<ZNRecord> accessor = new ZkBaseDataAccessor<ZNRecord>(_gZkClient);

    // set persistent with version 0 on a missing node: must fail, no node created.
    boolean success = accessor.set(path, record, 0, AccessOption.PERSISTENT);
    Assert.assertFalse(success, "Should fail since version not match");
    try {
      _gZkClient.readData(path, false);
      Assert.fail("Should get no node exception");
    } catch (Exception e) {
      // expected: node must not exist
    }

    // version -1 means "ignore version": creates the node.
    success = accessor.set(path, record, -1, AccessOption.PERSISTENT);
    Assert.assertTrue(success);
    ZNRecord getRecord = _gZkClient.readData(path);
    Assert.assertNotNull(getRecord);
    Assert.assertEquals(getRecord.getId(), "msg_0");

    // Same sequence for an ephemeral node.
    path = String.format("/%s/%s", testName, "msg_1");
    record = new ZNRecord("msg_1");
    success = accessor.set(path, record, 0, AccessOption.EPHEMERAL);
    Assert.assertFalse(success);
    try {
      _gZkClient.readData(path, false);
      Assert.fail("Should get no node exception");
    } catch (Exception e) {
      // expected: node must not exist
    }
    success = accessor.set(path, record, -1, AccessOption.EPHEMERAL);
    Assert.assertTrue(success);
    getRecord = _gZkClient.readData(path);
    Assert.assertNotNull(getRecord);
    Assert.assertEquals(getRecord.getId(), "msg_1");

    // Node exists at version 0 now; the PERSISTENT flag only affects creation,
    // so this set succeeds despite the node being ephemeral.
    record.setSimpleField("key0", "value0");
    success = accessor.set(path, record, 0, AccessOption.PERSISTENT);
    Assert.assertTrue(success, "Should pass. AccessOption.PERSISTENT is ignored");
    getRecord = _gZkClient.readData(path);
    Assert.assertNotNull(getRecord);
    Assert.assertEquals(getRecord.getSimpleFields().size(), 1);
    Assert.assertNotNull(getRecord.getSimpleField("key0"));
    Assert.assertEquals(getRecord.getSimpleField("key0"), "value0");

    System.out.println("END " + testName + " at " + new Date(System.currentTimeMillis()));
  }

  /**
   * doSet() must create all missing ancestors and report every created path in
   * {@link AccessResult#_pathCreated}.
   */
  @Test
  public void testSyncDoSet() {
    String className = TestHelper.getTestClassName();
    String methodName = TestHelper.getTestMethodName();
    String testName = className + "_" + methodName;
    System.out.println("START " + testName + " at " + new Date(System.currentTimeMillis()));

    String path = String.format("/%s/%s/%s", testName, "msg_0", "submsg_0");
    ZNRecord record = new ZNRecord("submsg_0");
    ZkBaseDataAccessor<ZNRecord> accessor = new ZkBaseDataAccessor<ZNRecord>(_gZkClient);

    AccessResult result = accessor.doSet(path, record, -1, AccessOption.PERSISTENT);
    Assert.assertEquals(result._retCode, RetCode.OK);
    // Three nodes created: /<test>, /<test>/msg_0, and the leaf itself.
    Assert.assertEquals(result._pathCreated.size(), 3);
    Assert.assertTrue(result._pathCreated.contains(String.format("/%s", testName)));
    Assert.assertTrue(result._pathCreated.contains(String.format("/%s/%s", testName, "msg_0")));
    Assert.assertTrue(result._pathCreated.contains(path));

    Assert.assertTrue(_gZkClient.exists(String.format("/%s", testName)));
    Assert.assertTrue(_gZkClient.exists(String.format("/%s/%s", testName, "msg_0")));
    ZNRecord getRecord = _gZkClient.readData(path);
    Assert.assertNotNull(getRecord);
    Assert.assertEquals(getRecord.getId(), "submsg_0");

    System.out.println("END " + testName + " at " + new Date(System.currentTimeMillis()));
  }

  /**
   * create() succeeds on a missing node and fails (without overwriting the
   * data) when the node already exists.
   */
  @Test
  public void testSyncCreate() {
    String className = TestHelper.getTestClassName();
    String methodName = TestHelper.getTestMethodName();
    String testName = className + "_" + methodName;
    System.out.println("START " + testName + " at " + new Date(System.currentTimeMillis()));

    String path = String.format("/%s/%s", testName, "msg_0");
    ZNRecord record = new ZNRecord("msg_0");
    ZkBaseDataAccessor<ZNRecord> accessor = new ZkBaseDataAccessor<ZNRecord>(_gZkClient);

    boolean success = accessor.create(path, record, AccessOption.PERSISTENT);
    Assert.assertTrue(success);
    ZNRecord getRecord = _gZkClient.readData(path);
    Assert.assertNotNull(getRecord);
    Assert.assertEquals(getRecord.getId(), "msg_0");

    record.setSimpleField("key0", "value0");
    success = accessor.create(path, record, AccessOption.PERSISTENT);
    Assert.assertFalse(success, "Should fail since node already exists");
    // The stored data must still be the original record (no simple fields).
    getRecord = _gZkClient.readData(path);
    Assert.assertNotNull(getRecord);
    Assert.assertEquals(getRecord.getSimpleFields().size(), 0);

    System.out.println("END " + testName + " at " + new Date(System.currentTimeMillis()));
  }

  /**
   * update() creates the node when missing, merges on subsequent calls, and
   * returns false when the updater throws.
   */
  @Test
  public void testSyncUpdate() {
    String className = TestHelper.getTestClassName();
    String methodName = TestHelper.getTestMethodName();
    String testName = className + "_" + methodName;
    System.out.println("START " + testName + " at " + new Date(System.currentTimeMillis()));

    String path = String.format("/%s/%s", testName, "msg_0");
    ZNRecord record = new ZNRecord("msg_0");
    ZkBaseDataAccessor<ZNRecord> accessor = new ZkBaseDataAccessor<ZNRecord>(_gZkClient);

    boolean success = accessor.update(path, new ZNRecordUpdater(record), AccessOption.PERSISTENT);
    Assert.assertTrue(success);
    ZNRecord getRecord = _gZkClient.readData(path);
    Assert.assertNotNull(getRecord);
    Assert.assertEquals(getRecord.getId(), "msg_0");

    record.setSimpleField("key0", "value0");
    success = accessor.update(path, new ZNRecordUpdater(record), AccessOption.PERSISTENT);
    Assert.assertTrue(success);
    getRecord = _gZkClient.readData(path);
    Assert.assertNotNull(getRecord);
    Assert.assertEquals(getRecord.getSimpleFields().size(), 1);
    Assert.assertNotNull(getRecord.getSimpleField("key0"));
    Assert.assertEquals(getRecord.getSimpleField("key0"), "value0");

    // An updater that throws must yield failure and leave the data untouched.
    success = accessor.update(path, new DataUpdater<ZNRecord>() {
      @Override
      public ZNRecord update(ZNRecord currentData) {
        throw new RuntimeException("IGNORABLE: test throw exception from updater");
      }
    }, AccessOption.PERSISTENT);
    Assert.assertFalse(success);
    getRecord = _gZkClient.readData(path);
    Assert.assertNotNull(getRecord);
    Assert.assertEquals(getRecord.getSimpleFields().size(), 1);

    System.out.println("END " + testName + " at " + new Date(System.currentTimeMillis()));
  }

  /** remove() fails on a missing node, succeeds on an existing one. */
  @Test
  public void testSyncRemove() {
    String className = TestHelper.getTestClassName();
    String methodName = TestHelper.getTestMethodName();
    String testName = className + "_" + methodName;
    System.out.println("START " + testName + " at " + new Date(System.currentTimeMillis()));

    String path = String.format("/%s/%s", testName, "msg_0");
    ZNRecord record = new ZNRecord("msg_0");
    ZkBaseDataAccessor<ZNRecord> accessor = new ZkBaseDataAccessor<ZNRecord>(_gZkClient);

    boolean success = accessor.remove(path, 0);
    Assert.assertFalse(success);

    success = accessor.create(path, record, AccessOption.PERSISTENT);
    Assert.assertTrue(success);
    ZNRecord getRecord = _gZkClient.readData(path);
    Assert.assertNotNull(getRecord);
    Assert.assertEquals(getRecord.getId(), "msg_0");

    success = accessor.remove(path, 0);
    Assert.assertTrue(success);
    Assert.assertFalse(_gZkClient.exists(path));

    System.out.println("END " + testName + " at " + new Date(System.currentTimeMillis()));
  }

  /**
   * get() returns null (or throws, with THROW_EXCEPTION_IFNOTEXIST) on a
   * missing node, and fills the caller-supplied Stat with the data version
   * which advances on each set/update.
   */
  @Test
  public void testSyncGet() {
    String className = TestHelper.getTestClassName();
    String methodName = TestHelper.getTestMethodName();
    String testName = className + "_" + methodName;
    System.out.println("START " + testName + " at " + new Date(System.currentTimeMillis()));

    String path = String.format("/%s/%s", testName, "msg_0");
    ZNRecord record = new ZNRecord("msg_0");
    ZkBaseDataAccessor<ZNRecord> accessor = new ZkBaseDataAccessor<ZNRecord>(_gZkClient);
    Stat stat = new Stat();

    ZNRecord getRecord = accessor.get(path, stat, 0);
    Assert.assertNull(getRecord);
    try {
      accessor.get(path, stat, AccessOption.THROW_EXCEPTION_IFNOTEXIST);
      Assert.fail("Should throw exception if not exist");
    } catch (Exception e) {
      // expected
    }

    boolean success = accessor.create(path, record, AccessOption.PERSISTENT);
    Assert.assertTrue(success);
    getRecord = accessor.get(path, stat, 0);
    Assert.assertNotNull(getRecord);
    Assert.assertEquals(getRecord.getId(), "msg_0");
    Assert.assertEquals(stat.getVersion(), 0);

    record.setSimpleField("key0", "value0");
    success = accessor.set(path, record, AccessOption.PERSISTENT);
    Assert.assertTrue(success);
    getRecord = accessor.get(path, stat, 0);
    Assert.assertNotNull(getRecord);
    // Fixed: assert on the record read back from ZK, not on the local input
    // record (the original asserted on `record`, which trivially holds).
    Assert.assertEquals(getRecord.getSimpleFields().size(), 1);
    Assert.assertNotNull(getRecord.getSimpleField("key0"));
    Assert.assertEquals(getRecord.getSimpleField("key0"), "value0");
    Assert.assertEquals(stat.getVersion(), 1);

    ZNRecord newRecord = new ZNRecord("msg_0");
    newRecord.setSimpleField("key1", "value1");
    success = accessor.update(path, new ZNRecordUpdater(newRecord), AccessOption.PERSISTENT);
    Assert.assertTrue(success);
    getRecord = accessor.get(path, stat, 0);
    Assert.assertNotNull(getRecord);
    Assert.assertEquals(getRecord.getSimpleFields().size(), 2);
    Assert.assertNotNull(getRecord.getSimpleField("key0"));
    Assert.assertEquals(getRecord.getSimpleField("key0"), "value0");
    Assert.assertNotNull(getRecord.getSimpleField("key1"));
    Assert.assertEquals(getRecord.getSimpleField("key1"), "value1");
    Assert.assertEquals(stat.getVersion(), 2);

    System.out.println("END " + testName + " at " + new Date(System.currentTimeMillis()));
  }

  /** exists() reflects node creation. */
  @Test
  public void testSyncExist() {
    String className = TestHelper.getTestClassName();
    String methodName = TestHelper.getTestMethodName();
    String testName = className + "_" + methodName;
    System.out.println("START " + testName + " at " + new Date(System.currentTimeMillis()));

    String path = String.format("/%s/%s", testName, "msg_0");
    ZNRecord record = new ZNRecord("msg_0");
    ZkBaseDataAccessor<ZNRecord> accessor = new ZkBaseDataAccessor<ZNRecord>(_gZkClient);

    boolean success = accessor.exists(path, 0);
    Assert.assertFalse(success);
    success = accessor.create(path, record, AccessOption.EPHEMERAL);
    Assert.assertTrue(success);
    success = accessor.exists(path, 0);
    Assert.assertTrue(success);

    System.out.println("END " + testName + " at " + new Date(System.currentTimeMillis()));
  }

  /**
   * getStat() returns null for a missing node and a populated Stat (with a
   * non-zero ephemeral owner for an ephemeral node) after creation.
   */
  @Test
  public void testSyncGetStat() {
    String className = TestHelper.getTestClassName();
    String methodName = TestHelper.getTestMethodName();
    String testName = className + "_" + methodName;
    System.out.println("START " + testName + " at " + new Date(System.currentTimeMillis()));

    String path = String.format("/%s/%s", testName, "msg_0");
    ZNRecord record = new ZNRecord("msg_0");
    ZkBaseDataAccessor<ZNRecord> accessor = new ZkBaseDataAccessor<ZNRecord>(_gZkClient);

    Stat stat = accessor.getStat(path, 0);
    Assert.assertNull(stat);
    boolean success = accessor.create(path, record, AccessOption.EPHEMERAL);
    Assert.assertTrue(success);
    stat = accessor.getStat(path, 0);
    Assert.assertNotNull(stat);
    Assert.assertEquals(stat.getVersion(), 0);
    Assert.assertNotSame(stat.getEphemeralOwner(), 0);

    System.out.println("END " + testName + " at " + new Date(System.currentTimeMillis()));
  }

  /**
   * Exercises the batched APIs: createChildren, setChildren, updateChildren,
   * getChildren, exists, getStats and remove, over 10 message nodes.
   *
   * <p>Fix: the dedicated ZkClient is now closed in a finally block so a
   * failed assertion no longer leaks the connection; dead reassignments of
   * {@code parentPath} (only the one before getChildren was used) are removed.
   */
  @Test
  public void testAsyncZkBaseDataAccessor() {
    System.out.println("START TestZkBaseDataAccessor.async at "
        + new Date(System.currentTimeMillis()));

    String root = "TestZkBaseDataAccessor_asyn";
    ZkClient zkClient = new ZkClient(ZK_ADDR);
    try {
      zkClient.setZkSerializer(new ZNRecordSerializer());
      zkClient.deleteRecursive("/" + root);
      ZkBaseDataAccessor<ZNRecord> accessor = new ZkBaseDataAccessor<ZNRecord>(zkClient);

      // test async createChildren
      List<ZNRecord> records = new ArrayList<ZNRecord>();
      List<String> paths = new ArrayList<String>();
      for (int i = 0; i < 10; i++) {
        String msgId = "msg_" + i;
        paths.add(PropertyPathConfig.getPath(PropertyType.MESSAGES, root, "host_1", msgId));
        records.add(new ZNRecord(msgId));
      }
      boolean[] success = accessor.createChildren(paths, records, AccessOption.PERSISTENT);
      for (int i = 0; i < 10; i++) {
        String msgId = "msg_" + i;
        Assert.assertTrue(success[i], "Should succeed in create " + msgId);
      }

      // test get what we created
      for (int i = 0; i < 10; i++) {
        String msgId = "msg_" + i;
        String path = PropertyPathConfig.getPath(PropertyType.MESSAGES, root, "host_1", msgId);
        ZNRecord record = zkClient.readData(path);
        Assert.assertEquals(record.getId(), msgId, "Should get what we created");
      }

      // test async setChildren
      records = new ArrayList<ZNRecord>();
      paths = new ArrayList<String>();
      for (int i = 0; i < 10; i++) {
        String msgId = "msg_" + i;
        paths.add(PropertyPathConfig.getPath(PropertyType.MESSAGES, root, "host_1", msgId));
        ZNRecord newRecord = new ZNRecord(msgId);
        newRecord.setSimpleField("key1", "value1");
        records.add(newRecord);
      }
      success = accessor.setChildren(paths, records, AccessOption.PERSISTENT);
      for (int i = 0; i < 10; i++) {
        String msgId = "msg_" + i;
        Assert.assertTrue(success[i], "Should succeed in set " + msgId);
      }

      // test get what we set
      for (int i = 0; i < 10; i++) {
        String msgId = "msg_" + i;
        String path = PropertyPathConfig.getPath(PropertyType.MESSAGES, root, "host_1", msgId);
        ZNRecord record = zkClient.readData(path);
        Assert.assertEquals(record.getSimpleFields().size(), 1, "Should have 1 simple field set");
        Assert.assertEquals(record.getSimpleField("key1"), "value1", "Should have value1 set");
      }

      // test async updateChildren
      List<DataUpdater<ZNRecord>> znrecordUpdaters = new ArrayList<DataUpdater<ZNRecord>>();
      paths = new ArrayList<String>();
      for (int i = 0; i < 10; i++) {
        String msgId = "msg_" + i;
        paths.add(PropertyPathConfig.getPath(PropertyType.MESSAGES, root, "host_1", msgId));
        ZNRecord newRecord = new ZNRecord(msgId);
        newRecord.setSimpleField("key2", "value2");
        znrecordUpdaters.add(new ZNRecordUpdater(newRecord));
      }
      success = accessor.updateChildren(paths, znrecordUpdaters, AccessOption.PERSISTENT);
      for (int i = 0; i < 10; i++) {
        String msgId = "msg_" + i;
        Assert.assertTrue(success[i], "Should succeed in update " + msgId);
      }

      // test get what we updated
      for (int i = 0; i < 10; i++) {
        String msgId = "msg_" + i;
        String path = PropertyPathConfig.getPath(PropertyType.MESSAGES, root, "host_1", msgId);
        ZNRecord record = zkClient.readData(path);
        Assert.assertEquals(record.getSimpleFields().size(), 2, "Should have 2 simple fields set");
        Assert.assertEquals(record.getSimpleField("key2"), "value2", "Should have value2 set");
      }

      // test async getChildren
      String parentPath = PropertyPathConfig.getPath(PropertyType.MESSAGES, root, "host_1");
      records = accessor.getChildren(parentPath, null, 0);
      for (int i = 0; i < 10; i++) {
        String msgId = "msg_" + i;
        ZNRecord record = records.get(i);
        Assert.assertEquals(record.getId(), msgId, "Should get what we updated");
        Assert.assertEquals(record.getSimpleFields().size(), 2, "Should have 2 simple fields set");
        Assert.assertEquals(record.getSimpleField("key2"), "value2", "Should have value2 set");
      }

      // test async exists
      paths = new ArrayList<String>();
      for (int i = 0; i < 10; i++) {
        String msgId = "msg_" + i;
        paths.add(PropertyPathConfig.getPath(PropertyType.MESSAGES, root, "host_1", msgId));
      }
      boolean[] exists = accessor.exists(paths, 0);
      for (int i = 0; i < 10; i++) {
        String msgId = "msg_" + i;
        Assert.assertTrue(exists[i], "Should exist " + msgId);
      }

      // test async getStats
      paths = new ArrayList<String>();
      for (int i = 0; i < 10; i++) {
        String msgId = "msg_" + i;
        paths.add(PropertyPathConfig.getPath(PropertyType.MESSAGES, root, "host_1", msgId));
      }
      Stat[] stats = accessor.getStats(paths, 0);
      for (int i = 0; i < 10; i++) {
        String msgId = "msg_" + i;
        Assert.assertNotNull(stats[i], "Stat should exist for " + msgId);
        Assert.assertEquals(stats[i].getVersion(), 2,
            "DataVersion should be 2, since we set 1 and update 1 for " + msgId);
      }

      // test async remove
      paths = new ArrayList<String>();
      for (int i = 0; i < 10; i++) {
        String msgId = "msg_" + i;
        paths.add(PropertyPathConfig.getPath(PropertyType.MESSAGES, root, "host_1", msgId));
      }
      success = accessor.remove(paths, 0);
      for (int i = 0; i < 10; i++) {
        String msgId = "msg_" + i;
        Assert.assertTrue(success[i], "Should succeed in remove " + msgId);
      }

      // test get what we removed
      for (int i = 0; i < 10; i++) {
        String msgId = "msg_" + i;
        String path = PropertyPathConfig.getPath(PropertyType.MESSAGES, root, "host_1", msgId);
        boolean pathExists = zkClient.exists(path);
        Assert.assertFalse(pathExists, "Should be removed " + msgId);
      }
    } finally {
      // Always release the dedicated connection, even on assertion failure.
      zkClient.close();
    }

    System.out.println("END TestZkBaseDataAccessor.async at "
        + new Date(System.currentTimeMillis()));
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.any23.extractor.html;
import org.apache.any23.extractor.ExtractionContext;
import org.apache.any23.extractor.ExtractionException;
import org.apache.any23.extractor.ExtractionParameters;
import org.apache.any23.extractor.ExtractionResultImpl;
import org.apache.any23.extractor.ExtractorFactory;
import org.apache.any23.rdf.RDFUtils;
import org.apache.any23.vocab.DCTerms;
import org.apache.any23.vocab.FOAF;
import org.apache.any23.vocab.Review;
import org.apache.any23.vocab.VCard;
import org.apache.any23.writer.RepositoryWriter;
import org.junit.Test;
import org.openrdf.model.Resource;
import org.openrdf.model.Statement;
import org.openrdf.model.Value;
import org.openrdf.model.vocabulary.OWL;
import org.openrdf.model.vocabulary.RDF;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.RepositoryResult;
import org.w3c.dom.Document;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
/**
* Reference Test class for various mixed extractors.
*
* @author Davide Palmisano (dpalmisano@gmail.com)
*
* @see GeoExtractor
* @see AdrExtractor
* @see HCardExtractor
* @see HReviewExtractor
*/
public class RDFMergerTest extends AbstractExtractorTestCase {
    // Vocabulary singletons; the v-prefix mirrors the naming used across the
    // Any23 extractor test suite. Used to build expected URIs in assertions.
    private static final DCTerms vDCTERMS = DCTerms.getInstance();
    private static final FOAF vFOAF = FOAF.getInstance();
    private static final Review vREVIEW = Review.getInstance();
    private static final VCard vVCARD = VCard.getInstance();
    /**
     * Returns {@code null}: this reference test drives several extractors at
     * once through its own extract helpers, so no single factory applies.
     */
    @Override
    protected ExtractorFactory<?> getExtractorFactory() {
        return null;
    }
@Test
public void testNoMicroformats() throws Exception, ExtractionException,
IOException {
extract("/html/html-without-uf.html");
assertModelEmpty();
}
@Test
public void test01XFNFoaf() throws Exception {
assertExtract("/html/mixed/01-xfn-foaf.html", false);
assertModelNotEmpty();
assertStatementsSize(RDF.TYPE, vVCARD.VCard, 1);
Resource vcard = findExactlyOneBlankSubject(RDF.TYPE, vVCARD.VCard);
RepositoryResult<Statement> statements = getStatements(null,
vFOAF.topic, vcard);
try {
while (statements.hasNext()) {
Statement statement = statements.next();
Resource person = statement.getSubject();
Resource blank = findExactlyOneBlankSubject(OWL.SAMEAS, person);
assertContains(blank, RDF.TYPE, vFOAF.Person);
}
} finally {
statements.close();
}
}
    /**
     * Extracts an hCard that uses the abbr/title pattern for essentially every
     * supported property, and checks each resulting vCard statement.
     */
    @Test
    public void testAbbrTitleEverything() throws ExtractionException,
            IOException, RepositoryException {
        extractHCardAndRelated("/microformats/hcard/23-abbr-title-everything.html");

        // Identity / name properties.
        assertContains(vVCARD.fn, "John Doe");
        assertContains(vVCARD.nickname, "JJ");
        assertContains(vVCARD.given_name, "Jonathan");
        assertContains(vVCARD.additional_name, "John");
        assertContains(vVCARD.family_name, "Doe-Smith");
        assertContains(vVCARD.honorific_suffix, "Medical Doctor");

        // Role / organization properties.
        assertContains(vVCARD.title, "President");
        assertContains(vVCARD.role, "Chief");
        assertContains(vVCARD.tz, "-0700");
        assertContains(vVCARD.bday, "2006-04-04");
        assertContains(vVCARD.tel, RDFUtils.uri("tel:415.555.1234"));
        assertContains(vVCARD.uid, "abcdefghijklmnopqrstuvwxyz");
        assertContains(vVCARD.class_, "public");
        assertContains(vVCARD.note, "this is a note");
        assertContains(vVCARD.organization_name, "Intellicorp");
        assertContains(vVCARD.organization_unit, "Intelligence");

        // Geo location: the geo value is a resource whose identity is not
        // pinned, hence the (Resource) null object match.
        assertContains(RDF.TYPE, vVCARD.Location);
        assertContains(vVCARD.geo, (Resource) null);
        assertContains(vVCARD.latitude, "37.77");
        assertContains(vVCARD.longitude, "-122.41");

        // Postal address properties.
        assertContains(vVCARD.post_office_box, "Box 1234");
        assertContains(vVCARD.extended_address, "Suite 100");
        assertContains(vVCARD.street_address, "123 Fake Street");
        assertContains(vVCARD.locality, "San Francisco");
        assertContains(vVCARD.region, "California");
        assertContains(vVCARD.postal_code, "12345-6789");
        assertContains(vVCARD.country_name, "United States of America");
        assertContains(vVCARD.addressType, "work");
    }
@Test
public void testAdr() throws Exception {
extractHRevAndRelated("/microformats/hcard/22-adr.html");
assertStatementsSize(RDF.TYPE, vVCARD.Address, 4);
Map<String, String[]> addresses = new HashMap<String, String[]>(4);
addresses.put("1233 Main St.", new String[] {
"United States of America", "Beverly Hills", "90210",
"California" });
addresses.put("1232 Main St.", new String[] {
"United States of America", "Beverly Hills", "90210",
"California" });
addresses.put("1234 Main St.", new String[] {
"United States of America", "Beverly Hills", "90210",
"California" });
addresses.put("1231 Main St.", new String[] {
"United States of America", "Beverly Hills", "90210",
"California" });
addresses.put("Suite 100", new String[] { "United States of America",
"Beverly Hills", "90210", "California" });
RepositoryResult<Statement> statements = getStatements(null, RDF.TYPE,
vVCARD.Address);
try {
while (statements.hasNext()) {
Resource adr = statements.next().getSubject();
RepositoryResult<Statement> innerStatements = getStatements(
adr, vVCARD.street_address, null);
try {
while (innerStatements.hasNext()) {
Value innerValue = innerStatements.next().getObject();
assertContains(adr, vVCARD.country_name,
addresses.get(innerValue.stringValue())[0]);
assertContains(adr, vVCARD.locality,
addresses.get(innerValue.stringValue())[1]);
assertContains(adr, vVCARD.postal_code,
addresses.get(innerValue.stringValue())[2]);
assertContains(adr, vVCARD.region,
addresses.get(innerValue.stringValue())[3]);
}
} finally {
innerStatements.close();
}
}
} finally {
statements.close();
}
assertContains(vVCARD.post_office_box, "PO Box 1234");
assertContains(vVCARD.addressType, "home");
}
    /**
     * Extracts an hCard whose geo coordinates use the abbr pattern and checks
     * the organization and geo statements.
     */
    @Test
    public void testGeoAbbr() throws ExtractionException, IOException,
            RepositoryException {
        extractHCardAndRelated("/microformats/hcard/25-geo-abbr.html");
        assertModelNotEmpty();
        assertContains(vVCARD.fn, "Paradise");
        assertContains(RDF.TYPE, vVCARD.Organization);
        assertContains(vVCARD.organization_name, "Paradise");
        // Geo node identity is not pinned: (Resource) null matches any resource.
        assertContains(RDF.TYPE, vVCARD.Location);
        assertContains(vVCARD.geo, (Resource) null);
        assertContains(vVCARD.latitude, "30.267991");
        assertContains(vVCARD.longitude, "-97.739568");
    }
    /**
     * Extracts an hCard with deeply nested (ancestor) markup and verifies that
     * every property is picked up individually — and that the concatenation of
     * the full name parts is NOT emitted as a single fn value.
     */
    @Test
    public void testAncestors() throws ExtractionException, IOException,
            RepositoryException {
        extractHCardAndRelated("/microformats/hcard/26-ancestors.html");
        assertModelNotEmpty();
        assertContains(vVCARD.fn, "John Doe");
        // The concatenated name must not leak in as a formatted name.
        assertNotContains(null, vVCARD.fn,
                "Mister Jonathan John Doe-Smith Medical Doctor");
        assertContains(vVCARD.nickname, "JJ");
        assertContains(RDF.TYPE, vVCARD.Address);
        assertContains(vVCARD.tz, "-0700");
        assertContains(vVCARD.title, "President");
        assertContains(vVCARD.role, "Chief");
        assertContains(vVCARD.organization_name, "Intellicorp");
        assertContains(vVCARD.organization_unit, "Intelligence");
        assertContains(vVCARD.tel, RDFUtils.uri("tel:415.555.1234"));
        assertContains(vVCARD.uid, "abcdefghijklmnopqrstuvwxyz");
        assertContains(vVCARD.note, "this is a note");
        assertContains(vVCARD.class_, "public");

        // Geo location (blank/unpinned resource node).
        assertContains(RDF.TYPE, vVCARD.Location);
        assertContains(vVCARD.geo, (Resource) null);
        assertContains(null, vVCARD.latitude, "37.77");
        assertContains(null, vVCARD.longitude, "-122.41");

        // Structured name parts.
        assertContains(RDF.TYPE, vVCARD.Name);
        assertContains(vVCARD.additional_name, "John");
        assertContains(vVCARD.given_name, "Jonathan");
        assertContains(vVCARD.family_name, "Doe-Smith");
        assertContains(vVCARD.honorific_prefix, "Mister");
        assertContains(vVCARD.honorific_suffix, "Medical Doctor");

        // Postal address parts.
        assertContains(vVCARD.post_office_box, "Box 1234");
        assertContains(vVCARD.extended_address, "Suite 100");
        assertContains(vVCARD.street_address, "123 Fake Street");
        assertContains(vVCARD.locality, "San Francisco");
        assertContains(vVCARD.region, "California");
        assertContains(vVCARD.postal_code, "12345-6789");
        assertContains(vVCARD.country_name, "United States of America");
        assertContains(vVCARD.addressType, "work");
    }
    /**
     * Singleton-property handling: where the hCard spec mandates at most one
     * value per card, only the first occurrence must be kept — so every
     * singleton property is asserted to appear exactly once.
     */
    @Test
    public void testSingleton() throws Exception {
        extractHCardAndRelated("/microformats/hcard/37-singleton.html");
        assertModelNotEmpty();
        assertStatementsSize(vVCARD.fn, (Value) null, 1);
        assertContains(vVCARD.fn, "john doe 1");
        assertStatementsSize(RDF.TYPE, vVCARD.Name, 1);
        assertStatementsSize(vVCARD.given_name, (Value) null, 1);
        assertContains(vVCARD.given_name, "john");
        assertStatementsSize(vVCARD.family_name, (Value) null, 1);
        assertContains(vVCARD.family_name, "doe");
        assertStatementsSize(vVCARD.sort_string, (Value) null, 1);
        assertContains(vVCARD.sort_string, "d");
        assertStatementsSize(vVCARD.bday, (Value) null, 1);
        assertContains(vVCARD.bday, "20060707");
        assertStatementsSize(vVCARD.rev, (Value) null, 1);
        assertContains(vVCARD.rev, "20060707");
        assertStatementsSize(vVCARD.class_, (Value) null, 1);
        assertContains(vVCARD.class_, "public");
        assertStatementsSize(vVCARD.tz, (Value) null, 1);
        assertContains(vVCARD.tz, "+0600");
        // 2 uf, one of them outside the card
        assertStatementsSize(RDF.TYPE, vVCARD.Location, 2);
        // one is actually used
        // NOTE(review): comment says "one is used" but the assertion expects
        // 2 geo statements — confirm which is intended.
        assertStatementsSize(vVCARD.geo, (Value) null, 2);
        assertContains(vVCARD.latitude, "123.45");
        assertContains(vVCARD.longitude, "67.89");
        assertStatementsSize(vVCARD.uid, (Value) null, 1);
        assertContains(vVCARD.uid, "unique-id-1");
    }
    /**
     * hReview spec example 1: one review linked to two vCards (reviewer and
     * reviewed item, the latter with one address); checks rating, title, date
     * and the full review text, and that something hasReview pointing at it.
     */
    @Test
    public void test01Basic() throws Exception {
        extractHRevAndRelated("/microformats/hreview/01-spec.html");
        assertModelNotEmpty();
        assertStatementsSize(RDF.TYPE, vREVIEW.Review, 1);
        // reviewer, item
        assertStatementsSize(RDF.TYPE, vVCARD.VCard, 2);
        // there is one address in the item vcard
        assertStatementsSize(RDF.TYPE, vVCARD.Address, 1);
        RepositoryResult<Statement> reviews = getStatements(null, RDF.TYPE,
                vREVIEW.Review);
        try {
            while (reviews.hasNext()) {
                Resource review = reviews.next().getSubject();
                assertContains(review, vREVIEW.rating, "5");
                assertContains(review, vREVIEW.title,
                        "Crepes on Cole is awesome");
                assertContains(review, vDCTERMS.date, "20050418T2300-0700");
                // Exact expected text, including the original whitespace.
                assertContains(
                        vREVIEW.text,
                        "Crepes on Cole is one of the best little \n"
                                + " creperies in San Francisco.\n "
                                + "Excellent food and service. Plenty of tables in a variety of sizes\n"
                                + " for parties large and small. "
                                + "Window seating makes for excellent\n "
                                + "people watching to/from the N-Judah which stops right outside.\n"
                                + " I've had many fun social gatherings here, as well as gotten\n"
                                + " plenty of work done thanks to neighborhood WiFi.");
                assertContains(null, vREVIEW.hasReview, review);
            }
        } finally {
            // Always close the repository cursor.
            reviews.close();
        }
        // generic checks that vcards are correct, improve
        assertContains(vVCARD.fn, "Crepes on Cole");
        assertContains(vVCARD.fn, "Tantek");
        assertContains(vVCARD.locality, "San Francisco");
        assertContains(vVCARD.organization_name, "Crepes on Cole");
    }
    /**
     * hReview spec example 2 (rated tags): one review with reviewer and item
     * vCards; checks the rating, title, date and the review "business" type.
     */
    @Test
    public void test02RatedTags() throws Exception {
        extractHRevAndRelated("/microformats/hreview/02-spec-2.html");
        assertStatementsSize(vREVIEW.reviewer, (Value) null, 1);
        assertStatementsSize(vREVIEW.hasReview, (Value) null, 1);
        assertModelNotEmpty();
        assertStatementsSize(RDF.TYPE, vREVIEW.Review, 1);
        // reviewer, item
        assertStatementsSize(RDF.TYPE, vVCARD.VCard, 2);
        // there is one address in the item vcard
        assertStatementsSize(RDF.TYPE, vVCARD.Address, 1);
        RepositoryResult<Statement> reviews = getStatements(null, RDF.TYPE,
                vREVIEW.Review);
        try {
            while (reviews.hasNext()) {
                Resource review = reviews.next().getSubject();
                assertContains(review, vREVIEW.rating, "18");
                assertContains(review, vREVIEW.title, "Cafe Borrone");
                assertContains(review, vDCTERMS.date, "20050428T2130-0700");
                assertContains(null, vREVIEW.hasReview, review);
                assertContains(vREVIEW.type, "business");
            }
        } finally {
            // Always close the repository cursor.
            reviews.close();
        }
        // generic checks that vcards are correct, improve
        assertContains(vVCARD.fn, "Cafe Borrone");
        assertContains(vVCARD.fn, "anonymous");
        assertContains(vVCARD.organization_name, "Cafe Borrone");
    }
    /**
     * hReview spec example 3: the reviewed item has no nested hCard, so only
     * the reviewer's vCard exists; the item's fn/url/photo come from the
     * subject that hasReview the review. Also asserts the review has no title.
     */
    @Test
    public void test03NoHcardForItem() throws Exception {
        extractHRevAndRelated("/microformats/hreview/03-spec-3.html");
        assertModelNotEmpty();
        assertStatementsSize(RDF.TYPE, vREVIEW.Review, 1);
        assertStatementsSize(RDF.TYPE, vVCARD.VCard, 1);
        RepositoryResult<Statement> reviews = getStatements(null, RDF.TYPE,
                vREVIEW.Review);
        try {
            while (reviews.hasNext()) {
                Resource review = reviews.next().getSubject();
                assertContains(review, vREVIEW.rating, "5");
                assertNotContains(vREVIEW.title, null);
                assertContains(review, vDCTERMS.date, "200502");
                // Exact expected text, including the original whitespace.
                assertContains(
                        vREVIEW.text,
                        "\"The people thought they were just being rewarded for "
                                + "treating others\n as they like to be treated, for "
                                + "obeying stop signs and curing diseases,\n for mailing "
                                + "letters with the address of the sender... Don't wake me,\n "
                                + " I plan on sleeping in...\"\n \n \"Nothing Better\""
                                + " is a great track on this album, too...");
                // Walk everything that hasReview this review and check the
                // item's properties attached directly to that subject.
                RepositoryResult<Statement> whatHasAReview = getStatements(
                        null, vREVIEW.hasReview, review);
                try {
                    while (whatHasAReview.hasNext()) {
                        Resource subject = whatHasAReview.next().getSubject();
                        assertContains(subject, vVCARD.fn,
                                "The Postal Service: Give Up");
                        assertContains(
                                subject,
                                vVCARD.url,
                                RDFUtils.uri("http://www.amazon.com/exec/obidos/ASIN/B000089CJI/"));
                        assertContains(
                                subject,
                                vVCARD.photo,
                                RDFUtils.uri("http://images.amazon.com/images/P/B000089CJI.01._SCTHUMBZZZ_.jpg"));
                    }
                } finally {
                    // Always close the inner cursor before the outer one.
                    whatHasAReview.close();
                }
            }
        } finally {
            reviews.close();
        }
        assertContains(vVCARD.fn, "Adam Rifkin");
        assertContains(vVCARD.url, RDFUtils.uri("http://ifindkarma.com/blog/"));
    }
/**
 * Parses the given classpath resource and runs the hCard and XFN extractors
 * over its DOM, writing all extracted triples to the test repository.
 *
 * @param filename classpath resource to parse
 * @throws ExtractionException if an extractor fails
 * @throws IOException if the resource cannot be read
 */
@Override
protected void extract(String filename) throws ExtractionException,
        IOException {
    InputStream input = new BufferedInputStream(this.getClass()
            .getResourceAsStream(filename));
    Document document = new TagSoupParser(input, baseURI.stringValue())
            .getDOM();
    HCardExtractor hCardExtractor = new HCardExtractorFactory()
            .createExtractor();
    ExtractionContext hcExtractionContext = new ExtractionContext(
            hCardExtractor.getDescription().getExtractorName(), baseURI);
    hCardExtractor.run(ExtractionParameters.newDefault(),
            hcExtractionContext, document, new ExtractionResultImpl(
                    hcExtractionContext, hCardExtractor,
                    new RepositoryWriter(getConnection())));
    XFNExtractor xfnExtractor = new XFNExtractorFactory().createExtractor();
    ExtractionContext xfnExtractionContext = new ExtractionContext(
            xfnExtractor.getDescription().getExtractorName(), baseURI);
    // FIX: the ExtractionResultImpl for the XFN run was built with
    // hCardExtractor, mismatching the xfnExtractionContext it is paired with.
    // Every other run in this file pairs the context with its own extractor.
    xfnExtractor.run(ExtractionParameters.newDefault(),
            xfnExtractionContext, document, new ExtractionResultImpl(
                    xfnExtractionContext, xfnExtractor,
                    new RepositoryWriter(getConnection())));
}
/**
 * Parses the given classpath resource once and runs the hCard, geo and adr
 * extractors over the same DOM, writing extracted triples to the repository.
 */
private void extractHCardAndRelated(String filename) throws IOException,
        ExtractionException {
    InputStream resourceStream = new BufferedInputStream(this.getClass()
            .getResourceAsStream(filename));
    Document dom = new TagSoupParser(resourceStream, baseURI.stringValue())
            .getDOM();
    // hCard extraction.
    HCardExtractor hCardExtractor = new HCardExtractorFactory()
            .createExtractor();
    ExtractionContext hCardContext = new ExtractionContext(
            hCardExtractor.getDescription().getExtractorName(), baseURI);
    hCardExtractor.run(ExtractionParameters.newDefault(),
            hCardContext, dom, new ExtractionResultImpl(
                    hCardContext, hCardExtractor,
                    new RepositoryWriter(getConnection())));
    // geo extraction.
    GeoExtractor geoExtractor = new GeoExtractorFactory().createExtractor();
    ExtractionContext geoContext = new ExtractionContext(
            geoExtractor.getDescription().getExtractorName(), baseURI);
    geoExtractor.run(ExtractionParameters.newDefault(),
            geoContext, dom, new ExtractionResultImpl(
                    geoContext, geoExtractor,
                    new RepositoryWriter(getConnection())));
    // adr extraction.
    AdrExtractor adrExtractor = new AdrExtractorFactory().createExtractor();
    ExtractionContext adrContext = new ExtractionContext(
            adrExtractor.getDescription().getExtractorName(), baseURI);
    adrExtractor.run(ExtractionParameters.newDefault(),
            adrContext, dom, new ExtractionResultImpl(
                    adrContext, adrExtractor,
                    new RepositoryWriter(getConnection())));
}
/**
 * Runs the hCard/geo/adr extractors and then the hReview extractor over the
 * given classpath resource. The resource is re-read because
 * extractHCardAndRelated consumes its own input stream.
 */
private void extractHRevAndRelated(String filename)
        throws ExtractionException, IOException {
    extractHCardAndRelated(filename);
    InputStream resourceStream = new BufferedInputStream(this.getClass()
            .getResourceAsStream(filename));
    Document dom = new TagSoupParser(resourceStream, baseURI.stringValue())
            .getDOM();
    HReviewExtractor hReviewExtractor = new HReviewExtractorFactory()
            .createExtractor();
    ExtractionContext hReviewContext = new ExtractionContext(
            hReviewExtractor.getDescription().getExtractorName(), baseURI);
    hReviewExtractor.run(ExtractionParameters.newDefault(),
            hReviewContext, dom, new ExtractionResultImpl(
                    hReviewContext, hReviewExtractor,
                    new RepositoryWriter(getConnection())));
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security.authz.store;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.ContextPreservingActionListener;
import org.elasticsearch.action.support.GroupedActionListener;
import org.elasticsearch.action.support.TransportActions;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.util.iterable.Iterables;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.query.TermsQueryBuilder;
import org.elasticsearch.xpack.core.ClientHelper;
import org.elasticsearch.xpack.core.security.ScrollHelper;
import org.elasticsearch.xpack.core.security.action.role.ClearRolesCacheAction;
import org.elasticsearch.xpack.core.security.action.role.ClearRolesCacheRequest;
import org.elasticsearch.xpack.core.security.action.role.ClearRolesCacheResponse;
import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor;
import org.elasticsearch.xpack.security.support.SecurityIndexManager;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Supplier;
import java.util.stream.Collector;
import java.util.stream.Collectors;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME;
import static org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING;
import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN;
import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;
import static org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor.DOC_TYPE_VALUE;
import static org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor.Fields.APPLICATION;
import static org.elasticsearch.xpack.core.security.index.RestrictedIndicesNames.SECURITY_MAIN_ALIAS;
/**
 * {@code NativePrivilegeStore} is a store that reads/writes {@link ApplicationPrivilegeDescriptor} objects
 * from an Elasticsearch security index.
 */
public class NativePrivilegeStore {

    // Collapses a stream of (application, privilege-name) tuples into a map of
    // application name -> list of privilege names, concatenating the name lists
    // when several tuples share the same application key.
    private static final Collector<Tuple<String, String>, ?, Map<String, List<String>>> TUPLES_TO_MAP = Collectors.toMap(
            Tuple::v1,
            t -> CollectionUtils.newSingletonArrayList(t.v2()), (a, b) -> {
                a.addAll(b);
                return a;
            });

    private static final Logger logger = LogManager.getLogger(NativePrivilegeStore.class);

    // Node settings; only consulted here for the scroll keep-alive when searching.
    private final Settings settings;
    private final Client client;
    // Gateway to the security index: availability/version checks and on-demand creation.
    private final SecurityIndexManager securityIndexManager;

    public NativePrivilegeStore(Settings settings, Client client, SecurityIndexManager securityIndexManager) {
        this.settings = settings;
        this.client = client;
        this.securityIndexManager = securityIndexManager;
    }

    /**
     * Loads the privilege descriptors matching the given application names and
     * privilege names. Either collection may be null/empty, meaning "all".
     * Responds with an empty list when the security index does not exist, and
     * fails the listener when the index exists but is unavailable.
     */
    public void getPrivileges(Collection<String> applications, Collection<String> names,
                              ActionListener<Collection<ApplicationPrivilegeDescriptor>> listener) {
        // Freeze the index state so all checks below see one consistent snapshot.
        final SecurityIndexManager frozenSecurityIndex = securityIndexManager.freeze();
        if (frozenSecurityIndex.indexExists() == false) {
            // No index yet means no privileges have ever been stored.
            listener.onResponse(Collections.emptyList());
        } else if (frozenSecurityIndex.isAvailable() == false) {
            listener.onFailure(frozenSecurityIndex.getUnavailableReason());
        } else if (isSinglePrivilegeMatch(applications, names)) {
            // Exactly one concrete application (no wildcard) and one name: a
            // direct GET by doc id is cheaper than a scrolled search.
            getPrivilege(Objects.requireNonNull(Iterables.get(applications, 0)), Objects.requireNonNull(Iterables.get(names, 0)),
                    ActionListener.wrap(privilege ->
                                    listener.onResponse(privilege == null ? Collections.emptyList() : Collections.singletonList(privilege)),
                            listener::onFailure));
        } else {
            securityIndexManager.checkIndexVersionThenExecute(listener::onFailure, () -> {
                final QueryBuilder query;
                // Every privilege document carries this doc-type marker field.
                final TermQueryBuilder typeQuery = QueryBuilders
                        .termQuery(ApplicationPrivilegeDescriptor.Fields.TYPE.getPreferredName(), DOC_TYPE_VALUE);
                if (isEmpty(applications) && isEmpty(names)) {
                    // No filters: fetch all privilege documents.
                    query = typeQuery;
                } else if (isEmpty(names)) {
                    query = QueryBuilders.boolQuery().filter(typeQuery).filter(getApplicationNameQuery(applications));
                } else if (isEmpty(applications)) {
                    query = QueryBuilders.boolQuery().filter(typeQuery)
                            .filter(getPrivilegeNameQuery(names));
                } else if (hasWildcard(applications)) {
                    query = QueryBuilders.boolQuery().filter(typeQuery)
                            .filter(getApplicationNameQuery(applications))
                            .filter(getPrivilegeNameQuery(names));
                } else {
                    // Both sides concrete: doc ids are fully determined, so an
                    // ids query (cartesian product of app x name) is used instead.
                    final String[] docIds = applications.stream()
                            .flatMap(a -> names.stream().map(n -> toDocId(a, n)))
                            .toArray(String[]::new);
                    query = QueryBuilders.boolQuery().filter(typeQuery).filter(QueryBuilders.idsQuery().addIds(docIds));
                }
                // Capture the caller's thread context so scroll results are
                // delivered with it restored, while the search itself executes
                // under the security origin.
                final Supplier<ThreadContext.StoredContext> supplier = client.threadPool().getThreadContext().newRestorableContext(false);
                try (ThreadContext.StoredContext ignore = client.threadPool().getThreadContext().stashWithOrigin(SECURITY_ORIGIN)) {
                    SearchRequest request = client.prepareSearch(SECURITY_MAIN_ALIAS)
                            .setScroll(DEFAULT_KEEPALIVE_SETTING.get(settings))
                            .setQuery(query)
                            .setSize(1000)
                            .setFetchSource(true)
                            .request();
                    logger.trace(() ->
                            new ParameterizedMessage("Searching for privileges [{}] with query [{}]", names, Strings.toString(query)));
                    // NOTE(review): indicesOptions() is a getter and ignoreUnavailable()
                    // only reads a flag — this statement discards its result and appears
                    // to have no effect; confirm intent.
                    request.indicesOptions().ignoreUnavailable();
                    ScrollHelper.fetchAllByEntity(client, request, new ContextPreservingActionListener<>(supplier, listener),
                            hit -> buildPrivilege(hit.getId(), hit.getSourceRef()));
                }
            });
        }
    }

    // True when the request pins down exactly one application (without a
    // wildcard) and exactly one privilege name, i.e. a single doc id.
    private boolean isSinglePrivilegeMatch(Collection<String> applications, Collection<String> names) {
        return applications != null && applications.size() == 1 && hasWildcard(applications) == false && names != null && names.size() == 1;
    }

    // True if any of the given application names ends with the '*' wildcard.
    private boolean hasWildcard(Collection<String> applications) {
        return applications.stream().anyMatch(n -> n.endsWith("*"));
    }

    // Terms query over the privilege "name" field.
    private QueryBuilder getPrivilegeNameQuery(Collection<String> names) {
        return QueryBuilders.termsQuery(ApplicationPrivilegeDescriptor.Fields.NAME.getPreferredName(), names);
    }

    /**
     * Builds a query for the "application" field. A bare "*" matches any
     * application; names ending in "*" become prefix queries; the rest become a
     * single terms query. Mixed inputs are combined with should + minimumShouldMatch(1).
     */
    private QueryBuilder getApplicationNameQuery(Collection<String> applications) {
        if (applications.contains("*")) {
            return QueryBuilders.existsQuery(APPLICATION.getPreferredName());
        }
        final List<String> rawNames = new ArrayList<>(applications.size());
        final List<String> wildcardNames = new ArrayList<>(applications.size());
        for (String name : applications) {
            if (name.endsWith("*")) {
                wildcardNames.add(name);
            } else {
                rawNames.add(name);
            }
        }
        // Callers only reach here with at least one application name.
        assert rawNames.isEmpty() == false || wildcardNames.isEmpty() == false;
        TermsQueryBuilder termsQuery = rawNames.isEmpty() ? null : QueryBuilders.termsQuery(APPLICATION.getPreferredName(), rawNames);
        if (wildcardNames.isEmpty()) {
            return termsQuery;
        }
        final BoolQueryBuilder boolQuery = QueryBuilders.boolQuery();
        if (termsQuery != null) {
            boolQuery.should(termsQuery);
        }
        for (String wildcard : wildcardNames) {
            // Strip the trailing '*' and match on the remaining prefix.
            final String prefix = wildcard.substring(0, wildcard.length() - 1);
            boolQuery.should(QueryBuilders.prefixQuery(APPLICATION.getPreferredName(), prefix));
        }
        boolQuery.minimumShouldMatch(1);
        return boolQuery;
    }

    private static boolean isEmpty(Collection<String> collection) {
        return collection == null || collection.isEmpty();
    }

    /**
     * Fetches a single privilege by application and name via a direct GET.
     * Responds with null (rather than failing) when the index or shard is
     * unavailable, or when the document does not exist.
     */
    void getPrivilege(String application, String name, ActionListener<ApplicationPrivilegeDescriptor> listener) {
        final SecurityIndexManager frozenSecurityIndex = securityIndexManager.freeze();
        if (frozenSecurityIndex.isAvailable() == false) {
            logger.warn(new ParameterizedMessage("failed to load privilege [{}] index not available", name),
                    frozenSecurityIndex.getUnavailableReason());
            listener.onResponse(null);
        } else {
            securityIndexManager.checkIndexVersionThenExecute(listener::onFailure,
                    () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
                            client.prepareGet(SECURITY_MAIN_ALIAS, toDocId(application, name))
                                    .request(),
                            new ActionListener<GetResponse>() {
                                @Override
                                public void onResponse(GetResponse response) {
                                    if (response.isExists()) {
                                        listener.onResponse(buildPrivilege(response.getId(), response.getSourceAsBytesRef()));
                                    } else {
                                        listener.onResponse(null);
                                    }
                                }

                                @Override
                                public void onFailure(Exception e) {
                                    // if the index or the shard is not there / available we just claim the privilege is not there
                                    if (TransportActions.isShardNotAvailableException(e)) {
                                        logger.warn(new ParameterizedMessage("failed to load privilege [{}] index not available", name), e);
                                        listener.onResponse(null);
                                    } else {
                                        logger.error(new ParameterizedMessage("failed to load privilege [{}]", name), e);
                                        listener.onFailure(e);
                                    }
                                }
                            },
                            client::get));
        }
    }

    /**
     * Indexes the given privilege descriptors, then clears the role cache and
     * responds with a map of application name -> names of privileges that were
     * newly CREATED (updates of existing docs are excluded).
     */
    public void putPrivileges(Collection<ApplicationPrivilegeDescriptor> privileges, WriteRequest.RefreshPolicy refreshPolicy,
                              ActionListener<Map<String, List<String>>> listener) {
        securityIndexManager.prepareIndexIfNeededThenExecute(listener::onFailure, () -> {
            // Wait for all individual index responses before responding.
            ActionListener<IndexResponse> groupListener = new GroupedActionListener<>(
                    ActionListener.wrap((Collection<IndexResponse> responses) -> {
                        final Map<String, List<String>> createdNames = responses.stream()
                                .filter(r -> r.getResult() == DocWriteResponse.Result.CREATED)
                                .map(r -> r.getId())
                                .map(NativePrivilegeStore::nameFromDocId)
                                .collect(TUPLES_TO_MAP);
                        clearRolesCache(listener, createdNames);
                    }, listener::onFailure), privileges.size());
            for (ApplicationPrivilegeDescriptor privilege : privileges) {
                innerPutPrivilege(privilege, refreshPolicy, groupListener);
            }
        });
    }

    // Serializes one privilege to JSON and indexes it under its deterministic
    // doc id, running under the security origin.
    private void innerPutPrivilege(ApplicationPrivilegeDescriptor privilege, WriteRequest.RefreshPolicy refreshPolicy,
                                   ActionListener<IndexResponse> listener) {
        try {
            final String name = privilege.getName();
            final XContentBuilder xContentBuilder = privilege.toXContent(jsonBuilder(), true);
            ClientHelper.executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
                    client.prepareIndex(SECURITY_MAIN_ALIAS, SINGLE_MAPPING_NAME, toDocId(privilege.getApplication(), name))
                            .setSource(xContentBuilder)
                            .setRefreshPolicy(refreshPolicy)
                            .request(), listener, client::index);
        } catch (Exception e) {
            // Serialization failures are reported to the listener rather than thrown.
            logger.warn("Failed to put privilege {} - {}", Strings.toString(privilege), e.toString());
            listener.onFailure(e);
        }
    }

    /**
     * Deletes the named privileges of one application, then clears the role
     * cache and responds with a map of application name -> names that were
     * actually DELETED (missing docs are excluded). Responds with an empty map
     * when the security index does not exist.
     */
    public void deletePrivileges(String application, Collection<String> names, WriteRequest.RefreshPolicy refreshPolicy,
                                 ActionListener<Map<String, List<String>>> listener) {
        final SecurityIndexManager frozenSecurityIndex = securityIndexManager.freeze();
        if (frozenSecurityIndex.indexExists() == false) {
            listener.onResponse(Collections.emptyMap());
        } else if (frozenSecurityIndex.isAvailable() == false) {
            listener.onFailure(frozenSecurityIndex.getUnavailableReason());
        } else {
            securityIndexManager.checkIndexVersionThenExecute(listener::onFailure, () -> {
                // Wait for all individual delete responses before responding.
                ActionListener<DeleteResponse> groupListener = new GroupedActionListener<>(
                        ActionListener.wrap(responses -> {
                            final Map<String, List<String>> deletedNames = responses.stream()
                                    .filter(r -> r.getResult() == DocWriteResponse.Result.DELETED)
                                    .map(r -> r.getId())
                                    .map(NativePrivilegeStore::nameFromDocId)
                                    .collect(TUPLES_TO_MAP);
                            clearRolesCache(listener, deletedNames);
                        }, listener::onFailure), names.size());
                for (String name : names) {
                    ClientHelper.executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
                            client.prepareDelete(SECURITY_MAIN_ALIAS, SINGLE_MAPPING_NAME, toDocId(application, name))
                                    .setRefreshPolicy(refreshPolicy)
                                    .request(), groupListener, client::delete);
                }
            });
        }
    }

    // Clears the role cache on all nodes, then forwards {@code value} to the
    // listener; a cache-clear failure fails the whole operation.
    private <T> void clearRolesCache(ActionListener<T> listener, T value) {
        // This currently clears _all_ roles, but could be improved to clear only those roles that reference the affected application
        ClearRolesCacheRequest request = new ClearRolesCacheRequest();
        executeAsyncWithOrigin(client, SECURITY_ORIGIN, ClearRolesCacheAction.INSTANCE, request,
                new ActionListener<>() {
                    @Override
                    public void onResponse(ClearRolesCacheResponse nodes) {
                        listener.onResponse(value);
                    }

                    @Override
                    public void onFailure(Exception e) {
                        logger.error("unable to clear role cache", e);
                        listener.onFailure(
                                new ElasticsearchException("clearing the role cache failed. please clear the role cache manually", e));
                    }
                });
    }

    /**
     * Parses a privilege document's source into a descriptor. Returns null when
     * the source is null or cannot be parsed. Asserts (in test builds) that the
     * parsed application/name agree with the ones encoded in the doc id.
     */
    private ApplicationPrivilegeDescriptor buildPrivilege(String docId, BytesReference source) {
        logger.trace("Building privilege from [{}] [{}]", docId, source == null ? "<<null>>" : source.utf8ToString());
        if (source == null) {
            return null;
        }
        final Tuple<String, String> name = nameFromDocId(docId);
        try {
            // EMPTY is safe here because we never use namedObject
            try (StreamInput input = source.streamInput();
                 XContentParser parser = XContentType.JSON.xContent().createParser(NamedXContentRegistry.EMPTY,
                         LoggingDeprecationHandler.INSTANCE, input)) {
                final ApplicationPrivilegeDescriptor privilege = ApplicationPrivilegeDescriptor.parse(parser, null, null, true);
                assert privilege.getApplication().equals(name.v1())
                        : "Incorrect application name for privilege. Expected [" + name.v1() + "] but was " + privilege.getApplication();
                assert privilege.getName().equals(name.v2())
                        : "Incorrect name for application privilege. Expected [" + name.v2() + "] but was " + privilege.getName();
                return privilege;
            }
        } catch (IOException | XContentParseException e) {
            logger.error(new ParameterizedMessage("cannot parse application privilege [{}]", name), e);
            return null;
        }
    }

    // Inverse of toDocId: splits "<doc-type>_<application>:<name>" into
    // (application, name).
    private static Tuple<String, String> nameFromDocId(String docId) {
        final String name = docId.substring(DOC_TYPE_VALUE.length() + 1);
        assert name != null && name.length() > 0 : "Invalid name '" + name + "'";
        final int colon = name.indexOf(':');
        assert colon > 0 : "Invalid name '" + name + "' (missing colon)";
        return new Tuple<>(name.substring(0, colon), name.substring(colon + 1));
    }

    // Deterministic doc id for a privilege: "<doc-type>_<application>:<name>".
    private static String toDocId(String application, String name) {
        return DOC_TYPE_VALUE + "_" + application + ":" + name;
    }
}
| |
package com.vaadin.tests.server.component.absolutelayout;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import org.junit.Test;
import com.vaadin.server.Sizeable;
import com.vaadin.server.Sizeable.Unit;
import com.vaadin.ui.AbsoluteLayout;
import com.vaadin.ui.Button;
public class ComponentPositionTest {

    private static final String CSS = "top:7.0px;right:7.0%;bottom:7.0pc;left:7.0em;z-index:7;";
    private static final String PARTIAL_CSS = "top:7.0px;left:7.0em;";
    private static final Float CSS_VALUE = Float.valueOf(7);
    private static final Unit UNIT_UNSET = Unit.PIXELS;

    /**
     * Add component w/o giving positions, assert that everything is unset
     */
    @Test
    public void testNoPosition() {
        AbsoluteLayout layout = new AbsoluteLayout();
        Button button = new Button();
        layout.addComponent(button);
        // All four offsets are absent.
        assertNull(layout.getPosition(button).getTopValue());
        assertNull(layout.getPosition(button).getBottomValue());
        assertNull(layout.getPosition(button).getLeftValue());
        assertNull(layout.getPosition(button).getRightValue());
        // Units default to pixels when nothing was specified.
        assertEquals(UNIT_UNSET, layout.getPosition(button).getTopUnits());
        assertEquals(UNIT_UNSET, layout.getPosition(button).getBottomUnits());
        assertEquals(UNIT_UNSET, layout.getPosition(button).getLeftUnits());
        assertEquals(UNIT_UNSET, layout.getPosition(button).getRightUnits());
        assertEquals(-1, layout.getPosition(button).getZIndex());
        assertEquals("", layout.getPosition(button).getCSSString());
    }

    /**
     * Add component, setting all attributes using CSS, assert getters agree
     */
    @Test
    public void testFullCss() {
        AbsoluteLayout layout = new AbsoluteLayout();
        Button button = new Button();
        layout.addComponent(button, CSS);
        assertEquals(CSS_VALUE, layout.getPosition(button).getTopValue());
        assertEquals(CSS_VALUE, layout.getPosition(button).getBottomValue());
        assertEquals(CSS_VALUE, layout.getPosition(button).getLeftValue());
        assertEquals(CSS_VALUE, layout.getPosition(button).getRightValue());
        // Each side was given a different unit in the CSS string.
        assertEquals(Unit.PIXELS, layout.getPosition(button).getTopUnits());
        assertEquals(Unit.PICAS, layout.getPosition(button).getBottomUnits());
        assertEquals(Unit.EM, layout.getPosition(button).getLeftUnits());
        assertEquals(Unit.PERCENTAGE, layout.getPosition(button).getRightUnits());
        assertEquals(7, layout.getPosition(button).getZIndex());
        assertEquals(CSS, layout.getPosition(button).getCSSString());
    }

    /**
     * Add component, setting some attributes using CSS, assert getters agree
     */
    @Test
    public void testPartialCss() {
        AbsoluteLayout layout = new AbsoluteLayout();
        Button button = new Button();
        layout.addComponent(button, PARTIAL_CSS);
        // Only top and left were specified; bottom/right stay unset.
        assertEquals(CSS_VALUE, layout.getPosition(button).getTopValue());
        assertNull(layout.getPosition(button).getBottomValue());
        assertEquals(CSS_VALUE, layout.getPosition(button).getLeftValue());
        assertNull(layout.getPosition(button).getRightValue());
        assertEquals(Unit.PIXELS, layout.getPosition(button).getTopUnits());
        assertEquals(UNIT_UNSET, layout.getPosition(button).getBottomUnits());
        assertEquals(Unit.EM, layout.getPosition(button).getLeftUnits());
        assertEquals(UNIT_UNSET, layout.getPosition(button).getRightUnits());
        assertEquals(-1, layout.getPosition(button).getZIndex());
        assertEquals(PARTIAL_CSS, layout.getPosition(button).getCSSString());
    }

    /**
     * Add component setting all attributes using CSS, then reset using partial
     * CSS; assert getters agree and the appropriate attributes are unset.
     */
    @Test
    public void testPartialCssReset() {
        AbsoluteLayout layout = new AbsoluteLayout();
        Button button = new Button();
        layout.addComponent(button, CSS);
        // Re-applying a partial CSS string must clear the attributes it omits.
        layout.getPosition(button).setCSSString(PARTIAL_CSS);
        assertEquals(CSS_VALUE, layout.getPosition(button).getTopValue());
        assertNull(layout.getPosition(button).getBottomValue());
        assertEquals(CSS_VALUE, layout.getPosition(button).getLeftValue());
        assertNull(layout.getPosition(button).getRightValue());
        assertEquals(Unit.PIXELS, layout.getPosition(button).getTopUnits());
        assertEquals(UNIT_UNSET, layout.getPosition(button).getBottomUnits());
        assertEquals(Unit.EM, layout.getPosition(button).getLeftUnits());
        assertEquals(UNIT_UNSET, layout.getPosition(button).getRightUnits());
        assertEquals(-1, layout.getPosition(button).getZIndex());
        assertEquals(PARTIAL_CSS, layout.getPosition(button).getCSSString());
    }

    /**
     * Add component, then set all position attributes with individual setters
     * for value and units; assert getters agree.
     */
    @Test
    public void testSetPosition() {
        final Float SIZE = Float.valueOf(12);
        AbsoluteLayout layout = new AbsoluteLayout();
        Button button = new Button();
        layout.addComponent(button);
        layout.getPosition(button).setTopValue(SIZE);
        layout.getPosition(button).setRightValue(SIZE);
        layout.getPosition(button).setBottomValue(SIZE);
        layout.getPosition(button).setLeftValue(SIZE);
        layout.getPosition(button).setTopUnits(Unit.CM);
        layout.getPosition(button).setRightUnits(Unit.EX);
        layout.getPosition(button).setBottomUnits(Unit.INCH);
        layout.getPosition(button).setLeftUnits(Unit.MM);
        assertEquals(SIZE, layout.getPosition(button).getTopValue());
        assertEquals(SIZE, layout.getPosition(button).getRightValue());
        assertEquals(SIZE, layout.getPosition(button).getBottomValue());
        assertEquals(SIZE, layout.getPosition(button).getLeftValue());
        assertEquals(Unit.CM, layout.getPosition(button).getTopUnits());
        assertEquals(Unit.EX, layout.getPosition(button).getRightUnits());
        assertEquals(Unit.INCH, layout.getPosition(button).getBottomUnits());
        assertEquals(Unit.MM, layout.getPosition(button).getLeftUnits());
    }

    /**
     * Add component, then set all position attributes with combined setters for
     * value and units; assert getters agree.
     */
    @Test
    public void testSetPosition2() {
        final Float SIZE = Float.valueOf(12);
        AbsoluteLayout layout = new AbsoluteLayout();
        Button button = new Button();
        layout.addComponent(button);
        layout.getPosition(button).setTop(SIZE, Unit.CM);
        layout.getPosition(button).setRight(SIZE, Unit.EX);
        layout.getPosition(button).setBottom(SIZE, Unit.INCH);
        layout.getPosition(button).setLeft(SIZE, Unit.MM);
        assertEquals(SIZE, layout.getPosition(button).getTopValue());
        assertEquals(SIZE, layout.getPosition(button).getRightValue());
        assertEquals(SIZE, layout.getPosition(button).getBottomValue());
        assertEquals(SIZE, layout.getPosition(button).getLeftValue());
        assertEquals(Unit.CM, layout.getPosition(button).getTopUnits());
        assertEquals(Unit.EX, layout.getPosition(button).getRightUnits());
        assertEquals(Unit.INCH, layout.getPosition(button).getBottomUnits());
        assertEquals(Unit.MM, layout.getPosition(button).getLeftUnits());
    }

    /**
     * Add component, set all attributes using CSS, unset some using method
     * calls, assert getters agree.
     */
    @Test
    public void testUnsetPosition() {
        AbsoluteLayout layout = new AbsoluteLayout();
        Button button = new Button();
        layout.addComponent(button, CSS);
        layout.getPosition(button).setTopValue(null);
        layout.getPosition(button).setRightValue(null);
        layout.getPosition(button).setBottomValue(null);
        layout.getPosition(button).setLeftValue(null);
        layout.getPosition(button).setZIndex(-1);
        assertNull(layout.getPosition(button).getTopValue());
        assertNull(layout.getPosition(button).getBottomValue());
        assertNull(layout.getPosition(button).getLeftValue());
        assertNull(layout.getPosition(button).getRightValue());
        // With everything unset the CSS representation collapses to empty.
        assertEquals("", layout.getPosition(button).getCSSString());
    }
}
| |
/*
* Copyright 2017 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.vr.sdk.samples.video360;
import android.app.ActivityManager;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.SurfaceTexture;
import android.graphics.SurfaceTexture.OnFrameAvailableListener;
import android.media.MediaPlayer;
import android.support.annotation.AnyThread;
import android.support.annotation.MainThread;
import android.support.annotation.Nullable;
import android.util.AttributeSet;
import android.view.ContextThemeWrapper;
import android.view.InputDevice;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.SeekBar;
import android.widget.TextView;
import com.google.vr.sdk.samples.video360.rendering.CanvasQuad;
/**
* Contains a UI that can be part of a standard 2D Android Activity or a VR Activity.
*
* <p>For 2D Activities, this View behaves like any other Android View. It receives events from the
* media player, updates the UI, and forwards user input to the appropriate component. In VR
* Activities, this View uses standard Android APIs to render its child Views to a texture that is
* displayed in VR. It also receives events from the Daydream Controller and forwards them to its
* child views.
*/
public class VideoUiView extends LinearLayout {
// These UI elements are only useful when the app is displaying a video.
private SeekBar seekBar;
private TextView statusText;
private final UiUpdater uiUpdater = new UiUpdater();
// Since MediaPlayer lacks synchronization for internal events, it should only be accessed on the
// main thread.
@Nullable
private MediaPlayer mediaPlayer;
// The canvasQuad is only not null when this View is in a VR Activity. It provides the backing
// canvas that standard Android child Views render to.
@Nullable
private CanvasQuad canvasQuad;
/**
 * Creates this View using standard XML inflation. This is the constructor used
 * for regular 2D Activities; VR Activities go through {@link #createForOpenGl}.
 */
public VideoUiView(Context context, AttributeSet attrs) {
    super(context, attrs);
}
/**
 * Creates this view for use in a VR scene.
 *
 * @param context the context used to set this View's theme
 * @param parent a parent view this view will be attached to such as the Activity's root View
 * @param quad the floating quad in the VR scene that will render this View
 */
@MainThread
public static VideoUiView createForOpenGl(Context context, ViewGroup parent, CanvasQuad quad) {
    // VR Activities fall back to the old Android default theme, so inflate
    // with an explicitly themed Context rather than the one passed in.
    Context themedContext = new ContextThemeWrapper(context, R.style.VrTheme);
    VideoUiView ui = (VideoUiView) View.inflate(themedContext, R.layout.video_ui, null);
    ui.canvasQuad = quad;
    ui.setLayoutParams(CanvasQuad.getLayoutParams());
    ui.setVisibility(View.VISIBLE);
    parent.addView(ui, 0);
    // In VR the enter/exit button exits VR, so the accessibility label differs
    // from the 2D Activity's default.
    ui.findViewById(R.id.enter_exit_vr).setContentDescription(
            ui.getResources().getString(R.string.exit_vr_label));
    return ui;
}
/**
 * Binds the media player in order to update video position if the Activity is showing a video.
 * This is also used to clear the bound mediaPlayer when the Activity exits to avoid trying to
 * access the mediaPlayer while it is in an invalid state.
 *
 * @param mediaPlayer the player backing the current video, or null to unbind
 */
@MainThread
public void setMediaPlayer(MediaPlayer mediaPlayer) {
    this.mediaPlayer = mediaPlayer;
    // Redraw so the UI immediately reflects the new (or cleared) player state.
    postInvalidate();
}
/** Ignores 2D touch events when this View is used in a VR Activity. */
@Override
public boolean onInterceptTouchEvent(MotionEvent event) {
    // Without a backing quad this View lives in a normal 2D Activity, so the
    // standard interception rules apply.
    if (canvasQuad == null) {
        return super.onInterceptTouchEvent(event);
    }
    // If your app uses UI Automator tests, it's useful to have this touch system handle touch
    // events created during tests. This allows you to create UI tests that work while the app
    // is in VR.
    if (ActivityManager.isRunningInTestHarness()) {
        return false;
    }
    // In VR mode, SceneRenderer tags its synthetic events as SOURCE_GAMEPAD and
    // those must reach the child Views (return false). Any other source means
    // the user accidentally touched the real screen where this hidden View is
    // attached, so intercept and suppress it (return true).
    return event.getSource() != InputDevice.SOURCE_GAMEPAD;
}
/** Handles standard Android touch events or synthetic VR events. */
@Override
public boolean onTouchEvent(MotionEvent event) {
if (canvasQuad != null) {
// In VR mode so process controller events & ignore touchscreen events.
if (event.getSource() != InputDevice.SOURCE_GAMEPAD) {
// Tell the system that we handled the event. This prevents children from seeing the event.
return true;
} else {
// Have the system send the event to child Views and they will handle clicks.
return super.onTouchEvent(event);
}
} else {
// Not in VR mode so use standard behavior.
return super.onTouchEvent(event);
}
}
/** Installs the View's event handlers. */
@Override
public void onFinishInflate() {
super.onFinishInflate();
final ImageButton playPauseToggle = (ImageButton) findViewById(R.id.play_pause_toggle);
playPauseToggle.setOnClickListener(
new OnClickListener() {
@Override
public void onClick(View v) {
if (mediaPlayer == null) {
return;
}
if (mediaPlayer.isPlaying()) {
mediaPlayer.pause();
playPauseToggle.setBackgroundResource(R.drawable.play_button);
playPauseToggle.setContentDescription(getResources().getString(R.string.play_label));
} else {
mediaPlayer.start();
playPauseToggle.setBackgroundResource(R.drawable.pause_button);
playPauseToggle.setContentDescription(getResources().getString(R.string.pause_label));
}
}
});
seekBar = (SeekBar) findViewById(R.id.seek_bar);
seekBar.setOnSeekBarChangeListener(new SeekBarListener());
statusText = (TextView) findViewById(R.id.status_text);
}
/** Sets the OnClickListener used to switch Activities. */
@MainThread
public void setVrIconClickListener(OnClickListener listener) {
ImageButton vrIcon = (ImageButton) findViewById(R.id.enter_exit_vr);
vrIcon.setOnClickListener(listener);
}
  /**
   * Renders this View and its children to either Android View hierarchy's Canvas or to the VR
   * scene's CanvasQuad.
   *
   * @param androidUiCanvas used in 2D mode to render children to the screen; ignored in VR mode
   */
  @Override
  public void dispatchDraw(Canvas androidUiCanvas) {
    if (canvasQuad == null) {
      // Handle non-VR rendering.
      super.dispatchDraw(androidUiCanvas);
      return;
    }
    // Handle VR rendering: draw into the quad's canvas instead of the one Android gave us.
    Canvas glCanvas = canvasQuad.lockCanvas();
    if (glCanvas == null) {
      // This happens if Android tries to draw this View before GL initialization completes. We need
      // to retry until the draw call happens after GL invalidation.
      postInvalidate();
      return;
    }
    // Clear the canvas first.
    glCanvas.drawColor(Color.BLACK);
    // Have Android render the child views.
    super.dispatchDraw(glCanvas);
    // Commit the changes. Must be called after lockCanvas() succeeds so the quad isn't left locked.
    canvasQuad.unlockCanvasAndPost(glCanvas);
  }
  /**
   * Gets the listener used to update the seek bar's position on each new video frame.
   *
   * @return a listener that can be passed to
   * {@link SurfaceTexture#setOnFrameAvailableListener(OnFrameAvailableListener)}
   */
  public SurfaceTexture.OnFrameAvailableListener getFrameListener() {
    // Single shared UiUpdater instance; it posts its work to the main thread.
    return uiUpdater;
  }
/** Updates the seek bar and status text. */
private final class UiUpdater implements SurfaceTexture.OnFrameAvailableListener {
private int videoDurationMs = 0;
// onFrameAvailable is called on an arbitrary thread, but we can only access mediaPlayer on the
// main thread.
private Runnable uiThreadUpdater = new Runnable() {
@Override
public void run() {
if (mediaPlayer == null) {
return;
}
if (videoDurationMs == 0) {
videoDurationMs = mediaPlayer.getDuration();
seekBar.setMax(videoDurationMs);
}
int positionMs = mediaPlayer.getCurrentPosition();
seekBar.setProgress(positionMs);
StringBuilder status = new StringBuilder();
status.append(String.format("%.2f", positionMs / 1000f));
status.append(" / ");
status.append(videoDurationMs / 1000);
statusText.setText(status.toString());
if (canvasQuad != null) {
// When in VR, we will need to manually invalidate this View.
invalidate();
}
}
};
@AnyThread
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
post(uiThreadUpdater);
}
}
  /** Handles the user seeking to a new position in the video. */
  private final class SeekBarListener implements SeekBar.OnSeekBarChangeListener {
    @Override
    public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
      // Only honor user-initiated changes; programmatic setProgress calls from the per-frame
      // UI update must not trigger a seek, or playback would stutter in a feedback loop.
      if (fromUser && mediaPlayer != null) {
        mediaPlayer.seekTo(progress);
      } // else this was from the ActivityEventHandler.onNewFrame()'s seekBar.setProgress update.
    }
    @Override
    public void onStartTrackingTouch(SeekBar seekBar) {}
    @Override
    public void onStopTrackingTouch(SeekBar seekBar) {}
  }
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2018 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.step;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.pentaho.di.core.ResultFile;
import org.pentaho.di.core.RowSet;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.logging.HasLogChannelInterface;
import org.pentaho.di.core.logging.LogChannelInterface;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.step.BaseStepData.StepExecutionStatus;
import org.pentaho.metastore.api.IMetaStore;
/**
 * The interface that any transformation step or plugin needs to implement.
 *
 * Created on 12-AUG-2004
 *
 * @author Matt
 *
 */
public interface StepInterface extends VariableSpace, HasLogChannelInterface {
  /**
   * @return the transformation that is executing this step
   */
  public Trans getTrans();
  /**
   * Perform the equivalent of processing one row. Typically this means reading a row from input (getRow()) and passing
   * a row to output (putRow)).
   *
   * @param smi
   *          The steps metadata to work with
   * @param sdi
   *          The steps temporary working data to work with (database connections, result sets, caches, temporary
   *          variables, etc.)
   * @return false if no more rows can be processed or an error occurred.
   * @throws KettleException
   *           in case an unexpected error occurs while processing the row
   */
  public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException;
  /**
   * This method checks if the step is capable of processing at least one row.
   * <p>
   * For example, if a step has no input records but needs at least one to function, it will return false.
   *
   * @return true if the step can process a row.
   *
   */
  public boolean canProcessOneRow();
  /**
   * Initialize and do work where other steps need to wait for...
   *
   * @param stepMetaInterface
   *          The metadata to work with
   * @param stepDataInterface
   *          The data to initialize
   * @return true if the step initialized correctly, false if initialization failed
   */
  public boolean init( StepMetaInterface stepMetaInterface, StepDataInterface stepDataInterface );
  /**
   * Dispose of this step: close files, empty logs, etc.
   *
   * @param sii
   *          The metadata to work with
   * @param sdi
   *          The data to dispose of
   */
  public void dispose( StepMetaInterface sii, StepDataInterface sdi );
  /**
   * Mark the start time of the step.
   *
   */
  public void markStart();
  /**
   * Mark the end time of the step.
   *
   */
  public void markStop();
  /**
   * Stop running operations...
   *
   * @param stepMetaInterface
   *          The metadata that might be needed by the step to stop running.
   * @param stepDataInterface
   *          The interface to the step data containing the connections, resultsets, open files, etc.
   * @throws KettleException
   *           in case something goes wrong while stopping the step
   */
  public void stopRunning( StepMetaInterface stepMetaInterface, StepDataInterface stepDataInterface ) throws KettleException;
  /**
   * @return true if the step is running after having been initialized
   */
  public boolean isRunning();
  /**
   * Flag the step as running or not
   *
   * @param running
   *          the running flag to set
   */
  public void setRunning( boolean running );
  /**
   * @return True if the step is marked as stopped. Execution should stop immediately.
   */
  public boolean isStopped();
  /**
   * @param stopped
   *          true if the step needs to be stopped
   */
  public void setStopped( boolean stopped );
  /**
   * @param stopped
   *          true if the step needs to be safe stopped
   */
  default void setSafeStopped( boolean stopped ) {
  }
  /**
   * @return true if step is safe stopped.
   */
  default boolean isSafeStopped() {
    return false;
  }
  /**
   * @return True if the step is paused
   */
  public boolean isPaused();
  /**
   * Flags all rowsets as stopped/completed/finished.
   */
  public void stopAll();
  /**
   * Pause a running step
   */
  public void pauseRunning();
  /**
   * Resume a running step
   */
  public void resumeRunning();
  /**
   * Get the name of the step.
   *
   * @return the name of the step
   */
  public String getStepname();
  /**
   * @return The steps copy number (default 0)
   */
  public int getCopy();
  /**
   * @return the type ID of the step...
   */
  public String getStepID();
  /**
   * Get the number of errors
   *
   * @return the number of errors
   */
  public long getErrors();
  /**
   * Sets the number of errors
   *
   * @param errors
   *          the number of errors to set
   */
  public void setErrors( long errors );
  /**
   * @return Returns the linesInput.
   */
  public long getLinesInput();
  /**
   * @return Returns the linesOutput.
   */
  public long getLinesOutput();
  /**
   * @return Returns the linesRead.
   */
  public long getLinesRead();
  /**
   * @return Returns the linesWritten.
   */
  public long getLinesWritten();
  /**
   * @return Returns the linesUpdated.
   */
  public long getLinesUpdated();
  /**
   * @param linesRejected
   *          sets the number of lines rejected by error handling.
   */
  public void setLinesRejected( long linesRejected );
  /**
   * @return Returns the lines rejected by error handling.
   */
  public long getLinesRejected();
  /**
   * Put a row on the destination rowsets.
   *
   * @param row
   *          The row metadata to send to the destination steps
   * @param data
   *          The row data to send to the destination steps
   */
  public void putRow( RowMetaInterface row, Object[] data ) throws KettleException;
  /**
   * @return a row from the source step(s).
   */
  public Object[] getRow() throws KettleException;
  /**
   * Signal output done to destination steps
   */
  public void setOutputDone();
  /**
   * Add a rowlistener to the step allowing you to inspect (or manipulate, be careful) the rows coming in or exiting the
   * step.
   *
   * @param rowListener
   *          the rowlistener to add
   */
  public void addRowListener( RowListener rowListener );
  /**
   * Remove a rowlistener from this step.
   *
   * @param rowListener
   *          the rowlistener to remove
   */
  public void removeRowListener( RowListener rowListener );
  /**
   * @return a list of the installed RowListeners
   */
  public List<RowListener> getRowListeners();
  /**
   * @return The list of active input rowsets for the step
   */
  public List<RowSet> getInputRowSets();
  /**
   * @return The list of active output rowsets for the step
   */
  public List<RowSet> getOutputRowSets();
  /**
   * @return true if the step is running partitioned
   */
  public boolean isPartitioned();
  /**
   * @param partitionID
   *          the partitionID to set
   */
  public void setPartitionID( String partitionID );
  /**
   * @return the steps partition ID
   */
  public String getPartitionID();
  /**
   * Call this method typically, after ALL the slave transformations in a clustered run have finished.
   */
  public void cleanup();
  /**
   * This method is executed by Trans right before the threads start and right after initialization.<br>
   * <br>
   * <b>!!! A plugin implementing this method should make sure to also call <i>super.initBeforeStart();</i> !!!</b>
   *
   * @throws KettleStepException
   *           In case there is an error
   */
  public void initBeforeStart() throws KettleStepException;
  /**
   * Attach a step listener to be notified when a step arrives in a certain state. (finished)
   *
   * @param stepListener
   *          The listener to add to the step
   */
  public void addStepListener( StepListener stepListener );
  /**
   * @return true if the thread is a special mapping step
   */
  public boolean isMapping();
  /**
   * @return The metadata for this step
   */
  public StepMeta getStepMeta();
  /**
   * @return the logging channel for this step
   */
  public LogChannelInterface getLogChannel();
  /**
   * @param usingThreadPriorityManagment
   *          set to true to actively manage priorities of step threads
   */
  public void setUsingThreadPriorityManagment( boolean usingThreadPriorityManagment );
  /**
   * @return true if we are actively managing priorities of step threads
   */
  public boolean isUsingThreadPriorityManagment();
  /**
   * @return The total amount of rows in the input buffers
   */
  public int rowsetInputSize();
  /**
   * @return The total amount of rows in the output buffers
   */
  public int rowsetOutputSize();
  /**
   * @return The number of "processed" lines of a step. Well, a representable metric for that anyway.
   */
  public long getProcessed();
  /**
   * @return The result files for this step
   */
  public Map<String, ResultFile> getResultFiles();
  /**
   * @return the description as in {@link StepDataInterface}
   */
  public StepExecutionStatus getStatus();
  /**
   * @return The number of ms that this step has been running
   */
  public long getRuntime();
  /**
   * To be used to flag an error output channel of a step prior to execution for performance reasons.
   */
  public void identifyErrorOutput();
  /**
   * @param partitioned
   *          true if this step is partitioned
   */
  public void setPartitioned( boolean partitioned );
  /**
   * @param partitioningMethod
   *          The repartitioning method
   */
  public void setRepartitioning( int partitioningMethod );
  /**
   * Calling this method will alert the step that we finished passing a batch of records to the step. Specifically for
   * steps like "Sort Rows" it means that the buffered rows can be sorted and passed on.
   *
   * @throws KettleException
   *           In case an error occurs during the processing of the batch of rows.
   */
  public void batchComplete() throws KettleException;
  /**
   * Pass along the metastore to use when loading external elements at runtime.
   *
   * @param metaStore
   *          The metastore to use
   */
  public void setMetaStore( IMetaStore metaStore );
  /**
   * @return The metastore that the step uses to load external elements from.
   */
  public IMetaStore getMetaStore();
  /**
   * @param repository
   *          The repository used by the step to load and reference Kettle objects with at runtime
   */
  public void setRepository( Repository repository );
  /**
   * @return The repository used by the step to load and reference Kettle objects with at runtime
   */
  public Repository getRepository();
  /**
   * @return the index of the active (current) output row set
   */
  public int getCurrentOutputRowSetNr();
  /**
   * @param index
   *          Sets the index of the active (current) output row set to use.
   */
  public void setCurrentOutputRowSetNr( int index );
  /**
   * @return the index of the active (current) input row set
   */
  public int getCurrentInputRowSetNr();
  /**
   * @param index
   *          Sets the index of the active (current) input row set to use.
   */
  public void setCurrentInputRowSetNr( int index );
  /**
   * @return per-sub-component statuses; empty for steps without sub-components
   */
  default Collection<StepStatus> subStatuses() {
    return Collections.emptyList();
  }
  /**
   * Convenience method to register an additional input row set with this step.
   */
  default void addRowSetToInputRowSets( RowSet rowSet ) {
    getInputRowSets().add( rowSet );
  }
  /**
   * Convenience method to register an additional output row set with this step.
   */
  default void addRowSetToOutputRowSets( RowSet rowSet ) {
    getOutputRowSets().add( rowSet );
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.protocol.stomp;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.activemq.artemis.api.core.ActiveMQBuffer;
import org.apache.activemq.artemis.api.core.ActiveMQBuffers;
/**
 * Represents all the data in a STOMP frame: a command, an ordered set of headers and an
 * optional body.
 */
public class StompFrame
{
   // Frames are NUL-terminated; the trailing newline eases interop with line-oriented clients.
   private static final byte[] END_OF_FRAME = new byte[]{0, '\n'};

   protected final String command;

   // LinkedHashMap (see constructor) so headers are encoded in insertion order.
   protected final Map<String, String> headers;

   // Lazily decoded UTF-8 view of bytesBody; see getBody().
   private String body;

   protected byte[] bytesBody;

   // Cached wire encoding; built once by toActiveMQBuffer() and reused afterwards.
   protected ActiveMQBuffer buffer = null;

   // Size in bytes of the encoded frame; valid once buffer has been built.
   // NOTE(review): a ping frame returns before size is assigned — confirm callers never ask
   // for the encoded size of a ping.
   protected int size;

   private boolean disconnect;

   private boolean isPing;

   public StompFrame(String command)
   {
      this(command, false);
   }

   public StompFrame(String command, boolean disconnect)
   {
      this.command = command;
      this.headers = new LinkedHashMap<String, String>();
      this.disconnect = disconnect;
   }

   public StompFrame(String command, Map<String, String> headers,
                     byte[] content)
   {
      this.command = command;
      this.headers = headers;
      this.bytesBody = content;
   }

   public String getCommand()
   {
      return command;
   }

   /**
    * @return the size in bytes of the encoded frame, encoding it first if necessary
    */
   public int getEncodedSize() throws Exception
   {
      if (buffer == null)
      {
         buffer = toActiveMQBuffer();
      }
      return size;
   }

   @Override
   public String toString()
   {
      // Fixed: the closing ']' was missing from the debug representation.
      return "StompFrame[command=" + command + ", headers=" + headers + ", content= " + this.body + " bytes " +
         Arrays.toString(bytesBody) + "]";
   }

   public boolean isPing()
   {
      return isPing;
   }

   public void setPing(boolean ping)
   {
      isPing = ping;
   }

   /**
    * Encodes this frame into a buffer ready to be written to the wire, caching the result.
    * A subsequent call rewinds the cached buffer so it can be read again.
    */
   public ActiveMQBuffer toActiveMQBuffer() throws Exception
   {
      if (buffer == null)
      {
         // Reserve headroom for the command and headers in addition to the body.
         if (bytesBody != null)
         {
            buffer = ActiveMQBuffers.dynamicBuffer(bytesBody.length + 512);
         }
         else
         {
            buffer = ActiveMQBuffers.dynamicBuffer(512);
         }
         if (isPing())
         {
            // A heartbeat frame is a single newline byte.
            buffer.writeByte((byte) 10);
            return buffer;
         }
         // StringBuilder: this is method-local, so the synchronized StringBuffer is unnecessary.
         StringBuilder head = new StringBuilder();
         head.append(command);
         head.append(Stomp.NEWLINE);
         // Output the headers.
         for (Map.Entry<String, String> header : headers.entrySet())
         {
            head.append(header.getKey());
            head.append(Stomp.Headers.SEPARATOR);
            head.append(header.getValue());
            head.append(Stomp.NEWLINE);
         }
         // Add a newline to separate the headers from the content.
         head.append(Stomp.NEWLINE);
         buffer.writeBytes(head.toString().getBytes(StandardCharsets.UTF_8));
         if (bytesBody != null)
         {
            buffer.writeBytes(bytesBody);
         }
         buffer.writeBytes(END_OF_FRAME);
         size = buffer.writerIndex();
      }
      else
      {
         // Already encoded; rewind so the cached buffer can be read again.
         buffer.readerIndex(0);
      }
      return buffer;
   }

   public String getHeader(String key)
   {
      return headers.get(key);
   }

   public void addHeader(String key, String val)
   {
      headers.put(key, val);
   }

   public Map<String, String> getHeadersMap()
   {
      return headers;
   }

   /**
    * A single STOMP header key/value pair with access to its escaped forms.
    */
   public class Header
   {
      public String key;
      public String val;

      public Header(String key, String val)
      {
         this.key = key;
         this.val = val;
      }

      public String getEncodedKey()
      {
         return encode(key);
      }

      public String getEncodedValue()
      {
         return encode(val);
      }
   }

   /**
    * Escapes a header key or value per the STOMP header-escaping rules: newline, carriage
    * return, backslash and ':' are each replaced by a two-character backslash sequence.
    */
   public String encode(String str)
   {
      int len = str.length();
      // Worst case every character needs escaping, doubling the length.
      char[] escaped = new char[2 * len];
      int out = 0;
      for (int i = 0; i < len; i++)
      {
         char c = str.charAt(i);
         switch (c)
         {
            case '\n':
               escaped[out++] = '\\';
               escaped[out] = 'n';
               break;
            case '\r':
               escaped[out++] = '\\';
               escaped[out] = 'r';
               break;
            case '\\':
               escaped[out++] = '\\';
               escaped[out] = '\\';
               break;
            case ':':
               escaped[out++] = '\\';
               escaped[out] = 'c';
               break;
            default:
               escaped[out] = c;
         }
         out++;
      }
      return new String(escaped, 0, out);
   }

   public void setBody(String body)
   {
      this.body = body;
      this.bytesBody = body.getBytes(StandardCharsets.UTF_8);
   }

   public boolean hasHeader(String key)
   {
      return headers.containsKey(key);
   }

   /**
    * @return the body decoded as UTF-8, or null if the frame has no body
    */
   public String getBody()
   {
      if (body == null && bytesBody != null)
      {
         body = new String(bytesBody, StandardCharsets.UTF_8);
      }
      return body;
   }

   //Since 1.1, there is a content-type header that needs to take care of
   public byte[] getBodyAsBytes()
   {
      return bytesBody;
   }

   public boolean needsDisconnect()
   {
      return disconnect;
   }

   public void setByteBody(byte[] content)
   {
      this.bytesBody = content;
   }

   public void setNeedsDisconnect(boolean b)
   {
      disconnect = b;
   }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hive.hplsql;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.ParseException;
public class Arguments {
  // Populated by parse(); accessor methods must only be called after a successful parse().
  private CommandLine commandLine;
  private Options options = new Options();
  String execString;
  String fileName;
  String main;
  // Combined variables from --hiveconf, --hivevar and -d/--define.
  Map<String, String> vars = new HashMap<String, String>();

  @SuppressWarnings("static-access")
  Arguments() {
    // -e 'query'
    options.addOption(OptionBuilder
        .hasArg()
        .withArgName("quoted-query-string")
        .withDescription("HPL/SQL from command line")
        .create('e'));
    // -f <file>
    options.addOption(OptionBuilder
        .hasArg()
        .withArgName("filename")
        .withDescription("HPL/SQL from a file")
        .create('f'));
    // -main entry_point_name
    options.addOption(OptionBuilder
        .hasArg()
        .withArgName("procname")
        .withDescription("Entry point (procedure or function name)")
        .create("main"));
    // -hiveconf x=y
    options.addOption(OptionBuilder
        .withValueSeparator()
        .hasArgs(2)
        .withArgName("property=value")
        .withLongOpt("hiveconf")
        .withDescription("Value for given property")
        .create());
    // Substitution option -d, --define
    options.addOption(OptionBuilder
        .withValueSeparator()
        .hasArgs(2)
        .withArgName("key=value")
        .withLongOpt("define")
        .withDescription("Variable substitution e.g. -d A=B or --define A=B")
        .create('d'));
    // Substitution option --hivevar
    options.addOption(OptionBuilder
        .withValueSeparator()
        .hasArgs(2)
        .withArgName("key=value")
        .withLongOpt("hivevar")
        .withDescription("Variable substitution e.g. --hivevar A=B")
        .create());
    // [-version|--version]
    options.addOption(new Option("version", "version", false, "Print HPL/SQL version"));
    // [-trace|--trace]
    options.addOption(new Option("trace", "trace", false, "Print debug information"));
    // [-offline|--offline]
    options.addOption(new Option("offline", "offline", false, "Offline mode - skip SQL execution"));
    // [-H|--help]
    options.addOption(new Option("H", "help", false, "Print help information"));
  }

  /**
   * Parse the command line arguments
   *
   * @param args raw command-line arguments
   * @return true if parsing succeeded, false if the arguments were invalid
   */
  public boolean parse(String[] args) {
    try {
      commandLine = new GnuParser().parse(options, args);
      execString = commandLine.getOptionValue('e');
      fileName = commandLine.getOptionValue('f');
      main = commandLine.getOptionValue("main");
      // All three variable-style options feed the same substitution map; later options
      // overwrite earlier ones for duplicate keys (hiveconf, then hivevar, then define).
      copyOptionProperties("hiveconf");
      copyOptionProperties("hivevar");
      copyOptionProperties("define");
    } catch (ParseException e) {
      System.err.println(e.getMessage());
      return false;
    }
    return true;
  }

  /** Copies all key=value pairs of the given option into the variables map. */
  private void copyOptionProperties(String option) {
    Properties p = commandLine.getOptionProperties(option);
    for (String key : p.stringPropertyNames()) {
      vars.put(key, p.getProperty(key));
    }
  }

  /**
   * Get the value of execution option -e
   */
  public String getExecString() {
    return execString;
  }

  /**
   * Get the value of file option -f
   */
  public String getFileName() {
    return fileName;
  }

  /**
   * Get the value of -main option
   */
  public String getMain() {
    return main;
  }

  /**
   * Get the variables
   */
  public Map<String, String> getVars() {
    return vars;
  }

  /**
   * Test whether version option is set
   */
  public boolean hasVersionOption() {
    return commandLine.hasOption("version");
  }

  /**
   * Test whether debug option is set
   */
  public boolean hasTraceOption() {
    return commandLine.hasOption("trace");
  }

  /**
   * Test whether offline option is set
   */
  public boolean hasOfflineOption() {
    return commandLine.hasOption("offline");
  }

  /**
   * Test whether help option is set
   */
  public boolean hasHelpOption() {
    return commandLine.hasOption('H');
  }

  /**
   * Print help information
   */
  public void printHelp() {
    new HelpFormatter().printHelp("hplsql", options);
  }
}
| |
/*
* Copyright (c) 2001-2004 Caucho Technology, Inc. All rights reserved.
*
* The Apache Software License, Version 1.1
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution, if
* any, must include the following acknowlegement:
* "This product includes software developed by the
* Caucho Technology (http://www.caucho.com/)."
* Alternately, this acknowlegement may appear in the software itself,
* if and wherever such third-party acknowlegements normally appear.
*
* 4. The names "Burlap", "Resin", and "Caucho" must not be used to
* endorse or promote products derived from this software without prior
* written permission. For written permission, please contact
* info@caucho.com.
*
* 5. Products derived from this software may not be called "Resin"
* nor may "Resin" appear in their names without prior written
* permission of Caucho Technology.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL CAUCHO TECHNOLOGY OR ITS CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
* OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
* OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* @author Scott Ferguson
*/
package com.alibaba.com.caucho.hessian.io;
import java.io.IOException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.logging.*;
/**
* Serializing an object for known object types.
*/
public class BeanSerializer extends AbstractSerializer {
private static final Logger log
= Logger.getLogger(BeanSerializer.class.getName());
private static final Object []NULL_ARGS = new Object[0];
private Method []_methods;
private String []_names;
private Object _writeReplaceFactory;
private Method _writeReplace;
public BeanSerializer(Class cl, ClassLoader loader)
{
introspectWriteReplace(cl, loader);
ArrayList primitiveMethods = new ArrayList();
ArrayList compoundMethods = new ArrayList();
for (; cl != null; cl = cl.getSuperclass()) {
Method []methods = cl.getDeclaredMethods();
for (int i = 0; i < methods.length; i++) {
Method method = methods[i];
if (Modifier.isStatic(method.getModifiers()))
continue;
if (method.getParameterTypes().length != 0)
continue;
String name = method.getName();
if (! name.startsWith("get"))
continue;
Class type = method.getReturnType();
if (type.equals(void.class))
continue;
if (findSetter(methods, name, type) == null)
continue;
// XXX: could parameterize the handler to only deal with public
method.setAccessible(true);
if (type.isPrimitive()
|| type.getName().startsWith("java.lang.")
&& ! type.equals(Object.class))
primitiveMethods.add(method);
else
compoundMethods.add(method);
}
}
ArrayList methodList = new ArrayList();
methodList.addAll(primitiveMethods);
methodList.addAll(compoundMethods);
Collections.sort(methodList, new MethodNameCmp());
_methods = new Method[methodList.size()];
methodList.toArray(_methods);
_names = new String[_methods.length];
for (int i = 0; i < _methods.length; i++) {
String name = _methods[i].getName();
name = name.substring(3);
int j = 0;
for (; j < name.length() && Character.isUpperCase(name.charAt(j)); j++) {
}
if (j == 1)
name = name.substring(0, j).toLowerCase() + name.substring(j);
else if (j > 1)
name = name.substring(0, j - 1).toLowerCase() + name.substring(j - 1);
_names[i] = name;
}
}
private void introspectWriteReplace(Class cl, ClassLoader loader)
{
try {
String className = cl.getName() + "HessianSerializer";
Class serializerClass = Class.forName(className, false, loader);
Object serializerObject = serializerClass.newInstance();
Method writeReplace = getWriteReplace(serializerClass, cl);
if (writeReplace != null) {
_writeReplaceFactory = serializerObject;
_writeReplace = writeReplace;
return;
}
} catch (ClassNotFoundException e) {
} catch (Exception e) {
log.log(Level.FINER, e.toString(), e);
}
_writeReplace = getWriteReplace(cl);
}
/**
* Returns the writeReplace method
*/
protected Method getWriteReplace(Class cl)
{
for (; cl != null; cl = cl.getSuperclass()) {
Method []methods = cl.getDeclaredMethods();
for (int i = 0; i < methods.length; i++) {
Method method = methods[i];
if (method.getName().equals("writeReplace") &&
method.getParameterTypes().length == 0)
return method;
}
}
return null;
}
/**
* Returns the writeReplace method
*/
protected Method getWriteReplace(Class cl, Class param)
{
for (; cl != null; cl = cl.getSuperclass()) {
for (Method method : cl.getDeclaredMethods()) {
if (method.getName().equals("writeReplace")
&& method.getParameterTypes().length == 1
&& param.equals(method.getParameterTypes()[0]))
return method;
}
}
return null;
}
/**
 * Serializes the object's getter values to the Hessian output stream.
 * <p>
 * If a writeReplace method was discovered during introspection, the
 * replacement object is serialized instead and the reference table is
 * re-pointed at the original. Otherwise the object is written either as a
 * Hessian 1.1 map (name/value pairs) or in the compact Hessian 2.0 form
 * (type definition once, then values only), depending on what
 * writeObjectBegin reports.
 *
 * @param obj the object to serialize
 * @param out the Hessian output stream
 * @throws IOException on stream failure
 */
public void writeObject(Object obj, AbstractHessianOutput out)
  throws IOException
{
  if (out.addRef(obj))
    return;

  Class cl = obj.getClass();

  try {
    if (_writeReplace != null) {
      Object repl;

      if (_writeReplaceFactory != null)
        repl = _writeReplace.invoke(_writeReplaceFactory, obj);
      else
        repl = _writeReplace.invoke(obj);

      // keep the reference table pointing at the original object so later
      // occurrences of obj resolve to the replacement's ref
      out.removeRef(obj);
      out.writeObject(repl);
      out.replaceRef(repl, obj);

      return;
    }
  } catch (Exception e) {
    log.log(Level.FINER, e.toString(), e);
  }

  int ref = out.writeObjectBegin(cl.getName());

  if (ref < -1) {
    // Hessian 1.1 encodes the object as a map of name/value pairs
    for (int i = 0; i < _methods.length; i++) {
      out.writeString(_names[i]);
      out.writeObject(invokeGetter(obj, i));
    }

    out.writeMapEnd();
  }
  else {
    if (ref == -1) {
      // first occurrence of this type: emit the field-name definition
      out.writeInt(_names.length);

      for (int i = 0; i < _names.length; i++)
        out.writeString(_names[i]);

      out.writeObjectBegin(cl.getName());
    }

    // compact form: values only, in definition order
    for (int i = 0; i < _methods.length; i++)
      out.writeObject(invokeGetter(obj, i));
  }
}

/**
 * Invokes the i'th getter, returning null (and logging at FINER, matching
 * the rest of this class — the original map branch logged at FINE) so a
 * single failing property does not abort serialization.
 */
private Object invokeGetter(Object obj, int i)
{
  try {
    return _methods[i].invoke(obj, (Object []) null);
  } catch (Exception e) {
    log.log(Level.FINER, e.toString(), e);
  }

  return null;
}
/**
 * Finds the setter matching a getter, i.e. a {@code void setXxx(arg)} method
 * whose name is derived from the getter name and whose single parameter has
 * exactly the given type.
 *
 * @return the matching setter, or null if none exists
 */
private Method findSetter(Method []methods, String getterName, Class arg)
{
  String setterName = "set" + getterName.substring(3);

  for (Method candidate : methods) {
    if (! candidate.getName().equals(setterName))
      continue;

    if (! candidate.getReturnType().equals(void.class))
      continue;

    Class[] paramTypes = candidate.getParameterTypes();

    if (paramTypes.length == 1 && paramTypes[0].equals(arg))
      return candidate;
  }

  return null;
}
/** Orders methods alphabetically by simple name. */
static class MethodNameCmp implements Comparator<Method> {
  @Override
  public int compare(Method a, Method b)
  {
    return a.getName().compareTo(b.getName());
  }
}
}
| |
package com.dieselpoint.raft;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import org.junit.Test;
import org.slf4j.MDC;
import org.slf4j.bridge.SLF4JBridgeHandler;
import com.dieselpoint.buffers.ByteArray;
import com.dieselpoint.cluster.common.Command;
import com.dieselpoint.cluster.common.CommandLog;
import com.dieselpoint.cluster.common.DummyStateMachine;
import com.dieselpoint.cluster.common.Transport;
import com.dieselpoint.cluster.raft.RaftClient;
import com.dieselpoint.cluster.raft.RaftSerializer;
import com.dieselpoint.cluster.raft.RaftServer;
import com.dieselpoint.log.disk.DiskLog;
import com.dieselpoint.testutils.LoggerSetup;
import com.dieselpoint.transport.jaxrs.JaxrsTransport;
import com.dieselpoint.util.FileUtil;
/**
 * Randomized chaos test for the raft cluster: starts SERVER_COUNT servers,
 * pumps monotonically increasing commands through a client while randomly
 * stopping and restarting servers, then verifies that every server's commit
 * log and state-machine dump is equal to, or a prefix of, every other one.
 */
public class TestRaftServer {

    public static final int PORT = 50053;
    public static final int RUNFOR = 1 * 60 * 1000; // ms
    public static final int SERVER_COUNT = 3;

    // Root directory holding one instance dir per server; set by doConfig().
    String maindir;
    // Payload counter; each scheduled client tick sends the next int.
    int commandInt;

    @Test
    public void test() throws IOException, InterruptedException, ExecutionException {
        maindir = doConfig();
        System.out.println("starting test raft server");
        int serverCount = SERVER_COUNT;

        List<String> members = new ArrayList<>();
        for (int i = 0; i < serverCount; i++) {
            members.add("127.0.0." + (i + 1));
        }

        List<RaftServer> servers = new ArrayList<>();
        for (int i = 0; i < serverCount; i++) {
            String rafthost = "127.0.0." + (i + 1);
            RaftServer rs = getRaftServer(rafthost, PORT, members);
            servers.add(rs);
            //rs.getCommitLog().dump(instanceDir + "/startdump");
        }
        for (RaftServer server : servers) {
            server.start();
        }

        RaftClient client = getRaftClient(members);
        client.setTimeout(1000);

        // Continuously send commands while servers are bounced underneath
        // the client; the int payload makes dumps easy to diff.
        ScheduledFuture<?> clientProcessFuture = Executors.newSingleThreadScheduledExecutor().scheduleAtFixedRate(() -> {
            ByteArray commandBytes = new ByteArray();
            commandBytes.appendInt(commandInt);
            commandInt++;
            Command command = new Command(Command.TYPE_STATEMACHINE_COMMAND, commandBytes);
            client.send(command);
        }, 1, 100, TimeUnit.MILLISECONDS);

        randomStopStart(servers, members);
        // they should all be in a started state here

        // stop sending commands
        System.out.println("starting cool down period");
        clientProcessFuture.cancel(false);

        // wait a little while for the commands to be processed and for
        // all the servers to catch up
        Thread.sleep(10000);

        // stop them all
        for (RaftServer server : servers) {
            server.stop();
        }

        System.out.println("dumping logs");
        // dump logs
        List<String> commitLogDumpFiles = new ArrayList<>();
        List<String> stateMachineDumpFiles = new ArrayList<>();
        for (RaftServer server : servers) {
            File dumpFile = getCommitLog(server.getHost()).dump();
            String dump = FileUtil.getFileAsString(dumpFile);
            commitLogDumpFiles.add(dump);
            System.out.println(dumpFile + " size:" + dump.length());

            DummyStateMachine sm = (DummyStateMachine) server.getStateMachine();
            File smDumpFile = sm.dump(server.getDataDir());
            String d = FileUtil.getFileAsString(smDumpFile);
            stateMachineDumpFiles.add(d);
            // report the dump string's length, matching the commit-log line
            // above (the original printed the File's on-disk byte length)
            System.out.println(smDumpFile + " size:" + d.length());
        }

        // compare every dump file against every other one
        // every file must equal or be a prefix of every other file
        System.out.println("Comparing commit logs...");
        compareDumps(commitLogDumpFiles);
        System.out.println("Comparing statemachine logs...");
        compareDumps(stateMachineDumpFiles);
    }

    /**
     * Verifies each dump equals, or is a prefix of, every other dump.
     * Throws AssertionError on mismatch so the test actually fails
     * (the original only printed "FAIL" and passed anyway).
     */
    private void compareDumps(List<String> dumps) {
        boolean success = true;
        for (int i = 0; i < dumps.size(); i++) {
            String firstFile = dumps.get(i);
            for (int j = i + 1; j < dumps.size(); j++) {
                String otherFile = dumps.get(j);
                if (!firstFile.startsWith(otherFile) && !otherFile.startsWith(firstFile)) {
                    success = false;
                    System.out.println("FAIL: dump files don't match. first:" + i + " second:" + j);
                }
            }
        }
        if (success) {
            System.out.println("files compared ok");
        } else {
            throw new AssertionError("dump files diverged; see log above");
        }
    }

    /**
     * For RUNFOR ms, randomly toggles one server between stopped and started;
     * a stopped server is replaced by a freshly constructed instance over the
     * same data dir. The seed is hard-coded so failures reproduce.
     */
    private void randomStopStart(List<RaftServer> servers, List<String> members) throws InterruptedException {
        Random rand = new Random();
        long seed = -1289377734309993880L;//rand.nextLong();
        rand.setSeed(seed);
        System.out.println("starting with seed:" + seed);
        long start = System.currentTimeMillis();
        while (true) {
            // wait random amount of time from 0 to 5 sec
            Thread.sleep(rand.nextInt(5000));
            long elapsed = System.currentTimeMillis() - start;
            // randomly select a server to start/stop
            int num = rand.nextInt(servers.size());
            RaftServer server = servers.get(num);
            if (server.isRunning()) {
                System.out.println("stopping " + num + " elapsed " + elapsed);
                server.stop();
            } else {
                System.out.println("starting " + num + " elapsed " + elapsed);
                RaftServer newServer = getRaftServer(server.getHost(), PORT, members);
                servers.set(num, newServer);
                newServer.start();
            }
            if (System.currentTimeMillis() > RUNFOR + start) {
                break;
            }
        }
        // at the end, leave them all in the started state so they can clean up
        for (int i = 0; i < servers.size(); i++) {
            RaftServer server = servers.get(i);
            if (!server.isRunning()) {
                RaftServer newServer = getRaftServer(server.getHost(), PORT, members);
                servers.set(i, newServer);
                newServer.start();
            }
        }
    }

    /** Builds a client that talks to all cluster members. */
    private RaftClient getRaftClient(List<String> members) {
        Transport transport = getTransport("blah");
        return new RaftClient(transport, members);
    }

    /** JAX-RS transport bound to the given host and the shared PORT. */
    private Transport getTransport(String host) {
        //return new SimulatorTransport(host, false);
        return new JaxrsTransport(host, PORT, new RaftSerializer());
    }

    /** Per-server data directory under maindir, keyed by host. */
    private String getInstanceDir(String host) {
        return maindir + "/rs" + host;
    }

    /** Disk-backed commit log living inside the server's instance dir. */
    private CommandLog getCommitLog(String host) {
        String instanceDir = getInstanceDir(host);
        return new DiskLog(instanceDir + "/commitlog");
        //return new KVLog(instanceDir + "/commitlog");
    }

    /** Assembles a fully wired (but not started) RaftServer for the host. */
    private RaftServer getRaftServer(String host, int port, List<String> members) {
        String instanceDir = getInstanceDir(host);
        Transport trans = getTransport(host);
        CommandLog log = getCommitLog(host);

        RaftServer raftServer = new RaftServer();
        raftServer.setDataDir(instanceDir);
        raftServer.setMembers(members);
        raftServer.setStateMachine(new DummyStateMachine(getInstanceDir(host), DummyStateMachine.class.getName() + "." + host));
        raftServer.setCommandLog(log);
        raftServer.setTransport(trans);
        raftServer.setHost(host);
        return raftServer;
    }

    /** Sets up logging/MDC for the test run and returns the instances root. */
    private String doConfig() {
        MDC.put("rafthost", "testraftserver");
        Thread.currentThread().setName("testraftserverthread");
        String maindir = System.getProperty("user.dir") + "/instances";
        String logbackFile = System.getProperty("user.dir") + "/../diesel.cluster.test/etc/logback.xml";
        System.setProperty("logback.configurationFile", logbackFile);
        LoggerSetup.init();
        // fixes grizzly logging
        SLF4JBridgeHandler.removeHandlersForRootLogger();
        SLF4JBridgeHandler.install();
        return maindir;
    }
}
| |
/*
*
* Paros and its related class files.
*
* Paros is an HTTP/HTTPS proxy for assessing web application security.
* Copyright (C) 2003-2005 Chinotec Technologies Company
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the Clarified Artistic License
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* Clarified Artistic License for more details.
*
* You should have received a copy of the Clarified Artistic License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
// ZAP: 2011/05/27 Catch any exception when loading the config file
// ZAP: 2011/11/15 Changed to use ZapXmlConfiguration, to enforce the same character encoding when reading/writing configurations
// removed duplicated method calls and removed an unnecessary method (load())
// ZAP: 2013/01/23 Clean up of exception handling/logging.
// ZAP: 2013/03/03 Issue 546: Remove all template Javadoc comments
// ZAP: 2013/05/02 Re-arranged all modifiers into Java coding standard order
// ZAP: 2014/01/17 Issue 987: Allow arbitrary config file values to be set via the command line
// ZAP: 2014/02/21 Issue 1043: Custom active scan dialog
// ZAP: 2016/09/22 JavaDoc tweaks
// ZAP: 2016/11/17 Issue 2701 Support Factory Reset
// ZAP: 2017/03/26 Obtain configs in the order specified
// ZAP: 2017/06/02 Add helper methods.
package org.parosproxy.paros.common;
import java.util.Map.Entry;
import org.apache.commons.configuration.ConfigurationUtils;
import org.apache.commons.configuration.ConversionException;
import org.apache.commons.configuration.FileConfiguration;
import org.apache.log4j.Logger;
import org.zaproxy.zap.control.ControlOverrides;
import org.zaproxy.zap.utils.ZapXmlConfiguration;
public abstract class AbstractParam implements Cloneable {

    private static final Logger logger = Logger.getLogger(AbstractParam.class);

    // Backing configuration; null until one of the load(...) methods runs.
    private FileConfiguration config = null;

    /**
     * Loads the configurations from the given configuration file.
     *
     * @param config the configuration file
     */
    public void load(FileConfiguration config) {
        this.config = config;
        try {
            parse();
        } catch (Exception e) {
            // parse() failures are logged rather than propagated so a broken
            // option section cannot abort start-up; defaults remain in effect.
            logger.error(e.getMessage(), e);
        }
    }

    /**
     * Loads the configurations from the file located at the given path.
     *
     * @param filePath the path to the configuration file, might be relative.
     */
    public void load(String filePath) {
        this.load(filePath, null);
    }

    /**
     * Loads the configurations from the file located at the given path and using the given overrides
     *
     * @param filePath the path to the configuration file, might be relative.
     * @param overrides the configuration overrides, might be {@code null}.
     */
    public void load(String filePath, ControlOverrides overrides) {
        try {
            config = new ZapXmlConfiguration(filePath);
            if (overrides != null) {
                // Overrides are applied in the order supplied (getOrderedConfigs),
                // each one replacing whatever the file provided for that key.
                for (Entry<String,String> entry : overrides.getOrderedConfigs().entrySet()) {
                    logger.info("Setting config " + entry.getKey() + " = " + entry.getValue() +
                            " was " + config.getString(entry.getKey()));
                    config.setProperty(entry.getKey(), entry.getValue());
                }
            }
            parse();
        } catch (Exception e) {
            // Same policy as load(FileConfiguration): log and continue.
            logger.error(e.getMessage(), e);
        }
    }

    /**
     * Gets the configuration file, previously loaded.
     *
     * @return the configurations file
     */
    public FileConfiguration getConfig() {
        return config;
    }

    /**
     * {@inheritDoc}
     * <p>
     * The clone receives a deep copy of the configuration and re-runs
     * {@code parse()} on it via {@link #load(FileConfiguration)}.
     * Note: returns {@code null} (after logging) if cloning fails.
     */
    @Override
    public AbstractParam clone() {
        try {
            AbstractParam clone = (AbstractParam) super.clone();
            clone.load((FileConfiguration) ConfigurationUtils.cloneConfiguration(config));
            return clone;
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
        return null;
    }

    /**
     * Parses the configurations.
     * <p>
     * Called each time the configurations are loaded.
     *
     * @see #getConfig()
     */
    protected abstract void parse();

    /**
     * Will be called to reset the options to factory defaults.
     * Most classes will not need to do anything, but those that do can override this method.
     */
    public void reset() {
        // Do nothing
    }

    /**
     * Gets the {@code String} with the given configuration key.
     * <p>
     * The default value is returned if the key doesn't exist or it's not a {@code String}.
     *
     * @param key the configuration key.
     * @param defaultValue the default value, if the key doesn't exist or it's not a {@code String}.
     * @return the value of the configuration, or default value.
     * @since TODO add version
     */
    protected String getString(String key, String defaultValue) {
        try {
            return getConfig().getString(key, defaultValue);
        } catch (ConversionException e) {
            logConversionException(key, e);
        }
        return defaultValue;
    }

    /**
     * Logs the given {@code ConversionException}, that occurred while reading the configuration with the given key.
     *
     * @param key the configuration key.
     * @param e the {@code ConversionException}.
     * @since TODO add version
     */
    protected static void logConversionException(String key, ConversionException e) {
        logger.warn("Failed to read '" + key + "'", e);
    }

    /**
     * Gets the {@code boolean} with the given configuration key.
     * <p>
     * The default value is returned if the key doesn't exist or it's not a {@code boolean}.
     *
     * @param key the configuration key.
     * @param defaultValue the default value, if the key doesn't exist or it's not a {@code boolean}.
     * @return the value of the configuration, or default value.
     * @since TODO add version
     */
    protected boolean getBoolean(String key, boolean defaultValue) {
        try {
            return getConfig().getBoolean(key, defaultValue);
        } catch (ConversionException e) {
            logConversionException(key, e);
        }
        return defaultValue;
    }

    /**
     * Gets the {@code int} with the given configuration key.
     * <p>
     * The default value is returned if the key doesn't exist or it's not a {@code int}.
     *
     * @param key the configuration key.
     * @param defaultValue the default value, if the key doesn't exist or it's not an {@code int}.
     * @return the value of the configuration, or default value.
     * @since TODO add version
     */
    protected int getInt(String key, int defaultValue) {
        try {
            return getConfig().getInt(key, defaultValue);
        } catch (ConversionException e) {
            logConversionException(key, e);
        }
        return defaultValue;
    }

    /**
     * Gets the {@code Integer} with the given configuration key.
     * <p>
     * The default value is returned if the key doesn't exist or it's not a {@code Integer}.
     *
     * @param key the configuration key.
     * @param defaultValue the default value, if the key doesn't exist or it's not an {@code Integer}.
     * @return the value of the configuration, or default value.
     * @since TODO add version
     */
    protected Integer getInteger(String key, Integer defaultValue) {
        try {
            return getConfig().getInteger(key, defaultValue);
        } catch (ConversionException e) {
            logConversionException(key, e);
        }
        return defaultValue;
    }
}
| |
/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.analytics.shared.data.agents.log4j.util;
import org.apache.log4j.PatternLayout;
import org.apache.log4j.helpers.FormattingInfo;
import org.apache.log4j.helpers.PatternConverter;
import org.apache.log4j.helpers.PatternParser;
import org.apache.log4j.spi.LoggingEvent;
import org.wso2.carbon.base.ServerConfiguration;
import org.wso2.carbon.context.CarbonContext;
import org.wso2.carbon.utils.logging.TenantAwareLoggingEvent;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.security.AccessController;
import java.security.PrivilegedAction;
/**
 * A log4j PatternLayout extended with Carbon tenant-aware conversion
 * characters: %T tenant id, %D tenant domain, %U username, %S server name,
 * %A application name, %H host address, %I instance id, %P full tenant
 * pattern, and %@ for the "@" separator.
 */
public class TenantAwarePatternLayout extends PatternLayout {
    public static final String DEFAULT_TENANT_PATTERN = "[%T][%S]";
    private static String tenantPattern = "[%T][%S]";
    private static String superTenantText = null;

    public TenantAwarePatternLayout() {
    }

    public TenantAwarePatternLayout(String pattern) {
        super(pattern);
    }

    protected PatternParser createPatternParser(String pattern) {
        return new TenantAwarePatternLayout.TenantAwarePatternParser(pattern);
    }

    public synchronized void setTenantPattern(String tenantPattern) {
        // FIX: the original body was "tenantPattern = tenantPattern", a
        // self-assignment of the parameter that left the static field
        // untouched; qualify with the class name so the value is stored.
        TenantAwarePatternLayout.tenantPattern = tenantPattern;
    }

    public static void setSuperTenantText(String superTenantText) {
        // FIX: same parameter self-assignment no-op as setTenantPattern.
        TenantAwarePatternLayout.superTenantText = superTenantText;
    }

    /** Pattern parser that wires the tenant-aware converters into log4j. */
    private static class TenantAwarePatternParser extends PatternParser {
        InetAddress inetAddress;
        String address;
        String serverName = (String) AccessController.doPrivileged(new PrivilegedAction() {
            public String run() {
                return ServerConfiguration.getInstance().getFirstProperty("ServerKey");
            }
        });

        public TenantAwarePatternParser(String pattern) {
            super(pattern);
            try {
                this.inetAddress = InetAddress.getLocalHost();
                this.address = this.inetAddress.getHostAddress();
            } catch (UnknownHostException var3) {
                // fall back to loopback when the local host can't be resolved
                this.address = "127.0.0.1";
            }
        }

        protected void finalizeConverter(char c) {
            Object pc = null;
            switch (c) {
                case '@':
                    pc = new TenantAwarePatternLayout.TenantAwarePatternParser.AtSignPatternConverter(this.formattingInfo);
                    break;
                case 'A':
                    pc = new TenantAwarePatternLayout.TenantAwarePatternParser.AppNamePatternConverter(this.formattingInfo, this.extractPrecisionOption());
                    break;
                case 'B':
                case 'C':
                case 'E':
                case 'F':
                case 'G':
                case 'J':
                case 'K':
                case 'L':
                case 'M':
                case 'N':
                case 'O':
                case 'Q':
                case 'R':
                default:
                    // unhandled characters are delegated to log4j's parser
                    super.finalizeConverter(c);
                    break;
                case 'D':
                    pc = new TenantAwarePatternLayout.TenantAwarePatternParser.TenantDomainPatternConverter(this.formattingInfo, this.extractPrecisionOption());
                    break;
                case 'H':
                    pc = new TenantAwarePatternLayout.TenantAwarePatternParser.HostNamePatternConverter(this.formattingInfo, this.extractPrecisionOption(), this.address);
                    break;
                case 'I':
                    pc = new TenantAwarePatternLayout.TenantAwarePatternParser.InstanceIdPatternConverter(this.formattingInfo, this.extractPrecisionOption());
                    break;
                case 'P':
                    pc = new TenantAwarePatternLayout.TenantAwarePatternParser.TenantPatternConverter(this.formattingInfo, this.extractPrecisionOption());
                    break;
                case 'S':
                    pc = new TenantAwarePatternLayout.TenantAwarePatternParser.ServerNamePatternConverter(this.formattingInfo, this.extractPrecisionOption(), this.serverName);
                    break;
                case 'T':
                    pc = new TenantAwarePatternLayout.TenantAwarePatternParser.TenantIdPatternConverter(this.formattingInfo, this.extractPrecisionOption());
                    break;
                case 'U':
                    pc = new TenantAwarePatternLayout.TenantAwarePatternParser.UserNamePatternConverter(this.formattingInfo, this.extractPrecisionOption());
            }

            if (pc != null) {
                this.currentLiteral.setLength(0);
                this.addConverter((PatternConverter) pc);
            }
        }

        /** %P: expands the configured tenant pattern for non-super tenants. */
        private static class TenantPatternConverter extends TenantAwarePatternLayout.TenantAwarePatternParser.TenantAwareNamedPatternConverter {
            public TenantPatternConverter(FormattingInfo formattingInfo, int precision) {
                super(formattingInfo, precision);
            }

            public String getFullyQualifiedName(LoggingEvent event) {
                int tenantId = ((Integer) AccessController.doPrivileged(new PrivilegedAction() {
                    public Integer run() {
                        return Integer.valueOf(CarbonContext.getThreadLocalCarbonContext().getTenantId());
                    }
                })).intValue();
                // -1 = unresolved tenant, -1234 = super tenant
                return tenantId != -1 && tenantId != -1234 ? (new TenantAwarePatternLayout(TenantAwarePatternLayout.tenantPattern)).format(event) : TenantAwarePatternLayout.superTenantText;
            }
        }

        /** %@: emits "@" when a tenant domain is present. */
        private static class AtSignPatternConverter extends TenantAwarePatternLayout.TenantAwarePatternParser.TenantAwareNamedPatternConverter {
            public AtSignPatternConverter(FormattingInfo formattingInfo) {
                super(formattingInfo, -1);
            }

            public String getFullyQualifiedName(LoggingEvent event) {
                return CarbonContext.getThreadLocalCarbonContext().getTenantDomain() != null ? "@" : null;
            }
        }

        /** %A: service/application name from the event or carbon context. */
        private static class AppNamePatternConverter extends TenantAwarePatternLayout.TenantAwarePatternParser.TenantAwareNamedPatternConverter {
            public AppNamePatternConverter(FormattingInfo formattingInfo, int precision) {
                super(formattingInfo, precision);
            }

            public String getFullyQualifiedName(LoggingEvent event) {
                if (event instanceof TenantAwareLoggingEvent) {
                    return ((TenantAwareLoggingEvent) event).getServiceName() != null ? ((TenantAwareLoggingEvent) event).getServiceName() : "";
                } else {
                    String appName = CarbonContext.getThreadLocalCarbonContext().getApplicationName();
                    return appName != null ? appName : "";
                }
            }
        }

        /** %I: stratos instance name from the carbon.instance.name property. */
        private static class InstanceIdPatternConverter extends TenantAwarePatternLayout.TenantAwarePatternParser.TenantAwareNamedPatternConverter {
            public InstanceIdPatternConverter(FormattingInfo formattingInfo, int precision) {
                super(formattingInfo, precision);
            }

            public String getFullyQualifiedName(LoggingEvent event) {
                String stratosInstance = System.getProperty("carbon.instance.name");
                return stratosInstance != null ? stratosInstance : "";
            }
        }

        /** %H: local host address resolved once by the parser. */
        private static class HostNamePatternConverter extends TenantAwarePatternLayout.TenantAwarePatternParser.TenantAwareNamedPatternConverter {
            String address;

            public HostNamePatternConverter(FormattingInfo formattingInfo, int precision, String hostAddress) {
                super(formattingInfo, precision);
                this.address = hostAddress;
            }

            public String getFullyQualifiedName(LoggingEvent event) {
                return this.address;
            }
        }

        /** %S: the server's ServerKey, resolved once by the parser. */
        private static class ServerNamePatternConverter extends TenantAwarePatternLayout.TenantAwarePatternParser.TenantAwareNamedPatternConverter {
            String name;

            public ServerNamePatternConverter(FormattingInfo formattingInfo, int precision, String serverName) {
                super(formattingInfo, precision);
                this.name = serverName;
            }

            public String getFullyQualifiedName(LoggingEvent event) {
                return this.name;
            }
        }

        /** %D: tenant domain from the thread-local carbon context. */
        private static class TenantDomainPatternConverter extends TenantAwarePatternLayout.TenantAwarePatternParser.TenantAwareNamedPatternConverter {
            public TenantDomainPatternConverter(FormattingInfo formattingInfo, int precision) {
                super(formattingInfo, precision);
            }

            public String getFullyQualifiedName(LoggingEvent event) {
                return CarbonContext.getThreadLocalCarbonContext().getTenantDomain();
            }
        }

        /** %U: username from the thread-local carbon context. */
        private static class UserNamePatternConverter extends TenantAwarePatternLayout.TenantAwarePatternParser.TenantAwareNamedPatternConverter {
            public UserNamePatternConverter(FormattingInfo formattingInfo, int precision) {
                super(formattingInfo, precision);
            }

            public String getFullyQualifiedName(LoggingEvent event) {
                return CarbonContext.getThreadLocalCarbonContext().getUsername();
            }
        }

        /** %T: tenant id from the event, or the carbon context as fallback. */
        private static class TenantIdPatternConverter extends TenantAwarePatternLayout.TenantAwarePatternParser.TenantAwareNamedPatternConverter {
            public TenantIdPatternConverter(FormattingInfo formattingInfo, int precision) {
                super(formattingInfo, precision);
            }

            public String getFullyQualifiedName(LoggingEvent event) {
                if (event instanceof TenantAwareLoggingEvent) {
                    return ((TenantAwareLoggingEvent) event).getTenantId();
                } else {
                    int tenantId = ((Integer) AccessController.doPrivileged(new PrivilegedAction() {
                        public Integer run() {
                            return Integer.valueOf(CarbonContext.getThreadLocalCarbonContext().getTenantId());
                        }
                    })).intValue();
                    return tenantId != -1 ? Integer.toString(tenantId) : null;
                }
            }
        }

        /**
         * Base converter: applies the precision option (keep only the last
         * {@code precision} dot-separated components) to a computed name.
         */
        private abstract static class TenantAwareNamedPatternConverter extends PatternConverter {
            private int precision;

            public TenantAwareNamedPatternConverter(FormattingInfo formattingInfo, int precision) {
                super(formattingInfo);
                this.precision = precision;
            }

            protected abstract String getFullyQualifiedName(LoggingEvent var1);

            public String convert(LoggingEvent event) {
                String n = this.getFullyQualifiedName(event);
                if (n == null) {
                    return "";
                } else if (this.precision <= 0) {
                    return n;
                } else {
                    int len = n.length();
                    int end = len - 1;

                    // walk back 'precision' dots; 46 == '.'
                    for (int i = this.precision; i > 0; --i) {
                        end = n.lastIndexOf(46, end - 1);
                        if (end == -1) {
                            return n;
                        }
                    }

                    return n.substring(end + 1, len);
                }
            }
        }
    }
}
| |
package org.rabix.bindings.cwl.expression;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
import org.rabix.bindings.cwl.bean.CWLJob;
import org.rabix.bindings.cwl.bean.CWLRuntime;
import org.rabix.bindings.cwl.bean.resource.requirement.CWLInlineJavascriptRequirement;
import org.rabix.bindings.cwl.expression.javascript.CWLExpressionJavascriptResolver;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
/**
 * Resolves CWL parameter references ($(inputs.x['k'][0]...)) and inline
 * javascript expressions (${...}) against a job's inputs, runtime and
 * "self" context.
 */
public class CWLExpressionResolver {

    public static String KEY_EXPRESSION_VALUE = "script";
    public static String KEY_EXPRESSION_LANGUAGE = "engine";

    // Grammar for parameter references: a symbol followed by .field,
    // ['key'], ["key"] or [index] segments, wrapped in $( ... ).
    private static String segSymbol = "\\w+";
    private static String segSingle = "\\['([^']|\\\\')+'\\]";
    private static String segDouble = "\\[\"([^\"]|\\\\\")+\"\\]";
    private static String segIndex = "\\[[0-9]+\\]";
    private static String segments = String.format("(.%s|%s|%s|%s)", segSymbol, segSingle, segDouble, segIndex);
    private static String paramRe = String.format("\\$\\((%s)%s*\\)", segSymbol, segments);
    private static Pattern segPattern = Pattern.compile(segments);
    private static Pattern pattern = Pattern.compile(paramRe);

    // Sorted-key mapper so serialized leaves are deterministic.
    public static final ObjectMapper sortMapper = new ObjectMapper();
    static {
        sortMapper.configure(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS, true);
    }

    /**
     * Resolves an expression against the job. Expression objects ({script,
     * engine} maps) and strings are evaluated; anything else is returned
     * unchanged.
     *
     * @throws CWLExpressionException on evaluation failure
     */
    @SuppressWarnings({ "unchecked" })
    public static <T> T resolve(final Object expression, final CWLJob job, final Object self) throws CWLExpressionException {
        if (expression == null) {
            return null;
        }
        if (isExpressionObject(expression)) {
            String script = (String) ((Map<?, ?>) expression).get(KEY_EXPRESSION_VALUE);

            List<String> expressionLibs = Collections.<String>emptyList();
            CWLInlineJavascriptRequirement inlineJavascriptRequirement = job.getApp().getInlineJavascriptRequirement();
            if (inlineJavascriptRequirement != null) {
                expressionLibs = inlineJavascriptRequirement.getExpressionLib();
            }
            return (T) stripDollarSign(CWLExpressionJavascriptResolver.evaluate(job.getInputs(), self, script, job.getRuntime(), expressionLibs));
        }
        if (expression instanceof String) {
            if (job.isInlineJavascriptEnabled()) {
                List<String> expressionLibs = Collections.<String>emptyList();
                CWLInlineJavascriptRequirement inlineJavascriptRequirement = job.getApp().getInlineJavascriptRequirement();
                if (inlineJavascriptRequirement != null) {
                    expressionLibs = inlineJavascriptRequirement.getExpressionLib();
                }
                return (T) stripDollarSign(process((String) expression, job.getInputs(), self, job.getRuntime(), expressionLibs));
            } else {
                // No javascript: only $(...) parameter references are legal.
                Map<String, Object> vars = new HashMap<>();
                vars.put("inputs", job.getInputs());
                vars.put("self", self);
                CWLRuntime runtime = job.getRuntime();
                if (runtime != null) {
                    vars.put("runtime", runtime.toMap());
                }
                return (T) stripDollarSign(paramInterpolate((String) expression, vars, true));
            }
        }
        return (T) expression;
    }

    /** Unescapes "\$" back to "$" in string results. */
    private static Object stripDollarSign(Object value) {
        if (value instanceof String) {
            return ((String) value).replaceAll(Matcher.quoteReplacement("\\$"), "\\$");
        }
        return value;
    }

    /** True for {script, engine} expression maps. */
    public static boolean isExpressionObject(Object expression) {
        return expression instanceof Map<?,?> && ((Map<?,?>) expression).containsKey(KEY_EXPRESSION_VALUE) && ((Map<?,?>) expression).containsKey(KEY_EXPRESSION_LANGUAGE);
    }

    /**
     * Walks the remaining segment string (.field / ['k'] / ["k"] / [i])
     * down through vars, returning the addressed value.
     */
    private static Object nextSegment(String remaining, Object vars) throws CWLExpressionException {
        if (vars == null) {
            return null;
        }
        if (!StringUtils.isEmpty(remaining)) {
            Matcher m = segPattern.matcher(remaining);
            if (m.find()) {
                if (m.group(0).startsWith(".")) {
                    // .length is special-cased for lists
                    if (m.group(0).equals(".length") && vars instanceof List) {
                        return ((List<?>) vars).size();
                    }
                    return nextSegment(remaining.substring(m.end(0)), ((Map<?, ?>) vars).get(m.group(0).substring(1)));
                } else if (m.group(0).charAt(1) == '\"' || m.group(0).charAt(1) == '\'') {
                    Character start = m.group(0).charAt(1);
                    String key = m.group(0).substring(2, m.group(0).lastIndexOf(start));
                    key = key.replace("\\'", "'");
                    key = key.replace("\\\"", "\"");
                    return nextSegment(remaining.substring(m.end(0)), ((Map<?, ?>) vars).get(key));
                } else {
                    String key = m.group(0).substring(1, m.group(0).length());
                    Integer keyInt = Integer.parseInt(key);
                    Object remainingVars = null;
                    if (vars instanceof List<?>) {
                        if (((List<?>) vars).size() <= keyInt) {
                            throw new CWLExpressionException("Could not get value from " + vars + " at position " + keyInt);
                        }
                        remainingVars = ((List<?>) vars).get(keyInt);
                    } else if (vars instanceof Map<?,?>) {
                        remainingVars = ((Map<?,?>) vars).get(keyInt);
                    }
                    return nextSegment(remaining.substring(m.end(0)), remainingVars);
                }
            }
        }
        return vars;
    }

    /**
     * Interpolates $(...) references in ex. If strip is true and the whole
     * string is a single reference, the raw leaf value is returned instead
     * of its string form.
     */
    private static Object paramInterpolate(String ex, Map<String, Object> obj, boolean strip) throws CWLExpressionException {
        Matcher m = pattern.matcher(ex);
        if (m.find()) {
            Object leaf = nextSegment(m.group(0).substring(m.end(1) - m.start(0), m.group(0).length() - 1), obj.get(m.group(1)));
            if (strip && ex.trim().length() == m.group(0).length()) {
                return leaf;
            } else {
                try {
                    String leafStr = sortMapper.writeValueAsString(leaf);
                    if (leafStr.startsWith("\"")) {
                        leafStr = leafStr.substring(1, leafStr.length() - 1);
                    }
                    return ex.substring(0, m.start(0)) + leafStr + paramInterpolate(ex.substring(m.end(0)), obj, false);
                } catch (JsonProcessingException e) {
                    throw new CWLExpressionException("Failed to serialize " + leaf + " to JSON.", e);
                }
            }
        }
        return ex;
    }

    /** True if chars[i] is preceded by an odd number of backslashes. */
    private static boolean isEscaped(char[] chars, int i) {
        if (i == 0)
            return false;
        return chars[i - 1] == '\\' && !isEscaped(chars, i - 1);
    }

    /** True at an unescaped "$(" or "${" opener. */
    private static boolean startsExpression(char[] chars, int i) {
        // FIX: guard i + 1 before reading chars[i + 1]; the original tested
        // "i != chars.length" (always true for a valid index), so a string
        // ending in an unescaped '$' threw ArrayIndexOutOfBoundsException.
        return chars[i] == '$' && !isEscaped(chars, i) && i + 1 < chars.length
                && (chars[i + 1] == '{' || chars[i + 1] == '(');
    }

    /**
     * Scans value for ${...} / $(...) expressions, evaluating each with the
     * javascript resolver. A value that is exactly one expression returns
     * the raw result; otherwise results are concatenated into a string.
     */
    private static Object process(String value, Object inputs, Object self, CWLRuntime runtime, List<String> engineConfigs) throws CWLExpressionException {
        char[] chars = value.trim().toCharArray();
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < chars.length; i++) {
            char c = chars[i];
            if (startsExpression(chars, i)) {
                String expression = new Seeker(chars).extractExpression(i + 1);
                if (expression == null)
                    throw new CWLExpressionException("Expression left open: " + value.substring(i));
                Object resolved = CWLExpressionJavascriptResolver.evaluate(inputs, self, expression, runtime, engineConfigs);
                if (expression.length() == chars.length - 1) {
                    return resolved;
                }
                i = i + expression.length();
                sb.append(resolved == null ? "null" : resolved.toString());
            } else {
                sb.append(c);
            }
        }
        return sb.toString();
    }

    /**
     * Extracts a brace/paren-balanced expression, skipping over string
     * literals and javascript comments so their delimiters don't count.
     */
    private static class Seeker {
        private StringBuilder sb;
        private char[] chars;

        public Seeker(char[] chars) {
            this.sb = new StringBuilder();
            this.chars = chars;
        }

        /** Maps an opening delimiter to its closer. */
        private static char close(char c) {
            if (c == '{')
                return '}';
            if (c == '(')
                return ')';
            return 0;
        }

        /**
         * Returns the balanced expression starting at chars[start]
         * (delimiters included), or null if left open.
         */
        private String extractExpression(int start) {
            char open = chars[start];
            char close = close(open);
            int opened = 0;
            int i = start;
            while (i < chars.length) {
                char c = chars[i];
                sb.append(c);
                if (c == open && !isEscaped(chars, i)) {
                    opened++;
                } else if (c == close && !isEscaped(chars, i)) {
                    opened--;
                }
                if (opened == 0) {
                    return sb.toString();
                }
                i = skipStringycontent(i);
                i++;
            }
            return null;
        }

        /** Skips string literals and comments starting at chars[start]. */
        private int skipStringycontent(int start) {
            char c = chars[start];
            if (isEscaped(chars, start))
                return start;
            if (c == '/') {
                // FIX: bounds-check before peeking at chars[start + 1]; the
                // original threw ArrayIndexOutOfBoundsException when '/' was
                // the last character.
                if (start + 1 < chars.length && chars[start + 1] == '/')
                    return skipUntil(start, '\n');
                if (start + 1 < chars.length && chars[start + 1] == '*')
                    return skipFullComments(start);
                return start;
            }
            if (c == '\'' || c == '\"')
                return skipUntil(start, c);
            return start;
        }

        /** Skips a slash-star comment; returns the index of the closing '/'. */
        private int skipFullComments(int start) {
            int i = start;
            do {
                i = skipUntil(i, '/');
            } while (chars[i - 1] != '*');
            return i;
        }

        /** Appends chars until the next unescaped goal char (or end). */
        private int skipUntil(int start, char goal) {
            int i = start + 1;
            while (i < chars.length) {
                char c = chars[i];
                sb.append(c);
                if (goal == c && !isEscaped(chars, i)) {
                    return i;
                }
                i++;
            }
            return i;
        }
    }
}
| |
/**************************************************************************************
* Copyright (c) 2013-2015, Finnish Social Science Data Archive/University of Tampere *
* *
* All rights reserved. *
* *
* Redistribution and use in source and binary forms, with or without modification, *
* are permitted provided that the following conditions are met: *
* 1. Redistributions of source code must retain the above copyright notice, this *
* list of conditions and the following disclaimer. *
* 2. Redistributions in binary form must reproduce the above copyright notice, *
* this list of conditions and the following disclaimer in the documentation *
* and/or other materials provided with the distribution. *
* 3. Neither the name of the copyright holder nor the names of its contributors *
* may be used to endorse or promote products derived from this software *
* without specific prior written permission. *
* *
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND *
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED *
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE *
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR *
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES *
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; *
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON *
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT *
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS *
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. *
**************************************************************************************/
package fi.uta.fsd.metka.storage.variables.impl;
import fi.uta.fsd.Logger;
import fi.uta.fsd.metka.enums.*;
import fi.uta.fsd.metka.model.access.calls.ReferenceContainerDataFieldCall;
import fi.uta.fsd.metka.model.access.calls.ValueDataFieldCall;
import fi.uta.fsd.metka.model.access.enums.StatusCode;
import fi.uta.fsd.metka.model.data.RevisionData;
import fi.uta.fsd.metka.model.data.change.Change;
import fi.uta.fsd.metka.model.data.container.*;
import fi.uta.fsd.metka.model.data.value.Value;
import fi.uta.fsd.metka.model.general.DateTimeUserPair;
import fi.uta.fsd.metka.names.Fields;
import fi.uta.fsd.metka.spssio.por.PORFile;
import fi.uta.fsd.metka.spssio.por.input.PORReader;
import fi.uta.fsd.metka.storage.repository.*;
import fi.uta.fsd.metka.storage.repository.enums.RemoveResult;
import fi.uta.fsd.metka.storage.repository.enums.ReturnResult;
import fi.uta.fsd.metka.storage.response.OperationResponse;
import fi.uta.fsd.metka.storage.response.RevisionableInfo;
import fi.uta.fsd.metka.storage.util.ChangeUtil;
import fi.uta.fsd.metka.storage.variables.enums.ParseResult;
import fi.uta.fsd.metka.transfer.revision.RevisionCreateRequest;
import fi.uta.fsd.metkaAuthentication.AuthenticationUtil;
import org.apache.commons.lang3.tuple.*;
import java.io.IOException;
import java.util.*;
import static fi.uta.fsd.metka.storage.variables.impl.StudyVariablesParserImpl.checkResultForUpdate;
import static fi.uta.fsd.metka.storage.variables.impl.StudyVariablesParserImpl.resultCheck;
/**
 * Parses an SPSS portable (POR) file and synchronises its variables into the study
 * variables revision data. One instance is bound to a single POR file and study;
 * {@link #parse(RevisionData)} performs the actual merge through the revision
 * repositories supplied at construction time.
 */
class PORVariablesParser implements VariablesParser {
    private final DateTimeUserPair info;
    private final String studyId;
    private final RevisionRemoveRepository remove;
    private final RevisionRepository revisions;
    private final RevisionCreationRepository create;
    private final RevisionEditRepository edit;
    private final RevisionRestoreRepository restore;
    // Producing software name/version parsed from the POR header.
    private final String softwareName;
    private final String softwareVersion;
    // Data matrix dimensions; used below for VARQUANTITY (sizeX) and CASEQUANTITY (sizeY).
    private final int sizeX;
    private final int sizeY;
    private final List<PORUtil.PORVariableHolder> variables;
    private final Language varLang;
    /**
     * Reads the POR file at the given path, groups its variables and answers, and
     * extracts the software header and matrix dimensions.
     *
     * @throws UnsupportedOperationException if the file cannot be read or parsed
     */
    PORVariablesParser(String path, Language varLang, DateTimeUserPair info, String studyId,
                       RevisionRepository revisions, RevisionRemoveRepository remove, RevisionCreationRepository create, RevisionEditRepository edit, RevisionRestoreRepository restore) {
        this.varLang = varLang;
        this.remove = remove;
        this.revisions = revisions;
        this.create = create;
        this.edit = edit;
        this.info = info;
        this.studyId = studyId;
        this.restore = restore;
        PORReader reader = new PORReader();
        PORFile por;
        try {
            por = reader.parse(path);
        } catch(IOException ioe) {
            ioe.printStackTrace();
            Logger.error(getClass(), "IOException while reading POR-file with path " + path);
            throw new UnsupportedOperationException("Could not parse POR file");
        }
        // Group variables to list
        variables = PORUtil.groupVariables(por);
        // Group answers under variables list
        PORUtil.PORAnswerMapper visitor = new PORUtil.PORAnswerMapper(variables);
        por.data.accept(visitor);
        // Header format appears to be "<name, possibly with spaces> <version>": the last
        // whitespace-separated token is the version, everything before it the name.
        String[] software = por.getSoftware().split("\\s");
        softwareVersion = software.length > 1 ? software[software.length-1] : "";
        if(software.length == 1) {
            softwareName = software[0];
        } else if(software.length > 1) {
            String temp = "";
            for(int i = 0; i < software.length - 1; i++) {
                if(i > 0) {
                    temp += " ";
                }
                temp += software[i];
            }
            softwareName = temp;
        } else {
            softwareName = "";
        }
        sizeX = por.data.sizeX();
        sizeY = por.data.sizeY();
    }
    /**
     * Runs the full parse: first the top-level properties (software, quantities), then
     * the per-variable merge. Returns the most significant ParseResult of the two steps.
     */
    public ParseResult parse(RevisionData variablesData) {
        ParseResult result = ParseResult.NO_CHANGES;
        result = variablesBaseProperties(variablesData, result);
        result = variablesParsing(variablesData, result);
        return result;
    }
    // TODO: Can values be set to default regardless of actual language?
    /**
     * Writes the POR header derived values (software name/version, variable and case
     * quantities) into the variables revision and persists it if anything changed.
     */
    private ParseResult variablesBaseProperties(RevisionData variablesData, ParseResult result) {
        // Set software field
        Pair<StatusCode, ValueDataField> fieldPair = variablesData.dataField(
                ValueDataFieldCall.set(Fields.SOFTWARE, new Value(softwareName), Language.DEFAULT).setInfo(info));
        result = checkResultForUpdate(fieldPair, result);
        // Set softwareversion
        fieldPair = variablesData.dataField(
                ValueDataFieldCall.set(Fields.SOFTWAREVERSION, new Value(softwareVersion), Language.DEFAULT).setInfo(info));
        result = checkResultForUpdate(fieldPair, result);
        // Set varquantity field
        fieldPair = variablesData.dataField(ValueDataFieldCall.set(Fields.VARQUANTITY, new Value(sizeX + ""), Language.DEFAULT).setInfo(info));
        result = checkResultForUpdate(fieldPair, result);
        // Set casequantity field
        fieldPair = variablesData.dataField(ValueDataFieldCall.set(Fields.CASEQUANTITY, new Value(sizeY + ""), Language.DEFAULT).setInfo(info));
        result = checkResultForUpdate(fieldPair, result);
        // Update variables data to database if needed
        if(result == ParseResult.REVISION_CHANGES) {
            revisions.updateRevisionData(variablesData);
        }
        return result;
    }
    // All languages are handled in exactly the same way
    /**
     * Core merge step: pairs each POR variable with its existing variable revision (if
     * any), removes revisions for variables no longer present in the file, creates or
     * edits a DRAFT revision per remaining variable, merges the POR content into it,
     * and finally rebuilds the VARIABLES reference container in file order.
     */
    private ParseResult variablesParsing(RevisionData variablesData, ParseResult result) {
        List<RevisionData> variableRevisions = revisions.getVariableRevisionsOfVariables(variablesData.getKey().getId());
        VariableParser parser = new VariableParser(info, varLang);
        Logger.debug(getClass(), "Gathering entities for parsing");
        long start = System.currentTimeMillis(); // Debug timer
        List<MutablePair<RevisionData, PORUtil.PORVariableHolder>> listOfEntitiesAndHolders = new ArrayList<>();
        for(PORUtil.PORVariableHolder variable : variables) {
            RevisionData variableRevision = null;
            // Match by VARID ("<studyId>_<varName>"); a matched revision is removed from
            // variableRevisions so the leftovers can be deleted afterwards.
            // NOTE(review): assumes every variable revision carries a VARID field —
            // getRight() below would NPE otherwise; confirm.
            for(Iterator<RevisionData> i = variableRevisions.iterator(); i.hasNext(); ) {
                variableRevision = i.next();
                Pair<StatusCode, ValueDataField> fieldPair = variableRevision.dataField(ValueDataFieldCall.get(Fields.VARID));
                if(fieldPair.getRight().getActualValueFor(Language.DEFAULT).equals(studyId + "_" + parser.getVarName(variable))) {
                    i.remove();
                    break;
                }
                variableRevision = null;
            }
            // Left side stays null for brand-new variables; they get a revisionable later.
            listOfEntitiesAndHolders.add(new MutablePair<>(variableRevision, variable));
        }
        Logger.debug(getClass(), "Entities gathered. Took "+(System.currentTimeMillis()-start)+"ms");
        /*ContainerDataField variableGroups = variablesData.dataField(ContainerDataFieldCall.get(Fields.VARGROUPS)).getRight();*/
        // Perform removal
        for(RevisionData variableRevision : variableRevisions) {
            // All remaining rows in variableEntities should be removed since no variable was found for them in the current POR-file
            // If removal of the revision returns SUCCESS_DRAFT this means that there's more revisions to remove and second call with new latest revision should clear out any remaining revisions.
            if(RemoveResult.valueOf(remove.remove(variableRevision.getKey(), info).getResult()) == RemoveResult.SUCCESS_DRAFT) {
                Pair<ReturnResult, RevisionData> dataPair = revisions.getRevisionData(variableRevision.getKey().getId().toString());
                remove.remove(dataPair.getRight().getKey(), info);
            }
        }
        // We need an updated variables data in case removals have changed it
        variablesData = revisions.getRevisionData(variablesData.getKey().asCongregateKey()).getRight();
        if(variablesData == null) {
            // Something went terribly wrong
            return resultCheck(result, ParseResult.DID_NOT_FIND_VARIABLES);
        }
        if(!listOfEntitiesAndHolders.isEmpty()) {
            Pair<StatusCode, ValueDataField> studyField = variablesData.dataField(ValueDataFieldCall.get(Fields.STUDY));
            Logger.debug(getClass(), listOfEntitiesAndHolders.size()+" variables to parse.");
            // Debug variables
            int counter = 0;
            long timeSpent = 0L;
            for(MutablePair<RevisionData, PORUtil.PORVariableHolder> pair : listOfEntitiesAndHolders) {
                // Iterate through entity/holder pairs. There should always be a holder but missing entity indicates that this is a new variable.
                // After all variables are handled there should be one non removed revisionable per variable in the current por-file.
                // Each revisionable should have an open draft revision (this is a shortcut but it would require doing actual change checking for all variable content to guarantee that no
                // unnecessary revisions are created. This is not required and so a new draft is provided per revisionable).
                // Variables entity should have an open draft revision that includes references to all variables as well as non grouped references for all variables that previously were
                // not in any groups.
                start = System.currentTimeMillis(); // Debug timer
                RevisionData variableData = pair.getLeft();
                PORUtil.PORVariableHolder variable = pair.getRight();
                String varName = parser.getVarName(variable);
                String varId = studyId + "_" + varName;
                if(variableData == null) {
                    // New variable: create a STUDY_VARIABLE revisionable for it.
                    RevisionCreateRequest request = new RevisionCreateRequest();
                    request.setType(ConfigurationType.STUDY_VARIABLE);
                    request.getParameters().put(Fields.STUDY, studyField.getRight().getActualValueFor(Language.DEFAULT));
                    request.getParameters().put(Fields.VARIABLESID, variablesData.getKey().getId().toString());
                    request.getParameters().put(Fields.VARNAME, varName);
                    request.getParameters().put(Fields.VARID, varId);
                    request.getParameters().put(Fields.LANGUAGE, varLang.toValue());
                    Pair<ReturnResult, RevisionData> dataPair = create.create(request);
                    if(dataPair.getLeft() != ReturnResult.REVISION_CREATED) {
                        Logger.error(getClass(), "Couldn't create new variable revisionable for study "+studyField.getRight().getActualValueFor(Language.DEFAULT)+" and variables "+variablesData.toString());
                        return resultCheck(result, ParseResult.COULD_NOT_CREATE_VARIABLES);
                    }
                    variableData = dataPair.getRight();
                }
                RevisionableInfo revInfo = revisions.getRevisionableInfo(variableData.getKey().getId()).getRight();
                if(revInfo == null) {
                    // No revisionable info found; skip this variable rather than fail the whole parse.
                    continue;
                }
                // If we're trying to reuse an already removed variable then restore that variable. If this is not desirable then the variable should be explicitly removed from the database when removed from the por file.
                if(revInfo.getRemoved()) {
                    restore.restore(variableData.getKey().getId());
                }
                if(variableData.getState() != RevisionState.DRAFT) {
                    // Open a new DRAFT so the merge below always writes into an editable revision.
                    Pair<OperationResponse, RevisionData> dataPair = edit.edit(variableData.getKey(), info);
                    if(!dataPair.getLeft().getResult().equals(ReturnResult.REVISION_CREATED.name())) {
                        Logger.error(getClass(), "Couldn't create new DRAFT revision for "+variableData.getKey().toString());
                        return resultCheck(result, ParseResult.COULD_NOT_CREATE_VARIABLE_DRAFT);
                    }
                    variableData = dataPair.getRight();
                }
                // Claim handlership for the current user if someone else holds it.
                if(!AuthenticationUtil.isHandler(variableData)) {
                    variableData.setHandler(AuthenticationUtil.getUserName());
                    revisions.updateRevisionData(variableData);
                }
                // Merge variable to variable revision
                ParseResult mergeResult = parser.mergeToData(variableData, variable);
                if(mergeResult == ParseResult.REVISION_CHANGES) {
                    variableData.setSaved(DateTimeUserPair.build());
                    ReturnResult updateResult = revisions.updateRevisionData(variableData);
                    if(updateResult != ReturnResult.REVISION_UPDATE_SUCCESSFUL) {
                        Logger.error(getClass(), "Could not update revision data for "+variableData.toString()+" with result "+updateResult);
                    }
                }
                pair.setLeft(variableData);
                // Insert row back to variables container or create row if not there before
                counter++;
                long end = System.currentTimeMillis()-start;
                Logger.debug(getClass(), "Parsed variable in "+end+"ms. Still "+(listOfEntitiesAndHolders.size()-counter)+" variables to parse.");
                timeSpent += end;
            }
            // Re-fetch: the per-variable operations above may have produced a newer revision.
            variablesData = revisions.getRevisionData(variablesData.getKey().asCongregateKey()).getRight();
            if(variablesData == null) {
                // Something went terribly wrong
                return resultCheck(result, ParseResult.DID_NOT_FIND_VARIABLES);
            }
            // listOfEntitiesAndHolders should contain all variables in the POR-file as well as their existing revisionables. No revisionable is provided if it's a new variable
            // Variables container and variable groups in STUDY_VARIABLES
            ReferenceContainerDataField variablesContainer = variablesData.dataField(ReferenceContainerDataFieldCall.get(Fields.VARIABLES)).getRight();
            if(variablesContainer == null) {
                // Container missing: create it so the references can be inserted.
                Pair<StatusCode, ReferenceContainerDataField> containerPair = variablesData.dataField(ReferenceContainerDataFieldCall.set(Fields.VARIABLES));
                result = checkResultForUpdate(containerPair, result);
                variablesContainer = containerPair.getRight();
            }
            if(variablesContainer == null) {
                Logger.error(getClass(), "Missing variables container even though it should be present or created");
                return resultCheck(result, ParseResult.NO_VARIABLES_CONTAINER);
            }
            // Reorder variables
            List<ReferenceRow> varRows = gatherAndClear(variablesContainer);
            // Sort existing rows in correct order;
            for(MutablePair<RevisionData, PORUtil.PORVariableHolder> pair : listOfEntitiesAndHolders) {
                popOrCreateAndInsertRowTo(variablesContainer, varRows, pair.getLeft().getKey().asCongregateKey(), pair.getLeft().getKey().asPartialKey(), variablesData.getChanges(), info);
            }
            // Add removed rows from varRows to preserve history information, after this there should be no rows remaining but if there are those should not be added in any case
            for(ReferenceRow row : varRows) {
                if(row.getRemoved()) {
                    variablesContainer.getReferences().add(row);
                }
            }
            Logger.debug(getClass(), "Parsed variables in "+timeSpent+"ms");
            // Final update so the above changes are also included
            revisions.updateRevisionData(variablesData);
        }
        return result;
    }
    /**
     * Snapshots the container's reference rows into a new list and clears the container,
     * so the rows can be re-inserted in the desired order. Returns an empty list when
     * the container has no reference list at all.
     */
    private static List<ReferenceRow> gatherAndClear(ReferenceContainerDataField field) {
        if(field.getReferences() == null) {
            return new ArrayList<>();
        }
        List<ReferenceRow> rows = new ArrayList<>(field.getReferences());
        field.getReferences().clear();
        return rows;
    }
    /**
     * Helper method for handling and organising container rows.
     * Searches given collection for a row including given value in given field.
     * If row was not found then creates a new row and inserts it into provided container.
     * If row was found but value does not equal given value then it is replaced with a new row.
     * No change handling is necessary since some set operation should follow always after
     * calling this method.
     *
     * @param target Target container where the row will be set
     * @param rows Collection of rows to search through for correct existing row
     * @param value Value to set on the (possibly new) reference row
     * @param includes Partial value used to locate an existing row in the collection
     * @param changeMap Map where this rows containers should reside
     * @return Either an existing or newly created ReferenceRow that has been inserted to the given container already
     */
    private ReferenceRow popOrCreateAndInsertRowTo(ReferenceContainerDataField target, Collection<ReferenceRow> rows, String value, String includes, Map<String, Change> changeMap, DateTimeUserPair info) {
        ReferenceRow row = popRowWithFieldIncludingValue(rows, includes);
        if(row == null) {
            row = ReferenceRow.build(target, new Value(value), info);
            target.getReferences().add(row);
            ChangeUtil.insertChange(changeMap, target, row);
        } else if(!row.valueEquals(value)) {
            // Existing row points at a different revision: replace it with a fresh row.
            // NOTE(review): the old row is returned, not the replacement — confirm intended.
            ReferenceRow newRow = ReferenceRow.build(target, new Value(value), info);
            target.getReferences().add(newRow);
            ChangeUtil.insertChange(changeMap, target, newRow);
        } else {
            row.setRemoved(false);
            target.getReferences().add(row);
        }
        return row;
    }
    /**
     * Helper method for handling and organising container rows.
     * Takes a collection of rows, finds a row based on a field and removes it from the given
     * collection.
     * Assumption is that the collection is not the actual rows list of a ContainerDataField
     * but some other collection used for organising rows during operations.
     *
     * @param rows Collection of rows to search through
     * @param value Value to be searched for, should be non empty string
     * @return First ReferenceRow to match the given value, null if no row was found
     */
    private ReferenceRow popRowWithFieldIncludingValue(Collection<ReferenceRow> rows, String value) {
        for(Iterator<ReferenceRow> i = rows.iterator(); i.hasNext(); ) {
            ReferenceRow row = i.next();
            if(row.valueContaints(value)) {
                i.remove();
                return row;
            }
        }
        return null;
    }
}
| |
/*
* Copyright 2013 Splunk, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"): you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.splunk.modularinput;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import java.util.Date;
/**
* The {@code Event} class represents an event or fragment of an event to be written by this modular input to Splunk.
*
* To write an {@code Event} to an XML stream, call its {@code writeTo} method with an {@code XMLStreamWriter} object to write to.
* The {@code Event} must have at least the data field set or {@code writeTo} will throw a {@code MalformedDataException}. All other
* fields are optional. If you omit the time field, the {@code writeTo} method will fill in the current time when it is called.
*
* Typically, you will also want to call {@code setStanza} to specify which instance of the modular input kind this event
* should go to, {@code setTime} to set the timestamp, and {@code setSource}, {@code setHost}, and {@code setSourceType}
* specify where this event came from.
*/
public class Event {
    protected Date time = null;
    protected String data;
    protected String source = null;
    protected String sourceType = null;
    protected String index = null;
    protected String host = null;
    protected boolean done = true;
    protected boolean unbroken = true;
    protected String stanza = null;
    public Event() {}
    // Helper method to write a single field to an XMLStreamWriter object only if it is not null.
    protected void writeFieldTo(XMLStreamWriter out, String name, String value) throws XMLStreamException {
        if (value != null) {
            out.writeStartElement(name);
            out.writeCharacters(value);
            out.writeEndElement();
        }
    }
    /**
     * Writes this event to the given {@code XMLStreamWriter}.
     *
     * @param out The {@code XMLStreamWriter} to append to.
     * @throws XMLStreamException if there is a problem in the {@code XMLStreamWriter}.
     * @throws MalformedDataException if you have not specified data for this event.
     */
    public void writeTo(XMLStreamWriter out) throws XMLStreamException, MalformedDataException {
        if (data == null) {
            throw new MalformedDataException("Events must have at least the data field set to be written to XML.");
        }
        out.writeStartElement("event");
        if (getStanza() != null) {
            out.writeAttribute("stanza", getStanza());
        }
        out.writeAttribute("unbroken", isUnbroken() ? "1" : "0");
        if (this.time != null) {
            // Format with Locale.US so the epoch-seconds timestamp always uses a '.' decimal
            // separator; the default locale could emit ',' (e.g. "1369578600,000"), which
            // Splunk cannot parse as a time value.
            writeFieldTo(out, "time", String.format(java.util.Locale.US, "%.3f", time.getTime() / 1000D));
        }
        writeFieldTo(out, "source", getSource());
        writeFieldTo(out, "sourcetype", getSourceType());
        writeFieldTo(out, "index", getIndex());
        writeFieldTo(out, "host", getHost());
        writeFieldTo(out, "data", getData());
        if (isDone()) {
            out.writeStartElement("done");
            out.writeEndElement();
        }
        out.writeEndElement();
        out.writeCharacters("\r\n");
        out.flush();
    }
    /**
     * Gets a {@code java.util.Date} object giving the timestamp that should be sent with this event. If this field is null,
     * Splunk will assign the time at which the event is indexed as its timestamp.
     *
     * @return A {@code java.util.Date} object giving the time assigned to this Event, or null if Splunk should apply a default
     * timestamp.
     */
    public Date getTime() {
        return this.time;
    }
    /**
     * Sets a {@code java.util.Date} object giving the timestamp that should be sent with this event. If this field is null,
     * Splunk will assign the time at which the event is indexed as its timestamp.
     *
     * @param time The {@code java.util.Date} which should be used as this event's timestamp, or null to have Splunk use a
     * default timestamp.
     */
    public void setTime(Date time) {
        this.time = time;
    }
    /**
     * Gets the text of the event that Splunk should index.
     *
     * @return A String containing the event text.
     */
    public String getData() {
        return this.data;
    }
    /**
     * Sets the text of the event that Splunk should index.
     *
     * @param data A String containing the event text.
     */
    public void setData(String data) {
        this.data = data;
    }
    /**
     * Gets the file, service, or other producer that this {@code Event} comes from. For lines in log files, it is
     * typically the full path to the log file. If it is omitted, Splunk will guess a sensible name for the source.
     *
     * @return A String giving the source of this event, or null to have Splunk guess.
     */
    public String getSource() {
        return this.source;
    }
    /**
     * Sets the file, service, or other producer that this {@code Event} comes from. For lines in log files, it is
     * typically the full path to the log file. If it is omitted, Splunk will guess a sensible name for the source.
     *
     * @param source A String to be used as the source of this event, or null to have Splunk guess.
     */
    public void setSource(String source) {
        this.source = source;
    }
    /**
     * Gets a classification of this event. For example, all different web server logs might be assigned
     * the same source type, or different source types can be assigned to distinct classes of events that all have
     * the same source. If it is omitted, Splunk will guess a sensible name for the source type.
     *
     * @return The source type currently set on this event, or null if Splunk is to guess a source.
     */
    public String getSourceType() {
        return this.sourceType;
    }
    /**
     * Sets a classification of this event. For example, all different web server logs might be assigned
     * the same source type, or different source types can be assigned to distinct classes of events that all have
     * the same source. If this field is omitted, Splunk will make a guess as to the source type.
     *
     * @param sourceType A String to use as the source type for this event, or null to have Splunk guess.
     */
    public void setSourceType(String sourceType) {
        this.sourceType = sourceType;
    }
    /**
     * Gets an index field specifying which index Splunk should write this event to. If it is omitted, Splunk has a default
     * index where events will be written.
     *
     * @return The index this event is specified to write to, or null if it will be written to the default index.
     */
    public String getIndex() {
        return this.index;
    }
    /**
     * Sets an index field specifying which index Splunk should write this event to. If it is omitted, Splunk has a default
     * index where events will be written.
     *
     * @param index The name of the index to write to, or null to have Splunk write to the default index.
     */
    public void setIndex(String index) {
        this.index = index;
    }
    /**
     * Gets a host specifying the name of the network host on which this event was produced. If it is omitted, Splunk will use
     * the host from which it directly received the event.
     *
     * @return A String giving the host name of the event source, or null to use the host Splunk receives the event from.
     */
    public String getHost() {
        return this.host;
    }
    /**
     * Sets a host specifying the name of the network host on which this event was produced. If it is omitted, Splunk will use
     * the host from which it directly received the event.
     *
     * @param host A String giving the host name of the event source, or null to use the host Splunk receives
     * the event from.
     */
    public void setHost(String host) {
        this.host = host;
    }
    /**
     * Sets a value indicating whether this is the last piece of an event broken across multiple {@code Event} objects.
     *
     * Splunk allows events from modular inputs to be sent in pieces. For example, if lines of an event become available
     * one at a time, they can be sent (in events with {@code setUnbroken(false)} called on them) as hunks. At the end of the
     * event, you must manually tell Splunk to break after this hunk by setting done to true. Then the next event
     * received will be taken to be part of another event.
     *
     * By default, done is {@code true} and unbroken is {@code true}, so if you do not touch these fields, you will send one complete
     * event per {@code Event} object.
     *
     * @param done Is this the last hunk of an event broken across multiple {@code Event} objects?
     */
    public void setDone(boolean done) {
        this.done = done;
    }
    /**
     * Gets a value indicating whether this is the last piece of an event broken across multiple {@code Event} objects.
     *
     * Splunk allows events from modular inputs to be sent in pieces. For example, if lines of an event become available
     * one at a time, they can be sent (in events with setUnbroken(false) called on them) as hunks. At the end of the
     * event, you must manually tell Splunk to break after this hunk by setting done to true. Then the next event
     * received will be taken to be part of another event.
     *
     * By default, done is {@code true} and unbroken is {@code true}, so if you do not touch these fields, you will send one complete
     * event per {@code Event} object.
     *
     * @return Is this the last hunk of an event broken across multiple {@code Event} objects?
     */
    public boolean isDone() {
        return this.done;
    }
    /**
     * Sets a value indicating whether this event is completely encapsulated in this {@code Event} object.
     *
     * Splunk allows events from modular inputs to be sent in pieces. If unbroken is set to {@code true}, then this event is
     * assumed to be a complete event, not a piece of one. By default, unbroken is {@code true}. If you set unbroken to {@code false},
     * you need to call {@code setDone(true)} on the last hunk of the complete event for Splunk to know to terminate it.
     *
     * @param unbroken Is this event completely encapsulated in this {@code Event} object?
     */
    public void setUnbroken(boolean unbroken) {
        this.unbroken = unbroken;
    }
    /**
     * Gets a value indicating whether this event is completely encapsulated in this {@code Event} object.
     *
     * Splunk allows events from modular inputs to be sent in pieces. If unbroken is set to {@code true}, then this event is
     * assumed to be a complete event, not a piece of one. By default, unbroken is {@code true}. If you set unbroken to {@code false},
     * you need to call {@code setDone(true)} on the last hunk of the complete event for Splunk to know to terminate it.
     *
     * @return Is this event completely encapsulated in this {@code Event} object?
     */
    public boolean isUnbroken() {
        return this.unbroken;
    }
    /**
     * Gets the name of the input this event should be sent to.
     *
     * A single modular input script can feed events to multiple instances of the modular input. In this case, each
     * event must be marked with the name of the input it should be sent to. This is also the name of the configuration
     * stanza that describes that input.
     *
     * @return The name of the input this event should be sent to.
     */
    public String getStanza() {
        return this.stanza;
    }
    /**
     * Sets the name of the input this event should be sent to.
     *
     * A single modular input script can feed events to multiple instances of the modular input. In this case, each
     * event must be marked with the name of the input it should be sent to. This is also the name of the configuration
     * stanza that describes that input.
     *
     * @param stanza The name of the input this event should be sent to.
     */
    public void setStanza(String stanza) {
        this.stanza = stanza;
    }
}
| |
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package org.wso2.developerstudio.eclipse.gmf.esb.provider;
import java.util.Collection;
import java.util.List;
import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.util.ResourceLocator;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.edit.provider.ComposeableAdapterFactory;
import org.eclipse.emf.edit.provider.IEditingDomainItemProvider;
import org.eclipse.emf.edit.provider.IItemLabelProvider;
import org.eclipse.emf.edit.provider.IItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.IItemPropertySource;
import org.eclipse.emf.edit.provider.IStructuredItemContentProvider;
import org.eclipse.emf.edit.provider.ITreeItemContentProvider;
import org.eclipse.emf.edit.provider.ItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.ItemProviderAdapter;
import org.eclipse.emf.edit.provider.ViewerNotification;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbFactory;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage;
import org.wso2.developerstudio.eclipse.gmf.esb.ThrottlePolicyConfiguration;
import org.wso2.developerstudio.eclipse.gmf.esb.ThrottlePolicyType;
/**
* This is the item provider adapter for a {@link org.wso2.developerstudio.eclipse.gmf.esb.ThrottlePolicyConfiguration} object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public class ThrottlePolicyConfigurationItemProvider
extends ItemProviderAdapter
implements
IEditingDomainItemProvider,
IStructuredItemContentProvider,
ITreeItemContentProvider,
IItemLabelProvider,
IItemPropertySource {
/**
* This constructs an instance from a factory and a notifier.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
	public ThrottlePolicyConfigurationItemProvider(AdapterFactory adapterFactory) {
		// EMF-generated: simply registers with the adapter factory; all behaviour
		// comes from ItemProviderAdapter.
		super(adapterFactory);
	}
/**
* This returns the property descriptors for the adapted class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
	@Override
	public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) {
		// Lazily build the descriptor list once; ItemProviderAdapter caches it in
		// itemPropertyDescriptors for all subsequent calls.
		if (itemPropertyDescriptors == null) {
			super.getPropertyDescriptors(object);
			addPolicyTypePropertyDescriptor(object);
			addMaxConcurrentAccessCountPropertyDescriptor(object);
		}
		return itemPropertyDescriptors;
	}
/**
* This adds a property descriptor for the Policy Type feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
	protected void addPolicyTypePropertyDescriptor(Object object) {
		// EMF-generated descriptor. Positional arguments after the feature literal are,
		// per ItemProviderAdapter.createItemPropertyDescriptor: isSettable (true),
		// multiLine (false), sortChoices (false), display image, category (null),
		// filter flags (null).
		itemPropertyDescriptors.add
			(createItemPropertyDescriptor
				(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
				 getResourceLocator(),
				 getString("_UI_ThrottlePolicyConfiguration_policyType_feature"),
				 getString("_UI_PropertyDescriptor_description", "_UI_ThrottlePolicyConfiguration_policyType_feature", "_UI_ThrottlePolicyConfiguration_type"),
				 EsbPackage.Literals.THROTTLE_POLICY_CONFIGURATION__POLICY_TYPE,
				 true,
				 false,
				 false,
				 ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
				 null,
				 null));
	}
/**
* This adds a property descriptor for the Max Concurrent Access Count feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected void addMaxConcurrentAccessCountPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_ThrottlePolicyConfiguration_maxConcurrentAccessCount_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_ThrottlePolicyConfiguration_maxConcurrentAccessCount_feature", "_UI_ThrottlePolicyConfiguration_type"),
EsbPackage.Literals.THROTTLE_POLICY_CONFIGURATION__MAX_CONCURRENT_ACCESS_COUNT,
true,
false,
false,
ItemPropertyDescriptor.INTEGRAL_VALUE_IMAGE,
null,
null));
}
/**
* This specifies how to implement {@link #getChildren} and is used to deduce an appropriate feature for an
* {@link org.eclipse.emf.edit.command.AddCommand}, {@link org.eclipse.emf.edit.command.RemoveCommand} or
* {@link org.eclipse.emf.edit.command.MoveCommand} in {@link #createCommand}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Collection<? extends EStructuralFeature> getChildrenFeatures(Object object) {
if (childrenFeatures == null) {
super.getChildrenFeatures(object);
childrenFeatures.add(EsbPackage.Literals.THROTTLE_POLICY_CONFIGURATION__POLICY_KEY);
childrenFeatures.add(EsbPackage.Literals.THROTTLE_POLICY_CONFIGURATION__POLICY_ENTRIES);
}
return childrenFeatures;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
protected EStructuralFeature getChildFeature(Object object, Object child) {
// Check the type of the specified child object and return the proper feature to use for
// adding (see {@link AddCommand}) it as a child.
return super.getChildFeature(object, child);
}
/**
* This returns ThrottlePolicyConfiguration.gif.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Object getImage(Object object) {
return overlayImage(object, getResourceLocator().getImage("full/obj16/ThrottlePolicyConfiguration"));
}
/**
* This returns the label text for the adapted class.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public String getText(Object object) {
ThrottlePolicyType labelValue = ((ThrottlePolicyConfiguration)object).getPolicyType();
String label = labelValue == null ? null : labelValue.toString();
return label == null || label.length() == 0 ?
getString("_UI_ThrottlePolicyConfiguration_type") :
getString("_UI_ThrottlePolicyConfiguration_type") + " " + label;
}
/**
* This handles model notifications by calling {@link #updateChildren} to update any cached
* children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void notifyChanged(Notification notification) {
updateChildren(notification);
switch (notification.getFeatureID(ThrottlePolicyConfiguration.class)) {
case EsbPackage.THROTTLE_POLICY_CONFIGURATION__POLICY_TYPE:
case EsbPackage.THROTTLE_POLICY_CONFIGURATION__MAX_CONCURRENT_ACCESS_COUNT:
fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), false, true));
return;
case EsbPackage.THROTTLE_POLICY_CONFIGURATION__POLICY_KEY:
case EsbPackage.THROTTLE_POLICY_CONFIGURATION__POLICY_ENTRIES:
fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), true, false));
return;
}
super.notifyChanged(notification);
}
/**
* This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
* that can be created under this object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
protected void collectNewChildDescriptors(Collection<Object> newChildDescriptors, Object object) {
super.collectNewChildDescriptors(newChildDescriptors, object);
newChildDescriptors.add
(createChildParameter
(EsbPackage.Literals.THROTTLE_POLICY_CONFIGURATION__POLICY_KEY,
EsbFactory.eINSTANCE.createRegistryKeyProperty()));
newChildDescriptors.add
(createChildParameter
(EsbPackage.Literals.THROTTLE_POLICY_CONFIGURATION__POLICY_ENTRIES,
EsbFactory.eINSTANCE.createThrottlePolicyEntry()));
}
/**
* Return the resource locator for this item provider's resources.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public ResourceLocator getResourceLocator() {
return EsbEditPlugin.INSTANCE;
}
}
| |
package com.capitalone.dashboard.collector;
import com.capitalone.dashboard.model.Environment;
import com.capitalone.dashboard.model.EnvironmentComponent;
import com.capitalone.dashboard.model.EnvironmentStatus;
import com.capitalone.dashboard.model.UDeployApplication;
import com.capitalone.dashboard.model.UDeployEnvResCompData;
import com.capitalone.dashboard.repository.EnvironmentComponentRepository;
import com.capitalone.dashboard.util.Supplier;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Component;
import org.springframework.web.client.RestClientException;
import org.springframework.web.client.RestOperations;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
@Component
public class DefaultUDeployClient implements UDeployClient {
    private static final Log LOG = LogFactory
            .getLog(DefaultUDeployClient.class);

    private final UDeploySettings uDeploySettings;
    private final RestOperations restOperations;

    @Autowired
    public DefaultUDeployClient(UDeploySettings uDeploySettings,
                                Supplier<RestOperations> restOperationsSupplier) {
        this.uDeploySettings = uDeploySettings;
        this.restOperations = restOperationsSupplier.get();
    }

    /**
     * Fetches all applications known to the UDeploy instance.
     *
     * @param instanceUrl base URL of the UDeploy server
     * @return applications (empty list when the REST call fails or returns nothing)
     */
    @Override
    public List<UDeployApplication> getApplications(String instanceUrl) {
        List<UDeployApplication> applications = new ArrayList<>();
        for (Object item : parseAsArray(makeRestCall(instanceUrl,
                "deploy/application"))) {
            JSONObject jsonObject = (JSONObject) item;
            UDeployApplication application = new UDeployApplication();
            application.setInstanceUrl(instanceUrl);
            application.setApplicationName(str(jsonObject, "name"));
            application.setApplicationId(str(jsonObject, "id"));
            applications.add(application);
        }
        return applications;
    }

    /**
     * Fetches the environments defined for the given application.
     *
     * @param application application whose environments are listed
     * @return environments (empty list on REST failure)
     */
    @Override
    public List<Environment> getEnvironments(UDeployApplication application) {
        List<Environment> environments = new ArrayList<>();
        String url = "deploy/application/" + application.getApplicationId()
                + "/environments/false";
        for (Object item : parseAsArray(makeRestCall(
                application.getInstanceUrl(), url))) {
            JSONObject jsonObject = (JSONObject) item;
            environments.add(new Environment(str(jsonObject, "id"), str(
                    jsonObject, "name")));
        }
        return environments;
    }

    /**
     * Fetches the latest desired inventory for an environment and maps each
     * entry to an {@link EnvironmentComponent}.
     *
     * @param application owning application (supplies the instance URL)
     * @param environment environment to inspect
     * @return deployed components (empty list when no inventory data exists)
     */
    @Override
    public List<EnvironmentComponent> getEnvironmentComponents(
            UDeployApplication application, Environment environment) {
        List<EnvironmentComponent> components = new ArrayList<>();
        String url = "deploy/environment/" + environment.getId()
                + "/latestDesiredInventory";
        try {
            for (Object item : parseAsArray(makeRestCall(
                    application.getInstanceUrl(), url))) {
                JSONObject jsonObject = (JSONObject) item;
                JSONObject versionObject = (JSONObject) jsonObject
                        .get("version");
                JSONObject componentObject = (JSONObject) jsonObject
                        .get("component");
                JSONObject complianceObject = (JSONObject) jsonObject
                        .get("compliancy");
                EnvironmentComponent component = new EnvironmentComponent();
                component.setEnvironmentName(environment.getName());
                component.setEnvironmentUrl(normalizeUrl(
                        application.getInstanceUrl(), "/#environment/"
                                + environment.getId()));
                component.setComponentID(str(componentObject, "id"));
                component.setComponentName(str(componentObject, "name"));
                component.setComponentVersion(str(versionObject, "name"));
                // "Deployed" means every desired copy is in the correct state.
                component.setDeployed(complianceObject.get("correctCount")
                        .equals(complianceObject.get("desiredCount")));
                component.setAsOfDate(date(jsonObject, "date"));
                components.add(component);
            }
        } catch (NullPointerException npe) {
            // Deliberate best-effort: missing version/component/compliancy
            // sub-objects mean nothing is deployed to this environment yet.
            LOG.info("No Environment data found, No components deployed");
        }
        return components;
    }

    /**
     * Builds per-component resource status rows for an environment, combining
     * the full resource tree with the non-compliant resource list.
     * Called by DefaultEnvironmentStatusUpdater.
     *
     * @param application owning application
     * @param environment environment to inspect
     * @return one {@link UDeployEnvResCompData} per (resource, component) pair
     */
    @Override
    public List<UDeployEnvResCompData> getEnvironmentResourceStatusData(
            UDeployApplication application, Environment environment) {
        List<UDeployEnvResCompData> environmentStatuses = new ArrayList<>();
        String urlNonCompliantResources = "deploy/environment/"
                + environment.getId() + "/noncompliantResources";
        String urlAllResources = "deploy/environment/" + environment.getId()
                + "/resources";
        JSONArray nonCompliantResourceJSON = parseAsArray(makeRestCall(
                application.getInstanceUrl(), urlNonCompliantResources));
        JSONArray allResourceJSON = parseAsArray(makeRestCall(
                application.getInstanceUrl(), urlAllResources));
        for (Object item : allResourceJSON) {
            JSONObject jsonObject = (JSONObject) item;
            if (jsonObject == null) {
                continue;
            }
            JSONObject parentObject = (JSONObject) jsonObject.get("parent");
            String resourceName = str(jsonObject, "name");
            // Guard: resources without a parent object are treated as offline.
            boolean online = (parentObject != null)
                    && "ONLINE".equalsIgnoreCase(str(parentObject, "status"));
            JSONArray jsonChildren = (JSONArray) jsonObject.get("children");
            if ((jsonChildren == null) || jsonChildren.isEmpty()) {
                continue;
            }
            for (Object child : jsonChildren) {
                JSONObject childObject = (JSONObject) child;
                // BUGFIX: build a fresh data object per child. The previous code
                // reused one instance for all children of a resource, so every
                // list entry aliased the same object and reported only the last
                // child's component name/version.
                UDeployEnvResCompData data = new UDeployEnvResCompData();
                data.setEnvironmentName(environment.getName());
                data.setCollectorItemId(application.getId());
                data.setResourceName(resourceName);
                data.setOnline(online);
                String componentName = (String) childObject.get("name");
                data.setComponentName(componentName);
                JSONArray jsonVersions = (JSONArray) childObject.get("versions");
                String version = "UNKNOWN";
                data.setDeployed(false);
                if ((jsonVersions != null) && !jsonVersions.isEmpty()) {
                    JSONObject versionObject = (JSONObject) jsonVersions.get(0);
                    version = (String) versionObject.get("name");
                    data.setAsOfDate(date(versionObject, "created"));
                    data.setDeployed(true);
                } else {
                    // No deployed version on the resource; try the
                    // non-compliant resource list for the desired version.
                    String nonCompliantVersion = findNonCompliantVersion(
                            nonCompliantResourceJSON, componentName, data);
                    if (nonCompliantVersion != null) {
                        version = nonCompliantVersion;
                    }
                }
                data.setComponentVersion(version);
                environmentStatuses.add(data);
            }
        }
        return environmentStatuses;
    }

    // ////// Helpers

    /**
     * Searches the non-compliant resource list for a version whose component
     * matches {@code componentName}. On a match, stamps {@code data} with the
     * version's creation date and marks it not deployed.
     *
     * @return the matching version name, or {@code null} if none found
     */
    private String findNonCompliantVersion(JSONArray nonCompliantResourceJSON,
            String componentName, UDeployEnvResCompData data) {
        for (Object nonCompItem : nonCompliantResourceJSON) {
            JSONArray children = (JSONArray) ((JSONObject) nonCompItem)
                    .get("children");
            if (children == null) {
                continue;
            }
            for (Object childItem : children) {
                JSONObject childObject = (JSONObject) childItem;
                JSONObject versionObject = (JSONObject) childObject
                        .get("version");
                if (versionObject == null) {
                    continue;
                }
                JSONObject componentObject = (JSONObject) versionObject
                        .get("component");
                if ((componentObject != null)
                        && componentName.equalsIgnoreCase(
                                (String) componentObject.get("name"))) {
                    data.setAsOfDate(date(versionObject, "created"));
                    data.setDeployed(false);
                    return (String) versionObject.get("name");
                }
            }
        }
        return null;
    }

    /**
     * Performs a GET against {@code <instanceUrl>/rest/<endpoint>} with basic
     * auth. Returns {@code null} on any REST error (parseAsArray maps that to
     * an empty array).
     */
    private ResponseEntity<String> makeRestCall(String instanceUrl,
            String endpoint) {
        String url = normalizeUrl(instanceUrl, "/rest/" + endpoint);
        ResponseEntity<String> response = null;
        try {
            response = restOperations.exchange(url, HttpMethod.GET,
                    new HttpEntity<>(createHeaders()), String.class);
        } catch (RestClientException re) {
            // Keep the exception as the log cause instead of discarding it.
            LOG.error("Error with REST url: " + url, re);
        }
        return response;
    }

    /** Joins the instance URL and a path, tolerating a trailing slash. */
    private String normalizeUrl(String instanceUrl, String remainder) {
        return StringUtils.removeEnd(instanceUrl, "/") + remainder;
    }

    /** Builds basic-auth headers from the configured UDeploy credentials. */
    private HttpHeaders createHeaders() {
        String auth = uDeploySettings.getUsername() + ":"
                + uDeploySettings.getPassword();
        // Base64 output is pure ASCII, so decoding with US-ASCII is exact and
        // avoids depending on the platform default charset.
        byte[] encodedAuth = Base64.encodeBase64(
                auth.getBytes(StandardCharsets.US_ASCII));
        HttpHeaders headers = new HttpHeaders();
        headers.set("Authorization",
                "Basic " + new String(encodedAuth, StandardCharsets.US_ASCII));
        return headers;
    }

    /**
     * Parses a response body as a JSON array; a {@code null} response or a
     * parse failure yields an empty array so callers can always iterate.
     */
    private JSONArray parseAsArray(ResponseEntity<String> response) {
        if (response == null) {
            return new JSONArray();
        }
        try {
            return (JSONArray) new JSONParser().parse(response.getBody());
        } catch (ParseException pe) {
            LOG.debug(response.getBody());
            LOG.error(pe.getMessage());
        }
        return new JSONArray();
    }

    /** Returns the string value for {@code key}, or {@code null} when absent. */
    private String str(JSONObject json, String key) {
        Object value = json.get(key);
        return value == null ? null : value.toString();
    }

    /**
     * Returns the epoch-millis value for {@code key}, or 0 when absent.
     * Assumes json-simple parsed the field as a {@code Long} — a non-numeric
     * value would throw ClassCastException, as in the original code.
     */
    private long date(JSONObject jsonObject, String key) {
        Object value = jsonObject.get(key);
        return value == null ? 0 : (long) value;
    }
}
| |
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.analysis.skylark;
import com.google.common.base.Joiner;
import com.google.common.base.Objects;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Interner;
import com.google.devtools.build.lib.actions.ActionKeyContext;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.Artifact.ArtifactExpander;
import com.google.devtools.build.lib.actions.CommandLine;
import com.google.devtools.build.lib.actions.CommandLineExpansionException;
import com.google.devtools.build.lib.actions.CommandLineItem;
import com.google.devtools.build.lib.actions.FilesetManifest;
import com.google.devtools.build.lib.actions.FilesetManifest.RelativeSymlinkBehavior;
import com.google.devtools.build.lib.actions.SingleStringArgFormatter;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.concurrent.BlazeInterners;
import com.google.devtools.build.lib.events.Location;
import com.google.devtools.build.lib.events.NullEventHandler;
import com.google.devtools.build.lib.skyframe.serialization.autocodec.AutoCodec;
import com.google.devtools.build.lib.skylarkbuildapi.FileApi;
import com.google.devtools.build.lib.skylarkbuildapi.FileRootApi;
import com.google.devtools.build.lib.skylarkinterface.SkylarkPrinter;
import com.google.devtools.build.lib.syntax.BaseFunction;
import com.google.devtools.build.lib.syntax.EvalException;
import com.google.devtools.build.lib.syntax.Mutability;
import com.google.devtools.build.lib.syntax.Printer;
import com.google.devtools.build.lib.syntax.Runtime;
import com.google.devtools.build.lib.syntax.SkylarkList;
import com.google.devtools.build.lib.syntax.StarlarkSemantics;
import com.google.devtools.build.lib.syntax.StarlarkThread;
import com.google.devtools.build.lib.util.Fingerprint;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.IllegalFormatException;
import java.util.List;
import java.util.UUID;
import java.util.function.Consumer;
import javax.annotation.Nullable;
/** Supports ctx.actions.args() from Skylark. */
@AutoCodec
public class SkylarkCustomCommandLine extends CommandLine {
private final StarlarkSemantics starlarkSemantics;
private final ImmutableList<Object> arguments;
private static final Joiner LINE_JOINER = Joiner.on("\n").skipNulls();
private static final Joiner FIELD_JOINER = Joiner.on(": ").skipNulls();
@AutoCodec
static final class VectorArg {
  // Interned: a VectorArg is fully described by its feature bitmask, so equal
  // masks share one canonical instance.
  private static final Interner<VectorArg> interner = BlazeInterners.newStrongInterner();

  // Feature bit flags. The payload for each set flag is stored positionally in
  // the shared flat `arguments` list. push(), eval() and addToFingerprint()
  // MUST agree exactly on this slot ordering — do not reorder reads/writes.
  private static final int HAS_LOCATION = 1;
  private static final int HAS_MAP_ALL = 1 << 1;
  private static final int HAS_MAP_EACH = 1 << 2;
  private static final int IS_NESTED_SET = 1 << 3;
  private static final int EXPAND_DIRECTORIES = 1 << 4;
  private static final int UNIQUIFY = 1 << 5;
  private static final int OMIT_IF_EMPTY = 1 << 6;
  private static final int HAS_ARG_NAME = 1 << 7;
  private static final int HAS_FORMAT_EACH = 1 << 8;
  private static final int HAS_BEFORE_EACH = 1 << 9;
  private static final int HAS_JOIN_WITH = 1 << 10;
  private static final int HAS_FORMAT_JOINED = 1 << 11;
  private static final int HAS_TERMINATE_WITH = 1 << 12;

  // Fixed UUID salts mixed into the action-key fingerprint so that toggling a
  // feature changes the key even when the feature carries no string payload.
  private static final UUID EXPAND_DIRECTORIES_UUID =
      UUID.fromString("9d7520d2-a187-11e8-98d0-529269fb1459");
  private static final UUID UNIQUIFY_UUID =
      UUID.fromString("7f494c3e-faea-4498-a521-5d3bc6ee19eb");
  private static final UUID OMIT_IF_EMPTY_UUID =
      UUID.fromString("923206f1-6474-4a8f-b30f-4dd3143622e6");
  private static final UUID ARG_NAME_UUID =
      UUID.fromString("2bc00382-7199-46ec-ad52-1556577cde1a");
  private static final UUID FORMAT_EACH_UUID =
      UUID.fromString("8e974aec-df07-4a51-9418-f4c1172b4045");
  private static final UUID BEFORE_EACH_UUID =
      UUID.fromString("f7e101bc-644d-4277-8562-6515ad55a988");
  private static final UUID JOIN_WITH_UUID =
      UUID.fromString("c227dbd3-edad-454e-bc8a-c9b5ba1c38a3");
  private static final UUID FORMAT_JOINED_UUID =
      UUID.fromString("528af376-4233-4c27-be4d-b0ff24ed68db");
  private static final UUID TERMINATE_WITH_UUID =
      UUID.fromString("a4e5e090-0dbd-4d41-899a-77cfbba58655");

  // Bitwise OR of the feature flags above.
  private final int features;

  private VectorArg(int features) {
    this.features = features;
  }

  @AutoCodec.VisibleForSerialization
  @AutoCodec.Instantiator
  static VectorArg create(int features) {
    return interner.intern(new VectorArg(features));
  }

  /**
   * Flattens a Builder into the shared arguments list: first the interned
   * VectorArg (the bitmask), then each present payload in fixed slot order:
   * location, map_all, map_each, nested-set-or-(count + elements), arg name,
   * format_each, before_each, join_with, format_joined, terminate_with.
   */
  private static void push(ImmutableList.Builder<Object> arguments, Builder arg) {
    int features = 0;
    features |= arg.mapAll != null ? HAS_MAP_ALL : 0;
    features |= arg.mapEach != null ? HAS_MAP_EACH : 0;
    features |= arg.nestedSet != null ? IS_NESTED_SET : 0;
    features |= arg.expandDirectories ? EXPAND_DIRECTORIES : 0;
    features |= arg.uniquify ? UNIQUIFY : 0;
    features |= arg.omitIfEmpty ? OMIT_IF_EMPTY : 0;
    features |= arg.argName != null ? HAS_ARG_NAME : 0;
    features |= arg.formatEach != null ? HAS_FORMAT_EACH : 0;
    features |= arg.beforeEach != null ? HAS_BEFORE_EACH : 0;
    features |= arg.joinWith != null ? HAS_JOIN_WITH : 0;
    features |= arg.formatJoined != null ? HAS_FORMAT_JOINED : 0;
    features |= arg.terminateWith != null ? HAS_TERMINATE_WITH : 0;
    // Location is only stored when some feature can report an error at
    // expansion time (formatting or user callbacks).
    boolean hasLocation =
        arg.location != null
            && (features & (HAS_FORMAT_EACH | HAS_FORMAT_JOINED | HAS_MAP_ALL | HAS_MAP_EACH))
                != 0;
    features |= hasLocation ? HAS_LOCATION : 0;
    Preconditions.checkState(
        (features & (HAS_MAP_ALL | HAS_MAP_EACH)) != (HAS_MAP_ALL | HAS_MAP_EACH),
        "Cannot use both map_all and map_each");
    VectorArg vectorArg = VectorArg.create(features);
    arguments.add(vectorArg);
    if (hasLocation) {
      arguments.add(arg.location);
    }
    if (arg.mapAll != null) {
      arguments.add(arg.mapAll);
    }
    if (arg.mapEach != null) {
      arguments.add(arg.mapEach);
    }
    if (arg.nestedSet != null) {
      arguments.add(arg.nestedSet);
    } else {
      // Lists are stored inline as an element count followed by the elements.
      List<?> list = arg.list;
      int count = list.size();
      arguments.add(count);
      for (int i = 0; i < count; ++i) {
        arguments.add(list.get(i));
      }
    }
    if (arg.argName != null) {
      arguments.add(arg.argName);
    }
    if (arg.formatEach != null) {
      arguments.add(arg.formatEach);
    }
    if (arg.beforeEach != null) {
      arguments.add(arg.beforeEach);
    }
    if (arg.joinWith != null) {
      arguments.add(arg.joinWith);
    }
    if (arg.formatJoined != null) {
      arguments.add(arg.formatJoined);
    }
    if (arg.terminateWith != null) {
      arguments.add(arg.terminateWith);
    }
  }

  /**
   * Decodes the slots written by push() starting at {@code argi}, expands the
   * values into command-line strings appended to {@code builder}, and returns
   * the index just past this arg's slots.
   *
   * @throws CommandLineExpansionException on bad format strings or map_fn
   *     results
   */
  private int eval(
      List<Object> arguments,
      int argi,
      ImmutableList.Builder<String> builder,
      @Nullable ArtifactExpander artifactExpander,
      StarlarkSemantics starlarkSemantics)
      throws CommandLineExpansionException {
    final Location location =
        ((features & HAS_LOCATION) != 0) ? (Location) arguments.get(argi++) : null;
    final List<Object> originalValues;
    BaseFunction mapAll =
        ((features & HAS_MAP_ALL) != 0) ? (BaseFunction) arguments.get(argi++) : null;
    BaseFunction mapEach =
        ((features & HAS_MAP_EACH) != 0) ? (BaseFunction) arguments.get(argi++) : null;
    if ((features & IS_NESTED_SET) != 0) {
      @SuppressWarnings("unchecked")
      NestedSet<Object> nestedSet = (NestedSet<Object>) arguments.get(argi++);
      originalValues = nestedSet.toList();
    } else {
      // Inline list: count slot followed by `count` element slots.
      int count = (Integer) arguments.get(argi++);
      originalValues = arguments.subList(argi, argi + count);
      argi += count;
    }
    List<Object> expandedValues = originalValues;
    // Directory expansion only happens when an expander is available (i.e. at
    // execution time, not during analysis-time stringification).
    if (artifactExpander != null && (features & EXPAND_DIRECTORIES) != 0) {
      if (hasDirectory(originalValues)) {
        expandedValues = expandDirectories(artifactExpander, originalValues);
      }
    }
    List<String> stringValues;
    if (mapEach != null) {
      stringValues = new ArrayList<>(expandedValues.size());
      applyMapEach(mapEach, expandedValues, stringValues::add, location, starlarkSemantics);
    } else if (mapAll != null) {
      Object result = applyMapFn(mapAll, expandedValues, location, starlarkSemantics);
      if (!(result instanceof List)) {
        throw new CommandLineExpansionException(
            errorMessage(
                "map_fn must return a list, got " + result.getClass().getSimpleName(),
                location,
                null));
      }
      List<?> resultAsList = (List) result;
      if (resultAsList.size() != expandedValues.size()) {
        throw new CommandLineExpansionException(
            errorMessage(
                String.format(
                    "map_fn must return a list of the same length as the input. "
                        + "Found list of length %d, expected %d.",
                    resultAsList.size(), expandedValues.size()),
                location,
                null));
      }
      int count = resultAsList.size();
      stringValues = new ArrayList<>(count);
      // map_fn contract doesn't guarantee that the values returned are strings,
      // so convert here
      for (int i = 0; i < count; ++i) {
        stringValues.add(CommandLineItem.expandToCommandLine(resultAsList.get(i)));
      }
    } else {
      int count = expandedValues.size();
      stringValues = new ArrayList<>(expandedValues.size());
      for (int i = 0; i < count; ++i) {
        stringValues.add(CommandLineItem.expandToCommandLine(expandedValues.get(i)));
      }
    }
    // It's safe to uniquify at this stage, any transformations after this
    // will ensure continued uniqueness of the values
    if ((features & UNIQUIFY) != 0) {
      // In-place compaction keeping the first occurrence of each value.
      HashSet<String> seen = new HashSet<>(stringValues.size());
      int count = stringValues.size();
      int addIndex = 0;
      for (int i = 0; i < count; ++i) {
        String val = stringValues.get(i);
        if (seen.add(val)) {
          stringValues.set(addIndex++, val);
        }
      }
      stringValues = stringValues.subList(0, addIndex);
    }
    boolean isEmptyAndShouldOmit = stringValues.isEmpty() && (features & OMIT_IF_EMPTY) != 0;
    if ((features & HAS_ARG_NAME) != 0) {
      String argName = (String) arguments.get(argi++);
      if (!isEmptyAndShouldOmit) {
        builder.add(argName);
      }
    }
    if ((features & HAS_FORMAT_EACH) != 0) {
      String formatStr = (String) arguments.get(argi++);
      Formatter formatter = Formatter.get(location, starlarkSemantics);
      try {
        int count = stringValues.size();
        for (int i = 0; i < count; ++i) {
          stringValues.set(i, formatter.format(formatStr, stringValues.get(i)));
        }
      } catch (IllegalFormatException e) {
        throw new CommandLineExpansionException(errorMessage(e.getMessage(), location, null));
      }
    }
    // before_each and join_with are mutually exclusive output shapes.
    if ((features & HAS_BEFORE_EACH) != 0) {
      String beforeEach = (String) arguments.get(argi++);
      int count = stringValues.size();
      for (int i = 0; i < count; ++i) {
        builder.add(beforeEach);
        builder.add(stringValues.get(i));
      }
    } else if ((features & HAS_JOIN_WITH) != 0) {
      String joinWith = (String) arguments.get(argi++);
      String formatJoined =
          ((features & HAS_FORMAT_JOINED) != 0) ? (String) arguments.get(argi++) : null;
      if (!isEmptyAndShouldOmit) {
        String result = Joiner.on(joinWith).join(stringValues);
        if (formatJoined != null) {
          Formatter formatter = Formatter.get(location, starlarkSemantics);
          try {
            result = formatter.format(formatJoined, result);
          } catch (IllegalFormatException e) {
            throw new CommandLineExpansionException(errorMessage(e.getMessage(), location, null));
          }
        }
        builder.add(result);
      }
    } else {
      builder.addAll(stringValues);
    }
    if ((features & HAS_TERMINATE_WITH) != 0) {
      String terminateWith = (String) arguments.get(argi++);
      if (!isEmptyAndShouldOmit) {
        builder.add(terminateWith);
      }
    }
    return argi;
  }

  /** Returns true if any element is a directory artifact (tree or fileset). */
  private static boolean hasDirectory(List<Object> originalValues) {
    int n = originalValues.size();
    for (int i = 0; i < n; ++i) {
      Object object = originalValues.get(i);
      if (isDirectory(object)) {
        return true;
      }
    }
    return false;
  }

  private static boolean isDirectory(Object object) {
    return ((object instanceof Artifact) && ((Artifact) object).isDirectory());
  }

  /**
   * Replaces each directory artifact with its expanded contents, preserving
   * the order of non-directory elements.
   */
  private static List<Object> expandDirectories(
      Artifact.ArtifactExpander artifactExpander, List<Object> originalValues)
      throws CommandLineExpansionException {
    List<Object> expandedValues;
    int n = originalValues.size();
    expandedValues = new ArrayList<>(n);
    for (int i = 0; i < n; ++i) {
      Object object = originalValues.get(i);
      if (isDirectory(object)) {
        Artifact artifact = (Artifact) object;
        if (artifact.isTreeArtifact()) {
          artifactExpander.expand((Artifact) object, expandedValues);
        } else if (artifact.isFileset()) {
          expandFileset(artifactExpander, artifact, expandedValues);
        } else {
          throw new AssertionError("Unknown artifact type.");
        }
      } else {
        expandedValues.add(object);
      }
    }
    return expandedValues;
  }

  /** Expands a fileset artifact into one FilesetSymlinkFile per manifest entry. */
  private static void expandFileset(
      Artifact.ArtifactExpander artifactExpander, Artifact fileset, List<Object> expandedValues)
      throws CommandLineExpansionException {
    try {
      FilesetManifest filesetManifest =
          FilesetManifest.constructFilesetManifest(
              artifactExpander.getFileset(fileset),
              fileset.getExecPath(),
              RelativeSymlinkBehavior.IGNORE);
      for (PathFragment relativePath : filesetManifest.getEntries().keySet()) {
        expandedValues.add(new FilesetSymlinkFile(fileset, relativePath));
      }
    } catch (IOException e) {
      throw new CommandLineExpansionException("Could not expand fileset: " + e.getMessage());
    }
  }

  /**
   * Adds this arg's contribution to the action key, consuming the same slots
   * as eval() (in the same order) and returning the next slot index. Feature
   * UUIDs are mixed in so that structurally different specs never collide.
   */
  private int addToFingerprint(
      List<Object> arguments,
      int argi,
      ActionKeyContext actionKeyContext,
      Fingerprint fingerprint,
      StarlarkSemantics starlarkSemantics)
      throws CommandLineExpansionException {
    // map_all cannot be fingerprinted element-wise: fall back to fully
    // evaluating the arg and hashing the resulting strings.
    if ((features & HAS_MAP_ALL) != 0) {
      return addToFingerprintLegacy(arguments, argi, fingerprint, starlarkSemantics);
    }
    final Location location =
        ((features & HAS_LOCATION) != 0) ? (Location) arguments.get(argi++) : null;
    BaseFunction mapEach =
        ((features & HAS_MAP_EACH) != 0) ? (BaseFunction) arguments.get(argi++) : null;
    if ((features & IS_NESTED_SET) != 0) {
      NestedSet<?> values = (NestedSet) arguments.get(argi++);
      if (mapEach != null) {
        CommandLineItem.MapFn<Object> commandLineItemMapFn =
            new CommandLineItemMapEachAdaptor(mapEach, location, starlarkSemantics);
        try {
          actionKeyContext.addNestedSetToFingerprint(commandLineItemMapFn, fingerprint, values);
        } catch (UncheckedCommandLineExpansionException e) {
          // We wrap the CommandLineExpansionException below, unwrap here
          throw e.cause;
        }
      } else {
        actionKeyContext.addNestedSetToFingerprint(fingerprint, values);
      }
    } else {
      int count = (Integer) arguments.get(argi++);
      final List<Object> originalValues = arguments.subList(argi, argi + count);
      argi += count;
      if (mapEach != null) {
        List<String> stringValues = new ArrayList<>(count);
        applyMapEach(mapEach, originalValues, stringValues::add, location, starlarkSemantics);
        for (String s : stringValues) {
          fingerprint.addString(s);
        }
      } else {
        for (int i = 0; i < count; ++i) {
          fingerprint.addString(CommandLineItem.expandToCommandLine(originalValues.get(i)));
        }
      }
    }
    if ((features & EXPAND_DIRECTORIES) != 0) {
      fingerprint.addUUID(EXPAND_DIRECTORIES_UUID);
    }
    if ((features & UNIQUIFY) != 0) {
      fingerprint.addUUID(UNIQUIFY_UUID);
    }
    if ((features & OMIT_IF_EMPTY) != 0) {
      fingerprint.addUUID(OMIT_IF_EMPTY_UUID);
    }
    if ((features & HAS_ARG_NAME) != 0) {
      String argName = (String) arguments.get(argi++);
      fingerprint.addUUID(ARG_NAME_UUID);
      fingerprint.addString(argName);
    }
    if ((features & HAS_FORMAT_EACH) != 0) {
      String formatStr = (String) arguments.get(argi++);
      fingerprint.addUUID(FORMAT_EACH_UUID);
      fingerprint.addString(formatStr);
    }
    if ((features & HAS_BEFORE_EACH) != 0) {
      String beforeEach = (String) arguments.get(argi++);
      fingerprint.addUUID(BEFORE_EACH_UUID);
      fingerprint.addString(beforeEach);
    } else if ((features & HAS_JOIN_WITH) != 0) {
      String joinWith = (String) arguments.get(argi++);
      fingerprint.addUUID(JOIN_WITH_UUID);
      fingerprint.addString(joinWith);
      if ((features & HAS_FORMAT_JOINED) != 0) {
        String formatJoined = (String) arguments.get(argi++);
        fingerprint.addUUID(FORMAT_JOINED_UUID);
        fingerprint.addString(formatJoined);
      }
    }
    if ((features & HAS_TERMINATE_WITH) != 0) {
      String terminateWith = (String) arguments.get(argi++);
      fingerprint.addUUID(TERMINATE_WITH_UUID);
      fingerprint.addString(terminateWith);
    }
    return argi;
  }

  /** Fingerprints by full evaluation: expand to strings, hash each one. */
  private int addToFingerprintLegacy(
      List<Object> arguments,
      int argi,
      Fingerprint fingerprint,
      StarlarkSemantics starlarkSemantics)
      throws CommandLineExpansionException {
    ImmutableList.Builder<String> builder = ImmutableList.builder();
    argi = eval(arguments, argi, builder, null, starlarkSemantics);
    for (String s : builder.build()) {
      fingerprint.addString(s);
    }
    return argi;
  }

  /** Mutable spec for one vector argument; consumed by push(). */
  static class Builder {
    // Exactly one of list/nestedSet is non-null (see the two constructors).
    @Nullable private final SkylarkList<?> list;
    @Nullable private final NestedSet<?> nestedSet;
    private Location location;
    // NOTE(review): public while every other field is private — presumably
    // accessed directly somewhere in this file; confirm before tightening.
    public String argName;
    private boolean expandDirectories;
    private BaseFunction mapAll;
    private BaseFunction mapEach;
    private String formatEach;
    private String beforeEach;
    private String joinWith;
    private String formatJoined;
    private boolean omitIfEmpty;
    private boolean uniquify;
    private String terminateWith;

    Builder(SkylarkList<?> list) {
      this.list = list;
      this.nestedSet = null;
    }

    Builder(NestedSet<?> nestedSet) {
      this.list = null;
      this.nestedSet = nestedSet;
    }

    Builder setLocation(Location location) {
      this.location = location;
      return this;
    }

    Builder setArgName(String argName) {
      this.argName = argName;
      return this;
    }

    Builder setExpandDirectories(boolean expandDirectories) {
      this.expandDirectories = expandDirectories;
      return this;
    }

    Builder setMapAll(BaseFunction mapAll) {
      this.mapAll = mapAll;
      return this;
    }

    Builder setMapEach(BaseFunction mapEach) {
      this.mapEach = mapEach;
      return this;
    }

    Builder setFormatEach(String format) {
      this.formatEach = format;
      return this;
    }

    Builder setBeforeEach(String beforeEach) {
      this.beforeEach = beforeEach;
      return this;
    }

    Builder setJoinWith(String joinWith) {
      this.joinWith = joinWith;
      return this;
    }

    Builder setFormatJoined(String formatJoined) {
      this.formatJoined = formatJoined;
      return this;
    }

    Builder omitIfEmpty(boolean omitIfEmpty) {
      this.omitIfEmpty = omitIfEmpty;
      return this;
    }

    Builder uniquify(boolean uniquify) {
      this.uniquify = uniquify;
      return this;
    }

    Builder setTerminateWith(String terminateWith) {
      this.terminateWith = terminateWith;
      return this;
    }
  }

  // Equality is by feature mask only — payloads live outside the instance.
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    VectorArg vectorArg = (VectorArg) o;
    return features == vectorArg.features;
  }

  @Override
  public int hashCode() {
    return Objects.hashCode(features);
  }
}
@AutoCodec
static final class ScalarArg {
private static final Interner<ScalarArg> interner = BlazeInterners.newStrongInterner();
private static final UUID FORMAT_UUID = UUID.fromString("8cb96642-a235-4fe0-b3ed-ebfdae8a0bd9");
private final boolean hasFormat;
private final boolean hasMapFn;
private final boolean hasLocation;
// Records which optional payload slots (format string, map_fn, location)
// this scalar argument carries in the flat arguments list.
private ScalarArg(boolean hasFormat, boolean hasMapFn, boolean hasLocation) {
  this.hasLocation = hasLocation;
  this.hasMapFn = hasMapFn;
  this.hasFormat = hasFormat;
}
@AutoCodec.VisibleForSerialization
@AutoCodec.Instantiator
static ScalarArg create(boolean hasFormat, boolean hasMapFn, boolean hasLocation) {
  // Canonicalize through the interner so structurally equal ScalarArgs are
  // a single shared instance.
  ScalarArg fresh = new ScalarArg(hasFormat, hasMapFn, hasLocation);
  return interner.intern(fresh);
}
private static void push(ImmutableList.Builder<Object> arguments, Builder arg) {
boolean wantsLocation = arg.format != null || arg.mapFn != null;
boolean hasLocation = arg.location != null && wantsLocation;
ScalarArg scalarArg = ScalarArg.create(arg.format != null, arg.mapFn != null, hasLocation);
arguments.add(scalarArg);
arguments.add(arg.object);
if (hasLocation) {
arguments.add(arg.location);
}
if (scalarArg.hasMapFn) {
arguments.add(arg.mapFn);
}
if (scalarArg.hasFormat) {
arguments.add(arg.format);
}
}
private int eval(
List<Object> arguments,
int argi,
ImmutableList.Builder<String> builder,
StarlarkSemantics starlarkSemantics)
throws CommandLineExpansionException {
Object object = arguments.get(argi++);
final Location location = hasLocation ? (Location) arguments.get(argi++) : null;
if (hasMapFn) {
BaseFunction mapFn = (BaseFunction) arguments.get(argi++);
object = applyMapFn(mapFn, object, location, starlarkSemantics);
}
String stringValue = CommandLineItem.expandToCommandLine(object);
if (hasFormat) {
String formatStr = (String) arguments.get(argi++);
Formatter formatter = Formatter.get(location, starlarkSemantics);
stringValue = formatter.format(formatStr, stringValue);
}
builder.add(stringValue);
return argi;
}
private int addToFingerprint(
List<Object> arguments,
int argi,
Fingerprint fingerprint,
StarlarkSemantics starlarkSemantics)
throws CommandLineExpansionException {
if (hasMapFn) {
return addToFingerprintLegacy(arguments, argi, fingerprint, starlarkSemantics);
}
Object object = arguments.get(argi++);
String stringValue = CommandLineItem.expandToCommandLine(object);
fingerprint.addString(stringValue);
if (hasLocation) {
argi++; // Skip past location slot
}
if (hasFormat) {
String formatStr = (String) arguments.get(argi++);
fingerprint.addUUID(FORMAT_UUID);
fingerprint.addString(formatStr);
}
return argi;
}
private int addToFingerprintLegacy(
List<Object> arguments,
int argi,
Fingerprint fingerprint,
StarlarkSemantics starlarkSemantics)
throws CommandLineExpansionException {
ImmutableList.Builder<String> builder = ImmutableList.builderWithExpectedSize(1);
argi = eval(arguments, argi, builder, starlarkSemantics);
for (String s : builder.build()) {
fingerprint.addString(s);
}
return argi;
}
static class Builder {
private Object object;
private String format;
private BaseFunction mapFn;
private Location location;
Builder(Object object) {
this.object = object;
}
Builder setLocation(Location location) {
this.location = location;
return this;
}
Builder setFormat(String format) {
this.format = format;
return this;
}
Builder setMapFn(BaseFunction mapFn) {
this.mapFn = mapFn;
return this;
}
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ScalarArg scalarArg = (ScalarArg) o;
return hasFormat == scalarArg.hasFormat
&& hasMapFn == scalarArg.hasMapFn
&& hasLocation == scalarArg.hasLocation;
}
@Override
public int hashCode() {
return Objects.hashCode(hasFormat, hasMapFn, hasLocation);
}
}
static class Builder {
private final StarlarkSemantics starlarkSemantics;
private final ImmutableList.Builder<Object> arguments = ImmutableList.builder();
public Builder(StarlarkSemantics starlarkSemantics) {
this.starlarkSemantics = starlarkSemantics;
}
Builder add(Object object) {
arguments.add(object);
return this;
}
Builder add(VectorArg.Builder vectorArg) {
VectorArg.push(arguments, vectorArg);
return this;
}
Builder add(ScalarArg.Builder scalarArg) {
ScalarArg.push(arguments, scalarArg);
return this;
}
SkylarkCustomCommandLine build() {
return new SkylarkCustomCommandLine(starlarkSemantics, arguments.build());
}
}
  /**
   * Creates a command line over a pre-flattened argument list.
   *
   * <p>Also invoked reflectively during deserialization via {@code @AutoCodec.Instantiator};
   * presumably the parameter list must mirror the serialized fields — confirm before changing.
   */
  @AutoCodec.VisibleForSerialization
  @AutoCodec.Instantiator
  SkylarkCustomCommandLine(StarlarkSemantics starlarkSemantics, ImmutableList<Object> arguments) {
    this.arguments = arguments;
    this.starlarkSemantics = starlarkSemantics;
  }
  /** Expands with no artifact expander; directories are then handled by VectorArg's default path. */
  @Override
  public Iterable<String> arguments() throws CommandLineExpansionException {
    return arguments(null);
  }
  /**
   * Expands the flat argument list into command-line strings.
   *
   * <p>VectorArg/ScalarArg markers consume their trailing payload slots: each {@code eval} call
   * returns the index of the first unread slot, which becomes the next loop position. Any other
   * element expands directly to a single string.
   */
  @Override
  public Iterable<String> arguments(@Nullable ArtifactExpander artifactExpander)
      throws CommandLineExpansionException {
    ImmutableList.Builder<String> result = ImmutableList.builder();
    for (int argi = 0; argi < arguments.size(); ) {
      Object arg = arguments.get(argi++);
      if (arg instanceof VectorArg) {
        argi = ((VectorArg) arg).eval(arguments, argi, result, artifactExpander, starlarkSemantics);
      } else if (arg instanceof ScalarArg) {
        argi = ((ScalarArg) arg).eval(arguments, argi, result, starlarkSemantics);
      } else {
        result.add(CommandLineItem.expandToCommandLine(arg));
      }
    }
    return result.build();
  }
  /**
   * Folds every argument into the action-key fingerprint, letting Vector/Scalar markers consume
   * their inline payload slots using the same index protocol as
   * {@link #arguments(ArtifactExpander)}.
   */
  @Override
  public void addToFingerprint(ActionKeyContext actionKeyContext, Fingerprint fingerprint)
      throws CommandLineExpansionException {
    for (int argi = 0; argi < arguments.size(); ) {
      Object arg = arguments.get(argi++);
      if (arg instanceof VectorArg) {
        // addToFingerprint returns the index of the first slot it did not consume.
        argi =
            ((VectorArg) arg)
                .addToFingerprint(
                    arguments, argi, actionKeyContext, fingerprint, starlarkSemantics);
      } else if (arg instanceof ScalarArg) {
        argi = ((ScalarArg) arg).addToFingerprint(arguments, argi, fingerprint, starlarkSemantics);
      } else {
        fingerprint.addString(CommandLineItem.expandToCommandLine(arg));
      }
    }
  }
private interface Formatter {
String format(String formatStr, String subject) throws CommandLineExpansionException;
static Formatter get(Location location, StarlarkSemantics starlarkSemantics) {
return starlarkSemantics.incompatibleDisallowOldStyleArgsAdd()
? SingleStringArgFormatter::format
: new LegacyFormatter(location);
}
}
  /**
   * Pre-incompatible-change formatter delegating to the Skylark printer's list formatting.
   *
   * <p>Not thread-safe: the single-element {@code args} list is intentionally reused across
   * calls to avoid per-call allocation.
   */
  private static class LegacyFormatter implements Formatter {
    @Nullable private final Location location;
    private final ArrayList<Object> args;
    public LegacyFormatter(Location location) {
      this.location = location;
      this.args = new ArrayList<>(1); // Reused arg list to reduce GC
      this.args.add(null);
    }
    @Override
    public String format(String formatStr, String subject) throws CommandLineExpansionException {
      try {
        args.set(0, subject);
        SkylarkPrinter printer = Printer.getPrinter();
        return printer.formatWithList(formatStr, args).toString();
      } catch (IllegalFormatException e) {
        // Surface a bad format string as an expansion failure tagged with the Starlark location.
        throw new CommandLineExpansionException(errorMessage(e.getMessage(), location, null));
      }
    }
  }
  /**
   * Invokes a legacy map_fn on {@code arg} in a fresh, short-lived Starlark thread.
   *
   * <p>Starlark evaluation failures and interrupts are rewritten as
   * {@link CommandLineExpansionException} so callers see a single failure type; the thread's
   * interrupt flag is restored before throwing.
   *
   * @return whatever the Starlark function returned (expanded to a string by the caller)
   */
  private static Object applyMapFn(
      BaseFunction mapFn, Object arg, Location location, StarlarkSemantics starlarkSemantics)
      throws CommandLineExpansionException {
    ImmutableList<Object> args = ImmutableList.of(arg);
    try (Mutability mutability = Mutability.create("map_fn")) {
      StarlarkThread thread =
          StarlarkThread.builder(mutability)
              .setSemantics(starlarkSemantics)
              .setEventHandler(NullEventHandler.INSTANCE)
              .build();
      return mapFn.call(args, ImmutableMap.of(), null, thread);
    } catch (EvalException e) {
      throw new CommandLineExpansionException(errorMessage(e.getMessage(), location, e.getCause()));
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new CommandLineExpansionException(
          errorMessage("Thread was interrupted", location, null));
    }
  }
  /**
   * Applies a Starlark map_each callback to each value, feeding every produced string to
   * {@code consumer}.
   *
   * <p>The callback may return a single string, a list of strings, or None (which emits nothing
   * for that value); any other return — including a non-string list element — fails expansion.
   * Starlark errors and interrupts are rewritten as {@link CommandLineExpansionException}, with
   * the interrupt flag restored first.
   */
  private static void applyMapEach(
      BaseFunction mapFn,
      List<Object> originalValues,
      Consumer<String> consumer,
      Location location,
      StarlarkSemantics starlarkSemantics)
      throws CommandLineExpansionException {
    try (Mutability mutability = Mutability.create("map_each")) {
      StarlarkThread thread =
          StarlarkThread.builder(mutability)
              .setSemantics(starlarkSemantics)
              // TODO(b/77140311): Error if we issue print statements
              .setEventHandler(NullEventHandler.INSTANCE)
              .build();
      // Reused one-element argument array to avoid per-value allocation.
      Object[] args = new Object[1];
      int count = originalValues.size();
      for (int i = 0; i < count; ++i) {
        args[0] = originalValues.get(i);
        Object ret = mapFn.callWithArgArray(args, null, thread, location);
        if (ret instanceof String) {
          consumer.accept((String) ret);
        } else if (ret instanceof SkylarkList) {
          for (Object val : ((SkylarkList) ret)) {
            if (!(val instanceof String)) {
              throw new CommandLineExpansionException(
                  "Expected map_each to return string, None, or list of strings, "
                      + "found list containing "
                      + val.getClass().getSimpleName());
            }
            consumer.accept((String) val);
          }
        } else if (ret != Runtime.NONE) {
          throw new CommandLineExpansionException(
              "Expected map_each to return string, None, or list of strings, found "
                  + ret.getClass().getSimpleName());
        }
      }
    } catch (EvalException e) {
      throw new CommandLineExpansionException(errorMessage(e.getMessage(), location, e.getCause()));
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
      throw new CommandLineExpansionException(
          errorMessage("Thread was interrupted", location, null));
    }
  }
  /**
   * Adapts a Starlark map_each function to {@link CommandLineItem.ParametrizedMapFn}.
   *
   * <p>Checked {@link CommandLineExpansionException}s are tunneled out through
   * {@link UncheckedCommandLineExpansionException} because the MapFn interface declares no
   * checked exceptions; the installer of this adaptor is expected to catch and unwrap them.
   */
  private static class CommandLineItemMapEachAdaptor
      extends CommandLineItem.ParametrizedMapFn<Object> {
    private final BaseFunction mapFn;
    private final Location location;
    private final StarlarkSemantics starlarkSemantics;
    CommandLineItemMapEachAdaptor(
        BaseFunction mapFn, Location location, StarlarkSemantics starlarkSemantics) {
      this.mapFn = mapFn;
      this.location = location;
      this.starlarkSemantics = starlarkSemantics;
    }
    @Override
    public void expandToCommandLine(Object object, Consumer<String> args) {
      try {
        applyMapEach(mapFn, ImmutableList.of(object), args, location, starlarkSemantics);
      } catch (CommandLineExpansionException e) {
        // Rather than update CommandLineItem#expandToCommandLine and the numerous callers,
        // we wrap this in a runtime exception and handle it above
        throw new UncheckedCommandLineExpansionException(e);
      }
    }
    /** Identity-based equality: two adaptors are equal only if they wrap the very same function. */
    @Override
    public boolean equals(Object obj) {
      if (!(obj instanceof CommandLineItemMapEachAdaptor)) {
        return false;
      }
      CommandLineItemMapEachAdaptor other = (CommandLineItemMapEachAdaptor) obj;
      // Instance compare intentional
      // The normal implementation uses location + name of function,
      // which can conceivably conflict in tests
      return mapFn == other.mapFn;
    }
    @Override
    public int hashCode() {
      // identity hashcode intentional
      return System.identityHashCode(mapFn);
    }
    @Override
    public int maxInstancesAllowed() {
      // No limit to these, as this is just a wrapper for Skylark functions, which are
      // always static
      return Integer.MAX_VALUE;
    }
  }
  /**
   * Builds a user-facing error of the form "<location>: <message>" plus the cause's message when
   * it adds information. NOTE(review): assumes LINE_JOINER/FIELD_JOINER (declared outside this
   * view) skip null parts, since both {@code location} and the cause text may be null — confirm.
   */
  private static String errorMessage(
      String message, @Nullable Location location, @Nullable Throwable cause) {
    return LINE_JOINER.join(
        "\n", FIELD_JOINER.join(location, message), getCauseMessage(cause, message));
  }
private static String getCauseMessage(@Nullable Throwable cause, String message) {
if (cause == null) {
return null;
}
String causeMessage = cause.getMessage();
if (causeMessage == null) {
return null;
}
if (message == null) {
return causeMessage;
}
// Skip the cause if it is redundant with the message so far.
if (message.contains(causeMessage)) {
return null;
}
return causeMessage;
}
private static class UncheckedCommandLineExpansionException extends RuntimeException {
final CommandLineExpansionException cause;
UncheckedCommandLineExpansionException(CommandLineExpansionException cause) {
this.cause = cause;
}
}
/**
* When we expand filesets the user might still expect a File object (since the results may be fed
* into map_each. Therefore we synthesize a File object from the fileset symlink.
*/
static class FilesetSymlinkFile implements FileApi, CommandLineItem {
private final Artifact fileset;
private final PathFragment execPath;
public FilesetSymlinkFile(Artifact fileset, PathFragment execPath) {
this.fileset = fileset;
this.execPath = execPath;
}
private PathFragment getExecPath() {
return execPath;
}
@Override
public String getDirname() {
PathFragment parent = getExecPath().getParentDirectory();
return (parent == null) ? "/" : parent.getSafePathString();
}
@Override
public String getFilename() {
return getExecPath().getBaseName();
}
@Override
public String getExtension() {
return getExecPath().getFileExtension();
}
@Override
public Label getOwnerLabel() {
return fileset.getOwnerLabel();
}
@Override
public FileRootApi getRoot() {
return fileset.getRoot();
}
@Override
public boolean isSourceArtifact() {
// This information is lost to us.
// Since the symlinks are always in the output tree, settle for saying "no"
return false;
}
@Override
public boolean isDirectory() {
return false;
}
@Override
public String getRunfilesPathString() {
PathFragment relativePath = execPath.relativeTo(fileset.getExecPath());
return fileset.getRunfilesPath().getRelative(relativePath).getPathString();
}
@Override
public String getExecPathString() {
return getExecPath().getPathString();
}
@Override
public String expandToCommandLine() {
return getExecPathString();
}
@Override
public void repr(SkylarkPrinter printer) {
if (isSourceArtifact()) {
printer.append("<source file " + getRunfilesPathString() + ">");
} else {
printer.append("<generated file " + getRunfilesPathString() + ">");
}
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.test.integration.functions.transform;
import static org.junit.Assert.assertTrue;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.wink.json4j.JSONArray;
import org.apache.wink.json4j.JSONObject;
import org.junit.Test;
import org.apache.sysml.api.DMLScript;
import org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM;
import org.apache.sysml.conf.ConfigurationManager;
import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.io.ReaderBinaryBlock;
import org.apache.sysml.runtime.io.ReaderTextCSV;
import org.apache.sysml.runtime.matrix.data.CSVFileFormatProperties;
import org.apache.sysml.runtime.matrix.data.MatrixBlock;
import org.apache.sysml.runtime.transform.TfUtils;
import org.apache.sysml.test.integration.AutomatedTestBase;
import org.apache.sysml.test.integration.TestConfiguration;
import org.apache.sysml.test.utils.TestUtils;
/**
 * Integration test for the transform scale/center operation: compares the DML transform
 * output against an R reference implementation, across runtime platforms (hybrid, hybrid
 * Spark, Hadoop, Spark) and output formats (csv, binary block).
 */
public class ScalingTest extends AutomatedTestBase
{
    private final static String TEST_NAME = "Scaling";
    private final static String TEST_DIR = "functions/transform/";
    private final static String TEST_CLASS_DIR = TEST_DIR + ScalingTest.class.getSimpleName() + "/";

    // dimensions of the generated random input data
    private final static int rows1 = 1500;
    private final static int cols1 = 16;

    @Override
    public void setUp()
    {
        TestUtils.clearAssertionInformation();
        addTestConfiguration(TEST_NAME,
            new TestConfiguration(TEST_CLASS_DIR, TEST_NAME, new String[]{"R"}));
    }

    // ---- Scaling CSV ----

    @Test
    public void testTransformScalingHybridCSV() throws IOException, DMLRuntimeException, Exception
    {
        runScalingTest(rows1, cols1, RUNTIME_PLATFORM.HYBRID, "csv");
    }

    @Test
    public void testTransformScalingSPHybridCSV() throws IOException, DMLRuntimeException, Exception
    {
        runScalingTest(rows1, cols1, RUNTIME_PLATFORM.HYBRID_SPARK, "csv");
    }

    @Test
    public void testTransformScalingHadoopCSV() throws IOException, DMLRuntimeException, Exception
    {
        runScalingTest(rows1, cols1, RUNTIME_PLATFORM.HADOOP, "csv");
    }

    @Test
    public void testTransformScalingSparkCSV() throws IOException, DMLRuntimeException, Exception
    {
        runScalingTest(rows1, cols1, RUNTIME_PLATFORM.SPARK, "csv");
    }

    // ---- Scaling BinaryBlock ----

    @Test
    public void testTransformScalingHybridBinary() throws IOException, DMLRuntimeException, Exception
    {
        runScalingTest(rows1, cols1, RUNTIME_PLATFORM.HYBRID, "binary");
    }

    @Test
    public void testTransformScalingSPHybridBinary() throws IOException, DMLRuntimeException, Exception
    {
        runScalingTest(rows1, cols1, RUNTIME_PLATFORM.HYBRID_SPARK, "binary");
    }

    @Test
    public void testTransformScalingHadoopBinary() throws IOException, DMLRuntimeException, Exception
    {
        runScalingTest(rows1, cols1, RUNTIME_PLATFORM.HADOOP, "binary");
    }

    @Test
    public void testTransformScalingSparkBinary() throws IOException, DMLRuntimeException, Exception
    {
        runScalingTest(rows1, cols1, RUNTIME_PLATFORM.SPARK, "binary");
    }

    // ----------------------------

    /**
     * Writes a JSON transform specification applying mean-subtraction to the first half
     * of the columns and z-score scaling to the second half.
     *
     * @param cols total number of columns to generate scale entries for
     * @param specFile HDFS path of the spec file to (over)write
     */
    private void generateSpecFile(int cols, String specFile) throws IOException , Exception
    {
        final String NAME = "name";
        final String METHOD = "method";
        final String SCALE_METHOD_Z = "z-score";
        final String SCALE_METHOD_M = "mean-subtraction";

        JSONObject outputSpec = new JSONObject();
        JSONArray scaleSpec = new JSONArray();
        for(int colID=1; colID <= cols; colID++)
        {
            JSONObject obj = new JSONObject();
            obj.put(NAME, "V"+colID);
            if(colID <= cols/2)
                obj.put(METHOD, SCALE_METHOD_M);
            else
                obj.put(METHOD, SCALE_METHOD_Z);
            scaleSpec.add(obj);
        }
        outputSpec.put(TfUtils.TXMETHOD_SCALE, scaleSpec);

        FileSystem fs = FileSystem.get(TestUtils.conf);
        // FIX: try-with-resources — the original leaked the writer if write() threw.
        try( BufferedWriter out = new BufferedWriter(
            new OutputStreamWriter(fs.create(new Path(specFile),true))) )
        {
            out.write(outputSpec.toString());
        }
    }

    /**
     * Writes the .mtd metadata file declaring the generated data file as a header-less
     * csv frame.
     *
     * @param datafile HDFS path of the data file (metadata is written to datafile + ".mtd")
     */
    private void generateFrameMTD(String datafile) throws IllegalArgumentException, IOException , Exception
    {
        JSONObject mtd = new JSONObject();
        mtd.put("data_type", "frame");
        mtd.put("format", "csv");
        mtd.put("header", false);

        FileSystem fs = FileSystem.get(TestUtils.conf);
        // FIX: try-with-resources — the original leaked the writer if write() threw.
        try( BufferedWriter out = new BufferedWriter(
            new OutputStreamWriter(fs.create(new Path(datafile+".mtd"),true))) )
        {
            out.write(mtd.toString());
        }
    }

    /**
     * Generates random input data, runs the DML transform script and the R reference
     * script, and asserts that the two results agree within 1e-10.
     *
     * @param rows number of rows of generated data
     * @param cols number of columns of generated data
     * @param rt runtime platform to execute the DML script on
     * @param ofmt output format of the transformed data ("csv" or "binary")
     * @throws IOException
     * @throws DMLRuntimeException
     */
    private void runScalingTest( int rows, int cols, RUNTIME_PLATFORM rt, String ofmt) throws IOException, DMLRuntimeException, Exception
    {
        RUNTIME_PLATFORM platformOld = rtplatform;
        rtplatform = rt;

        boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
        if( rtplatform == RUNTIME_PLATFORM.SPARK || rtplatform == RUNTIME_PLATFORM.HYBRID_SPARK)
            DMLScript.USE_LOCAL_SPARK_CONFIG = true;

        try
        {
            TestConfiguration config = getTestConfiguration(TEST_NAME);
            loadTestConfiguration(config);

            String HOME = SCRIPT_DIR + TEST_DIR;
            String specFile = input("spec.json");
            String inputFile = input("X");
            String outputFile = output(config.getOutputFiles()[0]);
            String outputFileR = expected(config.getOutputFiles()[0]);

            generateSpecFile(cols, specFile);

            // This is for running the junit test the new way, i.e., construct the arguments directly
            fullDMLScriptName = HOME + TEST_NAME + ".dml";
            programArgs = new String[]{"-nvargs",
                "DATA=" + inputFile,
                "TFSPEC=" + specFile,
                "TFMTD=" + output("tfmtd"),
                "TFDATA=" + outputFile,
                "OFMT=" + ofmt };

            fullRScriptName = HOME + TEST_NAME + ".R";
            rCmd = "Rscript" + " " + fullRScriptName + " " + inputFile + " " + outputFileR;

            //generate actual dataset
            double[][] X = getRandomMatrix(rows, cols, -50, 50, 1.0, 7);
            TestUtils.writeCSVTestMatrix(inputFile, X);
            generateFrameMTD(inputFile);

            runTest(true, false, null, -1);
            runRScript(true);

            // the R reference result is always csv
            ReaderTextCSV expReader= new ReaderTextCSV(new CSVFileFormatProperties(false, ",", true, 0, null));
            MatrixBlock exp = expReader.readMatrixFromHDFS(outputFileR, -1, -1, -1, -1, -1);

            // read the DML result in the requested output format
            MatrixBlock out = null;
            if ( ofmt.equals("csv") )
            {
                ReaderTextCSV outReader= new ReaderTextCSV(new CSVFileFormatProperties(false, ",", true, 0, null));
                out = outReader.readMatrixFromHDFS(outputFile, -1, -1, -1, -1, -1);
            }
            else
            {
                ReaderBinaryBlock bbReader = new ReaderBinaryBlock(false);
                out = bbReader.readMatrixFromHDFS(
                    outputFile, exp.getNumRows(), exp.getNumColumns(),
                    ConfigurationManager.getBlocksize(),
                    ConfigurationManager.getBlocksize(),
                    -1);
            }

            assertTrue("Incorrect output from data transform.", TransformTest.equals(out,exp, 1e-10));
        }
        finally
        {
            // restore global platform/config state even if the test fails
            rtplatform = platformOld;
            DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
        }
    }
}
| |
/*
* This file is part of ClassGraph.
*
* Author: Luke Hutchison
*
* Hosted at: https://github.com/classgraph/classgraph
*
* --
*
* The MIT License (MIT)
*
* Copyright (c) 2019 Luke Hutchison
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without
* limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
* LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
* EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
* AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
* OR OTHER DEALINGS IN THE SOFTWARE.
*/
package io.github.classgraph;
import nonapi.io.github.classgraph.types.ParseException;
/**
 * Stores the type descriptor of a {@code Class<?>}, as found in an annotation parameter value.
 */
public class AnnotationClassRef extends ScanResultObject {
    /** The raw type descriptor string, as read from the classfile. */
    private String typeDescriptorStr;

    /** The parsed type signature (lazily derived from {@link #typeDescriptorStr}). */
    private transient TypeSignature typeSignature;

    /** The fully-qualified class name (lazily derived from {@link #typeSignature}). */
    private transient String className;

    /**
     * Default constructor (fields are populated later, e.g. during deserialization).
     */
    AnnotationClassRef() {
        super();
    }

    /**
     * Constructor.
     *
     * @param typeDescriptorStr
     *            the type descriptor str
     */
    AnnotationClassRef(final String typeDescriptorStr) {
        super();
        this.typeDescriptorStr = typeDescriptorStr;
    }

    // -------------------------------------------------------------------------------------------------------------

    /**
     * Get the name of the referenced class.
     *
     * @return The name of the referenced class.
     */
    public String getName() {
        return getClassName();
    }

    /**
     * Get the type signature, parsing and caching it on first use.
     *
     * @return The type signature of the {@code Class<?>} reference. This will be a {@link ClassRefTypeSignature},
     *         a {@link BaseTypeSignature}, or an {@link ArrayTypeSignature}.
     * @throws IllegalArgumentException
     *             if the type descriptor cannot be parsed.
     */
    private TypeSignature getTypeSignature() {
        if (typeSignature == null) {
            try {
                // There can't be any type variables to resolve in ClassRefTypeSignature,
                // BaseTypeSignature or ArrayTypeSignature, so just set definingClassName to null
                typeSignature = TypeSignature.parse(typeDescriptorStr, /* definingClassName = */ null);
                typeSignature.setScanResult(scanResult);
            } catch (final ParseException e) {
                throw new IllegalArgumentException(e);
            }
        }
        return typeSignature;
    }

    /**
     * Loads the referenced class, returning a {@code Class<?>} reference for the referenced class.
     *
     * @param ignoreExceptions
     *            if true, ignore exceptions and instead return null if the class could not be loaded.
     * @return The {@code Class<?>} reference for the referenced class.
     * @throws IllegalArgumentException
     *             if the class could not be loaded and ignoreExceptions was false.
     */
    @Override
    public Class<?> loadClass(final boolean ignoreExceptions) {
        getTypeSignature();
        if (typeSignature instanceof BaseTypeSignature) {
            return ((BaseTypeSignature) typeSignature).getType();
        } else if (typeSignature instanceof ClassRefTypeSignature) {
            return ((ClassRefTypeSignature) typeSignature).loadClass(ignoreExceptions);
        } else if (typeSignature instanceof ArrayTypeSignature) {
            return ((ArrayTypeSignature) typeSignature).loadClass(ignoreExceptions);
        } else {
            throw new IllegalArgumentException("Got unexpected type " + typeSignature.getClass().getName()
                    + " for ref type signature: " + typeDescriptorStr);
        }
    }

    /**
     * Loads the referenced class, returning a {@code Class<?>} reference for the referenced class.
     *
     * @return The {@code Class<?>} reference for the referenced class.
     * @throws IllegalArgumentException
     *             if the class could not be loaded.
     */
    @Override
    public Class<?> loadClass() {
        return loadClass(/* ignoreExceptions = */ false);
    }

    // -------------------------------------------------------------------------------------------------------------

    /* (non-Javadoc)
     * @see io.github.classgraph.ScanResultObject#getClassName()
     */
    @Override
    protected String getClassName() {
        if (className == null) {
            getTypeSignature();
            if (typeSignature instanceof BaseTypeSignature) {
                className = ((BaseTypeSignature) typeSignature).getTypeStr();
            } else if (typeSignature instanceof ClassRefTypeSignature) {
                className = ((ClassRefTypeSignature) typeSignature).getFullyQualifiedClassName();
            } else if (typeSignature instanceof ArrayTypeSignature) {
                className = ((ArrayTypeSignature) typeSignature).getClassName();
            } else {
                throw new IllegalArgumentException("Got unexpected type " + typeSignature.getClass().getName()
                        + " for ref type signature: " + typeDescriptorStr);
            }
        }
        return className;
    }

    /**
     * Get the class info.
     *
     * @return The {@link ClassInfo} object for the referenced class, or null if the referenced class was not
     *         encountered during scanning (i.e. if no ClassInfo object was created for the class during scanning).
     *         N.B. even if this method returns null, {@link #loadClass()} may be able to load the referenced class
     *         by name.
     */
    @Override
    public ClassInfo getClassInfo() {
        getTypeSignature();
        return typeSignature.getClassInfo();
    }

    /* (non-Javadoc)
     * @see io.github.classgraph.ScanResultObject#setScanResult(io.github.classgraph.ScanResult)
     */
    @Override
    void setScanResult(final ScanResult scanResult) {
        super.setScanResult(scanResult);
        if (typeSignature != null) {
            typeSignature.setScanResult(scanResult);
        }
    }

    // -------------------------------------------------------------------------------------------------------------

    /* (non-Javadoc)
     * @see java.lang.Object#hashCode()
     */
    @Override
    public int hashCode() {
        return getTypeSignature().hashCode();
    }

    /* (non-Javadoc)
     * @see java.lang.Object#equals(java.lang.Object)
     */
    @Override
    public boolean equals(final Object obj) {
        if (obj == this) {
            return true;
        } else if (!(obj instanceof AnnotationClassRef)) {
            return false;
        }
        return getTypeSignature().equals(((AnnotationClassRef) obj).getTypeSignature());
    }

    /* (non-Javadoc)
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Cleanup: removed the dead "class "/"interface " prefix branch (it computed a ClassInfo
        // and then did nothing with it). More recent versions of Annotation::toString() have
        // dropped the prefix and added ".class" to the end of the class reference (which does
        // not actually match the annotation source syntax...)
        return getTypeSignature().toString() + ".class";
    }
}
| |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.embedding.engine;
import android.content.Context;
import android.content.pm.PackageManager.NameNotFoundException;
import android.content.res.AssetManager;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import io.flutter.FlutterInjector;
import io.flutter.Log;
import io.flutter.embedding.engine.dart.DartExecutor;
import io.flutter.embedding.engine.dart.DartExecutor.DartEntrypoint;
import io.flutter.embedding.engine.deferredcomponents.DeferredComponentManager;
import io.flutter.embedding.engine.loader.FlutterLoader;
import io.flutter.embedding.engine.plugins.PluginRegistry;
import io.flutter.embedding.engine.plugins.activity.ActivityControlSurface;
import io.flutter.embedding.engine.plugins.broadcastreceiver.BroadcastReceiverControlSurface;
import io.flutter.embedding.engine.plugins.contentprovider.ContentProviderControlSurface;
import io.flutter.embedding.engine.plugins.service.ServiceControlSurface;
import io.flutter.embedding.engine.plugins.util.GeneratedPluginRegister;
import io.flutter.embedding.engine.renderer.FlutterRenderer;
import io.flutter.embedding.engine.renderer.RenderSurface;
import io.flutter.embedding.engine.systemchannels.AccessibilityChannel;
import io.flutter.embedding.engine.systemchannels.DeferredComponentChannel;
import io.flutter.embedding.engine.systemchannels.KeyEventChannel;
import io.flutter.embedding.engine.systemchannels.LifecycleChannel;
import io.flutter.embedding.engine.systemchannels.LocalizationChannel;
import io.flutter.embedding.engine.systemchannels.MouseCursorChannel;
import io.flutter.embedding.engine.systemchannels.NavigationChannel;
import io.flutter.embedding.engine.systemchannels.PlatformChannel;
import io.flutter.embedding.engine.systemchannels.RestorationChannel;
import io.flutter.embedding.engine.systemchannels.SettingsChannel;
import io.flutter.embedding.engine.systemchannels.SystemChannel;
import io.flutter.embedding.engine.systemchannels.TextInputChannel;
import io.flutter.plugin.localization.LocalizationPlugin;
import io.flutter.plugin.platform.PlatformViewsController;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* A single Flutter execution environment.
*
* <p>The {@code FlutterEngine} is the container through which Dart code can be run in an Android
* application.
*
* <p>Dart code in a {@code FlutterEngine} can execute in the background, or it can be render to the
* screen by using the accompanying {@link FlutterRenderer} and Dart code using the Flutter
* framework on the Dart side. Rendering can be started and stopped, thus allowing a {@code
* FlutterEngine} to move from UI interaction to data-only processing and then back to UI
* interaction.
*
* <p>Multiple {@code FlutterEngine}s may exist, execute Dart code, and render UIs within a single
* Android app. For better memory performance characteristics, construct multiple {@code
* FlutterEngine}s via {@link io.flutter.embedding.engine.FlutterEngineGroup} rather than via {@code
* FlutterEngine}'s constructor directly.
*
* <p>To start running Dart and/or Flutter within this {@code FlutterEngine}, get a reference to
* this engine's {@link DartExecutor} and then use {@link
* DartExecutor#executeDartEntrypoint(DartExecutor.DartEntrypoint)}. The {@link
* DartExecutor#executeDartEntrypoint(DartExecutor.DartEntrypoint)} method must not be invoked twice
* on the same {@code FlutterEngine}.
*
* <p>To start rendering Flutter content to the screen, use {@link #getRenderer()} to obtain a
* {@link FlutterRenderer} and then attach a {@link RenderSurface}. Consider using a {@link
* io.flutter.embedding.android.FlutterView} as a {@link RenderSurface}.
*
* <p>Instatiating the first {@code FlutterEngine} per process will also load the Flutter engine's
* native library and start the Dart VM. Subsequent {@code FlutterEngine}s will run on the same VM
* instance but will have their own Dart <a
* href="https://api.dartlang.org/stable/dart-isolate/Isolate-class.html">Isolate</a> when the
* {@link DartExecutor} is run. Each Isolate is a self-contained Dart environment and cannot
* communicate with each other except via Isolate ports.
*/
public class FlutterEngine {
  private static final String TAG = "FlutterEngine";

  // Core engine plumbing: the JNI bridge, rendering, Dart execution, plugins and localization.
  @NonNull private final FlutterJNI flutterJNI;
  @NonNull private final FlutterRenderer renderer;
  @NonNull private final DartExecutor dartExecutor;
  @NonNull private final FlutterEngineConnectionRegistry pluginRegistry;
  @NonNull private final LocalizationPlugin localizationPlugin;

  // System channels.
  @NonNull private final AccessibilityChannel accessibilityChannel;
  @NonNull private final DeferredComponentChannel deferredComponentChannel;
  @NonNull private final KeyEventChannel keyEventChannel;
  @NonNull private final LifecycleChannel lifecycleChannel;
  @NonNull private final LocalizationChannel localizationChannel;
  @NonNull private final MouseCursorChannel mouseCursorChannel;
  @NonNull private final NavigationChannel navigationChannel;
  @NonNull private final RestorationChannel restorationChannel;
  @NonNull private final PlatformChannel platformChannel;
  @NonNull private final SettingsChannel settingsChannel;
  @NonNull private final SystemChannel systemChannel;
  @NonNull private final TextInputChannel textInputChannel;

  // Platform Views.
  @NonNull private final PlatformViewsController platformViewsController;

  // Engine Lifecycle.
  @NonNull private final Set<EngineLifecycleListener> engineLifecycleListeners = new HashSet<>();

  // Internal listener registered with FlutterJNI; fans the pre-restart notification out to all
  // externally registered listeners, then resets platform views and restoration data.
  @NonNull
  private final EngineLifecycleListener engineLifecycleListener =
      new EngineLifecycleListener() {
        // NOTE(review): this implements EngineLifecycleListener.onPreEngineRestart() but omits
        // @Override and carries @SuppressWarnings("unused") — TODO confirm whether the missing
        // annotation is intentional.
        @SuppressWarnings("unused")
        public void onPreEngineRestart() {
          Log.v(TAG, "onPreEngineRestart()");
          for (EngineLifecycleListener lifecycleListener : engineLifecycleListeners) {
            lifecycleListener.onPreEngineRestart();
          }

          platformViewsController.onPreEngineRestart();
          restorationChannel.clearData();
        }

        @Override
        public void onEngineWillDestroy() {
          // This inner implementation doesn't do anything since FlutterEngine sent this
          // notification in the first place. It's meant for external listeners.
        }
      };

  /**
   * Constructs a new {@code FlutterEngine}.
   *
   * <p>A new {@code FlutterEngine} does not execute any Dart code automatically. See {@link
   * #getDartExecutor()} and {@link DartExecutor#executeDartEntrypoint(DartExecutor.DartEntrypoint)}
   * to begin executing Dart code within this {@code FlutterEngine}.
   *
   * <p>A new {@code FlutterEngine} will not display any UI until a {@link RenderSurface} is
   * registered. See {@link #getRenderer()} and {@link
   * FlutterRenderer#startRenderingToSurface(Surface)}.
   *
   * <p>A new {@code FlutterEngine} automatically attaches all plugins. See {@link #getPlugins()}.
   *
   * <p>A new {@code FlutterEngine} does come with all default system channels attached.
   *
   * <p>The first {@code FlutterEngine} instance constructed per process will also load the Flutter
   * native library and start a Dart VM.
   *
   * <p>In order to pass Dart VM initialization arguments (see {@link
   * io.flutter.embedding.engine.FlutterShellArgs}) when creating the VM, manually set the
   * initialization arguments by calling {@link
   * io.flutter.embedding.engine.loader.FlutterLoader#startInitialization(Context)} and {@link
   * io.flutter.embedding.engine.loader.FlutterLoader#ensureInitializationComplete(Context,
   * String[])} before constructing the engine.
   */
  public FlutterEngine(@NonNull Context context) {
    this(context, null);
  }

  /**
   * Same as {@link #FlutterEngine(Context)} with added support for passing Dart VM arguments.
   *
   * <p>If the Dart VM has already started, the given arguments will have no effect.
   */
  public FlutterEngine(@NonNull Context context, @Nullable String[] dartVmArgs) {
    this(context, /* flutterLoader */ null, /* flutterJNI */ null, dartVmArgs, true);
  }

  /**
   * Same as {@link #FlutterEngine(Context)} with added support for passing Dart VM arguments and
   * avoiding automatic plugin registration.
   *
   * <p>If the Dart VM has already started, the given arguments will have no effect.
   */
  public FlutterEngine(
      @NonNull Context context,
      @Nullable String[] dartVmArgs,
      boolean automaticallyRegisterPlugins) {
    this(
        context,
        /* flutterLoader */ null,
        /* flutterJNI */ null,
        dartVmArgs,
        automaticallyRegisterPlugins);
  }

  /**
   * Same as {@link #FlutterEngine(Context, String[], boolean)} with added support for configuring
   * whether the engine will receive restoration data.
   *
   * <p>The {@code waitForRestorationData} flag controls whether the engine delays responding to
   * requests from the framework for restoration data until that data has been provided to the
   * engine via {@code RestorationChannel.setRestorationData(byte[] data)}. If the flag is false,
   * the framework may temporarily initialize itself to default values before the restoration data
   * has been made available to the engine. Setting {@code waitForRestorationData} to true avoids
   * this extra work by delaying initialization until the data is available.
   *
   * <p>When {@code waitForRestorationData} is set, {@code
   * RestorationChannel.setRestorationData(byte[] data)} must be called at a later point in time. If
   * it later turns out that no restoration data is available to restore the framework from, that
   * method must still be called with null as an argument to indicate "no data".
   *
   * <p>If the framework never requests the restoration data, this flag has no effect.
   */
  public FlutterEngine(
      @NonNull Context context,
      @Nullable String[] dartVmArgs,
      boolean automaticallyRegisterPlugins,
      boolean waitForRestorationData) {
    this(
        context,
        /* flutterLoader */ null,
        /* flutterJNI */ null,
        new PlatformViewsController(),
        dartVmArgs,
        automaticallyRegisterPlugins,
        waitForRestorationData);
  }

  /**
   * Same as {@link #FlutterEngine(Context, FlutterLoader, FlutterJNI, String[], boolean)} but with
   * no Dart VM flags and automatically registers plugins.
   *
   * <p>{@code flutterJNI} should be a new instance that has never been attached to an engine
   * before.
   */
  public FlutterEngine(
      @NonNull Context context,
      @Nullable FlutterLoader flutterLoader,
      @NonNull FlutterJNI flutterJNI) {
    this(context, flutterLoader, flutterJNI, null, true);
  }

  /**
   * Same as {@link #FlutterEngine(Context, FlutterLoader, FlutterJNI)}, plus Dart VM flags in
   * {@code dartVmArgs}, and control over whether plugins are automatically registered with this
   * {@code FlutterEngine} in {@code automaticallyRegisterPlugins}. If plugins are automatically
   * registered, then they are registered during the execution of this constructor.
   */
  public FlutterEngine(
      @NonNull Context context,
      @Nullable FlutterLoader flutterLoader,
      @NonNull FlutterJNI flutterJNI,
      @Nullable String[] dartVmArgs,
      boolean automaticallyRegisterPlugins) {
    this(
        context,
        flutterLoader,
        flutterJNI,
        new PlatformViewsController(),
        dartVmArgs,
        automaticallyRegisterPlugins);
  }

  /**
   * Same as {@link #FlutterEngine(Context, FlutterLoader, FlutterJNI, String[], boolean)}, plus the
   * ability to provide a custom {@code PlatformViewsController}.
   */
  public FlutterEngine(
      @NonNull Context context,
      @Nullable FlutterLoader flutterLoader,
      @NonNull FlutterJNI flutterJNI,
      @NonNull PlatformViewsController platformViewsController,
      @Nullable String[] dartVmArgs,
      boolean automaticallyRegisterPlugins) {
    this(
        context,
        flutterLoader,
        flutterJNI,
        platformViewsController,
        dartVmArgs,
        automaticallyRegisterPlugins,
        false);
  }

  /** Fully configurable {@code FlutterEngine} constructor. */
  public FlutterEngine(
      @NonNull Context context,
      @Nullable FlutterLoader flutterLoader,
      @NonNull FlutterJNI flutterJNI,
      @NonNull PlatformViewsController platformViewsController,
      @Nullable String[] dartVmArgs,
      boolean automaticallyRegisterPlugins,
      boolean waitForRestorationData) {
    AssetManager assetManager;
    try {
      assetManager = context.createPackageContext(context.getPackageName(), 0).getAssets();
    } catch (NameNotFoundException e) {
      // Fall back to the caller's own assets when a package context cannot be created.
      assetManager = context.getAssets();
    }

    FlutterInjector injector = FlutterInjector.instance();

    // NOTE(review): the parameter is annotated @NonNull yet null is tolerated here and replaced
    // with the injector's default — TODO confirm which of the two is intended.
    if (flutterJNI == null) {
      flutterJNI = injector.getFlutterJNIFactory().provideFlutterJNI();
    }
    this.flutterJNI = flutterJNI;
    this.dartExecutor = new DartExecutor(flutterJNI, assetManager);
    this.dartExecutor.onAttachedToJNI();

    DeferredComponentManager deferredComponentManager =
        FlutterInjector.instance().deferredComponentManager();

    // Create all default system channels on top of the Dart executor.
    accessibilityChannel = new AccessibilityChannel(dartExecutor, flutterJNI);
    deferredComponentChannel = new DeferredComponentChannel(dartExecutor);
    keyEventChannel = new KeyEventChannel(dartExecutor);
    lifecycleChannel = new LifecycleChannel(dartExecutor);
    localizationChannel = new LocalizationChannel(dartExecutor);
    mouseCursorChannel = new MouseCursorChannel(dartExecutor);
    navigationChannel = new NavigationChannel(dartExecutor);
    platformChannel = new PlatformChannel(dartExecutor);
    restorationChannel = new RestorationChannel(dartExecutor, waitForRestorationData);
    settingsChannel = new SettingsChannel(dartExecutor);
    systemChannel = new SystemChannel(dartExecutor);
    textInputChannel = new TextInputChannel(dartExecutor);

    if (deferredComponentManager != null) {
      deferredComponentManager.setDeferredComponentChannel(deferredComponentChannel);
    }

    this.localizationPlugin = new LocalizationPlugin(context, localizationChannel);

    if (flutterLoader == null) {
      flutterLoader = injector.flutterLoader();
    }

    if (!flutterJNI.isAttached()) {
      flutterLoader.startInitialization(context.getApplicationContext());
      flutterLoader.ensureInitializationComplete(context, dartVmArgs);
    }

    flutterJNI.addEngineLifecycleListener(engineLifecycleListener);
    flutterJNI.setPlatformViewsController(platformViewsController);
    flutterJNI.setLocalizationPlugin(localizationPlugin);
    // NOTE(review): the deferred component manager is fetched twice — once above via
    // FlutterInjector.instance() and once here via the local injector variable. Both calls go
    // through FlutterInjector.instance(); confirm they are equivalent before consolidating.
    flutterJNI.setDeferredComponentManager(injector.deferredComponentManager());

    // It should typically be a fresh, unattached JNI. But on a spawned engine, the JNI instance
    // is already attached to a native shell. In that case, the Java FlutterEngine is created around
    // an existing shell.
    if (!flutterJNI.isAttached()) {
      attachToJni();
    }

    // TODO(mattcarroll): FlutterRenderer is temporally coupled to attach(). Remove that coupling if
    // possible.
    this.renderer = new FlutterRenderer(flutterJNI);

    this.platformViewsController = platformViewsController;
    this.platformViewsController.onAttachedToJNI();

    this.pluginRegistry =
        new FlutterEngineConnectionRegistry(context.getApplicationContext(), this, flutterLoader);

    // Only automatically register plugins if both constructor parameter and
    // loaded AndroidManifest config turn this feature on.
    if (automaticallyRegisterPlugins && flutterLoader.automaticallyRegisterPlugins()) {
      GeneratedPluginRegister.registerGeneratedPlugins(this);
    }
  }

  /** Attaches this engine's {@link FlutterJNI} to its native counterpart, failing loudly. */
  private void attachToJni() {
    Log.v(TAG, "Attaching to JNI.");
    flutterJNI.attachToNative();

    if (!isAttachedToJni()) {
      throw new RuntimeException("FlutterEngine failed to attach to its native Object reference.");
    }
  }

  /** Whether the underlying {@link FlutterJNI} is currently attached to a native shell. */
  @SuppressWarnings("BooleanMethodIsAlwaysInverted")
  private boolean isAttachedToJni() {
    return flutterJNI.isAttached();
  }

  /**
   * Create a second {@link io.flutter.embedding.engine.FlutterEngine} based on this current one by
   * sharing as much resources together as possible to minimize startup latency and memory cost.
   *
   * @param context is a Context used to create the {@link
   *     io.flutter.embedding.engine.FlutterEngine}. Could be the same Context as the current engine
   *     or a different one. Generally, only an application Context is needed for the {@link
   *     io.flutter.embedding.engine.FlutterEngine} and its dependencies.
   * @param dartEntrypoint specifies the {@link DartEntrypoint} the new engine should run. It
   *     doesn't need to be the same entrypoint as the current engine but must be built in the same
   *     AOT or snapshot.
   * @param initialRoute The name of the initial Flutter `Navigator` `Route` to load. If this is
   *     null, it will default to the "/" route.
   * @param dartEntrypointArgs Arguments passed as a list of string to Dart's entrypoint function.
   * @return a new {@link io.flutter.embedding.engine.FlutterEngine}.
   */
  @NonNull
  /*package*/ FlutterEngine spawn(
      @NonNull Context context,
      @NonNull DartEntrypoint dartEntrypoint,
      @Nullable String initialRoute,
      @Nullable List<String> dartEntrypointArgs) {
    if (!isAttachedToJni()) {
      throw new IllegalStateException(
          "Spawn can only be called on a fully constructed FlutterEngine");
    }

    FlutterJNI newFlutterJNI =
        flutterJNI.spawn(
            dartEntrypoint.dartEntrypointFunctionName,
            dartEntrypoint.dartEntrypointLibrary,
            initialRoute,
            dartEntrypointArgs);
    return new FlutterEngine(
        context, // Context.
        null, // FlutterLoader. A null value passed here causes the constructor to get it from the
        // FlutterInjector.
        newFlutterJNI); // FlutterJNI.
  }

  /**
   * Cleans up all components within this {@code FlutterEngine} and destroys the associated Dart
   * Isolate. All state held by the Dart Isolate, such as the Flutter Elements tree, is lost.
   *
   * <p>This {@code FlutterEngine} instance should be discarded after invoking this method.
   */
  public void destroy() {
    Log.v(TAG, "Destroying.");
    // Notify external listeners first, while the engine is still fully valid.
    for (EngineLifecycleListener listener : engineLifecycleListeners) {
      listener.onEngineWillDestroy();
    }
    // The order that these things are destroyed is important.
    pluginRegistry.destroy();
    platformViewsController.onDetachedFromJNI();
    dartExecutor.onDetachedFromJNI();
    flutterJNI.removeEngineLifecycleListener(engineLifecycleListener);
    flutterJNI.setDeferredComponentManager(null);
    flutterJNI.detachFromNativeAndReleaseResources();
    if (FlutterInjector.instance().deferredComponentManager() != null) {
      FlutterInjector.instance().deferredComponentManager().destroy();
      deferredComponentChannel.setDeferredComponentManager(null);
    }
  }

  /**
   * Adds a {@code listener} to be notified of Flutter engine lifecycle events, e.g., {@code
   * onPreEngineRestart()}.
   */
  public void addEngineLifecycleListener(@NonNull EngineLifecycleListener listener) {
    engineLifecycleListeners.add(listener);
  }

  /**
   * Removes a {@code listener} that was previously added with {@link
   * #addEngineLifecycleListener(EngineLifecycleListener)}.
   */
  public void removeEngineLifecycleListener(@NonNull EngineLifecycleListener listener) {
    engineLifecycleListeners.remove(listener);
  }

  /**
   * The Dart execution context associated with this {@code FlutterEngine}.
   *
   * <p>The {@link DartExecutor} can be used to start executing Dart code from a given entrypoint.
   * See {@link DartExecutor#executeDartEntrypoint(DartExecutor.DartEntrypoint)}.
   *
   * <p>Use the {@link DartExecutor} to connect any desired message channels and method channels to
   * facilitate communication between Android and Dart/Flutter.
   */
  @NonNull
  public DartExecutor getDartExecutor() {
    return dartExecutor;
  }

  /**
   * The rendering system associated with this {@code FlutterEngine}.
   *
   * <p>To render a Flutter UI that is produced by this {@code FlutterEngine}'s Dart code, attach a
   * {@link RenderSurface} to this {@link FlutterRenderer}.
   */
  @NonNull
  public FlutterRenderer getRenderer() {
    return renderer;
  }

  /** System channel that sends accessibility requests and events from Flutter to Android. */
  @NonNull
  public AccessibilityChannel getAccessibilityChannel() {
    return accessibilityChannel;
  }

  /** System channel that sends key events from Android to Flutter. */
  @NonNull
  public KeyEventChannel getKeyEventChannel() {
    return keyEventChannel;
  }

  /** System channel that sends Android lifecycle events to Flutter. */
  @NonNull
  public LifecycleChannel getLifecycleChannel() {
    return lifecycleChannel;
  }

  /** System channel that sends locale data from Android to Flutter. */
  @NonNull
  public LocalizationChannel getLocalizationChannel() {
    return localizationChannel;
  }

  /** System channel that sends Flutter navigation commands from Android to Flutter. */
  @NonNull
  public NavigationChannel getNavigationChannel() {
    return navigationChannel;
  }

  /**
   * System channel that sends platform-oriented requests and information to Flutter, e.g., requests
   * to play sounds, requests for haptics, system chrome settings, etc.
   */
  @NonNull
  public PlatformChannel getPlatformChannel() {
    return platformChannel;
  }

  /**
   * System channel to exchange restoration data between framework and engine.
   *
   * <p>The engine can obtain the current restoration data from the framework via this channel to
   * store it on disk and - when the app is relaunched - provide the stored data back to the
   * framework to recreate the original state of the app.
   */
  @NonNull
  public RestorationChannel getRestorationChannel() {
    return restorationChannel;
  }

  /**
   * System channel that sends platform/user settings from Android to Flutter, e.g., time format,
   * scale factor, etc.
   */
  @NonNull
  public SettingsChannel getSettingsChannel() {
    return settingsChannel;
  }

  /** System channel that allows manual installation and state querying of deferred components. */
  @NonNull
  public DeferredComponentChannel getDeferredComponentChannel() {
    return deferredComponentChannel;
  }

  /** System channel that sends memory pressure warnings from Android to Flutter. */
  @NonNull
  public SystemChannel getSystemChannel() {
    return systemChannel;
  }

  /** System channel that sends mouse cursor requests from Flutter to Android. */
  @NonNull
  public MouseCursorChannel getMouseCursorChannel() {
    return mouseCursorChannel;
  }

  /** System channel that sends and receives text input requests and state. */
  @NonNull
  public TextInputChannel getTextInputChannel() {
    return textInputChannel;
  }

  /**
   * Plugin registry, which registers plugins that want to be applied to this {@code FlutterEngine}.
   */
  @NonNull
  public PluginRegistry getPlugins() {
    return pluginRegistry;
  }

  /** The LocalizationPlugin this FlutterEngine created. */
  @NonNull
  public LocalizationPlugin getLocalizationPlugin() {
    return localizationPlugin;
  }

  /**
   * {@code PlatformViewsController}, which controls all platform views running within this {@code
   * FlutterEngine}.
   */
  @NonNull
  public PlatformViewsController getPlatformViewsController() {
    return platformViewsController;
  }

  /** The plugin registry viewed as the surface that connects this engine to an {@code Activity}. */
  @NonNull
  public ActivityControlSurface getActivityControlSurface() {
    return pluginRegistry;
  }

  /** The plugin registry viewed as the surface that connects this engine to a {@code Service}. */
  @NonNull
  public ServiceControlSurface getServiceControlSurface() {
    return pluginRegistry;
  }

  /**
   * The plugin registry viewed as the surface that connects this engine to a {@code
   * BroadcastReceiver}.
   */
  @NonNull
  public BroadcastReceiverControlSurface getBroadcastReceiverControlSurface() {
    return pluginRegistry;
  }

  /**
   * The plugin registry viewed as the surface that connects this engine to a {@code
   * ContentProvider}.
   */
  @NonNull
  public ContentProviderControlSurface getContentProviderControlSurface() {
    return pluginRegistry;
  }

  /** Lifecycle callbacks for Flutter engine lifecycle events. */
  public interface EngineLifecycleListener {
    /** Lifecycle callback invoked before a hot restart of the Flutter engine. */
    void onPreEngineRestart();

    /**
     * Lifecycle callback invoked before the Flutter engine is destroyed.
     *
     * <p>For the duration of the call, the Flutter engine is still valid.
     */
    void onEngineWillDestroy();
  }
}
| |
/*
* Copyright 2007 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.datamatrix.decoder;
import com.google.zxing.FormatException;
import com.google.zxing.common.BitMatrix;
/**
* @author bbrown@google.com (Brian Brown)
*/
final class BitMatrixParser {

  // Mapping matrix: the symbol's data bits with all alignment patterns stripped out.
  private final BitMatrix mappingBitMatrix;
  // Parallel matrix marking which mapping bits readModule() has already consumed.
  private final BitMatrix readMappingMatrix;
  private final Version version;

  /**
   * @param bitMatrix {@link BitMatrix} to parse
   * @throws FormatException if dimension is &lt; 8 or &gt; 144 or not 0 mod 2
   */
  BitMatrixParser(BitMatrix bitMatrix) throws FormatException {
    int dimension = bitMatrix.getHeight();
    // ECC 200 Data Matrix symbols range from 8x8 to 144x144 and always have even dimensions.
    if (dimension < 8 || dimension > 144 || (dimension & 0x01) != 0) {
      throw FormatException.getFormatInstance();
    }

    version = readVersion(bitMatrix);
    this.mappingBitMatrix = extractDataRegion(bitMatrix);
    this.readMappingMatrix = new BitMatrix(this.mappingBitMatrix.getWidth(), this.mappingBitMatrix.getHeight());
  }

  /** @return the {@link Version} derived from the symbol's dimensions. */
  Version getVersion() {
    return version;
  }

  /**
   * <p>Creates the version object based on the dimension of the original bit matrix from
   * the datamatrix code.</p>
   *
   * <p>See ISO 16022:2006 Table 7 - ECC 200 symbol attributes</p>
   *
   * @param bitMatrix Original {@link BitMatrix} including alignment patterns
   * @return {@link Version} encapsulating the Data Matrix Code's "version"
   * @throws FormatException if the dimensions of the mapping matrix are not valid
   *     Data Matrix dimensions.
   */
  private static Version readVersion(BitMatrix bitMatrix) throws FormatException {
    int numRows = bitMatrix.getHeight();
    int numColumns = bitMatrix.getWidth();
    return Version.getVersionForDimensions(numRows, numColumns);
  }

  /**
   * <p>Reads the bits in the {@link BitMatrix} representing the mapping matrix (No alignment patterns)
   * in the correct order in order to reconstitute the codewords bytes contained within the
   * Data Matrix Code.</p>
   *
   * @return bytes encoded within the Data Matrix Code
   * @throws FormatException if the exact number of bytes expected is not read
   */
  byte[] readCodewords() throws FormatException {
    byte[] result = new byte[version.getTotalCodewords()];
    int resultOffset = 0;

    // The nominal placement starts 4 rows down the first column (ISO 16022:2006, 5.8.1).
    int row = 4;
    int column = 0;

    int numRows = mappingBitMatrix.getHeight();
    int numColumns = mappingBitMatrix.getWidth();

    // Each special corner pattern may be read at most once per symbol.
    boolean corner1Read = false;
    boolean corner2Read = false;
    boolean corner3Read = false;
    boolean corner4Read = false;

    // Read all of the codewords
    do {
      // Check the four corner cases
      if ((row == numRows) && (column == 0) && !corner1Read) {
        result[resultOffset++] = (byte) readCorner1(numRows, numColumns);
        row -= 2;
        column += 2;
        corner1Read = true;
      } else if ((row == numRows - 2) && (column == 0) && ((numColumns & 0x03) != 0) && !corner2Read) {
        result[resultOffset++] = (byte) readCorner2(numRows, numColumns);
        row -= 2;
        column += 2;
        corner2Read = true;
      } else if ((row == numRows + 4) && (column == 2) && ((numColumns & 0x07) == 0) && !corner3Read) {
        result[resultOffset++] = (byte) readCorner3(numRows, numColumns);
        row -= 2;
        column += 2;
        corner3Read = true;
      } else if ((row == numRows - 2) && (column == 0) && ((numColumns & 0x07) == 4) && !corner4Read) {
        result[resultOffset++] = (byte) readCorner4(numRows, numColumns);
        row -= 2;
        column += 2;
        corner4Read = true;
      } else {
        // Sweep upward diagonally to the right
        do {
          if ((row < numRows) && (column >= 0) && !readMappingMatrix.get(column, row)) {
            result[resultOffset++] = (byte) readUtah(row, column, numRows, numColumns);
          }
          row -= 2;
          column += 2;
        } while ((row >= 0) && (column < numColumns));
        row += 1;
        column += 3;

        // Sweep downward diagonally to the left
        do {
          if ((row >= 0) && (column < numColumns) && !readMappingMatrix.get(column, row)) {
            result[resultOffset++] = (byte) readUtah(row, column, numRows, numColumns);
          }
          row += 2;
          column -= 2;
        } while ((row < numRows) && (column >= 0));
        row += 3;
        column += 1;
      }
    } while ((row < numRows) || (column < numColumns));

    // Every codeword slot must have been filled exactly once.
    if (resultOffset != version.getTotalCodewords()) {
      throw FormatException.getFormatInstance();
    }
    return result;
  }

  /**
   * <p>Reads a bit of the mapping matrix accounting for boundary wrapping.</p>
   *
   * @param row Row to read in the mapping matrix
   * @param column Column to read in the mapping matrix
   * @param numRows Number of rows in the mapping matrix
   * @param numColumns Number of columns in the mapping matrix
   * @return value of the given bit in the mapping matrix
   */
  private boolean readModule(int row, int column, int numRows, int numColumns) {
    // Adjust the row and column indices based on boundary wrapping
    if (row < 0) {
      row += numRows;
      column += 4 - ((numRows + 4) & 0x07);
    }
    if (column < 0) {
      column += numColumns;
      row += 4 - ((numColumns + 4) & 0x07);
    }
    // Mark the bit as consumed so readCodewords() does not read it again in a Utah sweep.
    readMappingMatrix.set(column, row);
    return mappingBitMatrix.get(column, row);
  }

  /**
   * <p>Reads the 8 bits of the standard Utah-shaped pattern.</p>
   *
   * <p>See ISO 16022:2006, 5.8.1 Figure 6</p>
   *
   * @param row Current row in the mapping matrix, anchored at the 8th bit (LSB) of the pattern
   * @param column Current column in the mapping matrix, anchored at the 8th bit (LSB) of the pattern
   * @param numRows Number of rows in the mapping matrix
   * @param numColumns Number of columns in the mapping matrix
   * @return byte from the utah shape
   */
  private int readUtah(int row, int column, int numRows, int numColumns) {
    // Bits are accumulated MSB-first: the (row-2, column-2) module ends up as bit 7.
    int currentByte = 0;
    if (readModule(row - 2, column - 2, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(row - 2, column - 1, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(row - 1, column - 2, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(row - 1, column - 1, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(row - 1, column, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(row, column - 2, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(row, column - 1, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(row, column, numRows, numColumns)) {
      currentByte |= 1;
    }
    return currentByte;
  }

  /**
   * <p>Reads the 8 bits of the special corner condition 1.</p>
   *
   * <p>See ISO 16022:2006, Figure F.3</p>
   *
   * @param numRows Number of rows in the mapping matrix
   * @param numColumns Number of columns in the mapping matrix
   * @return byte from the Corner condition 1
   */
  private int readCorner1(int numRows, int numColumns) {
    int currentByte = 0;
    if (readModule(numRows - 1, 0, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(numRows - 1, 1, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(numRows - 1, 2, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(0, numColumns - 2, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(0, numColumns - 1, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(1, numColumns - 1, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(2, numColumns - 1, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(3, numColumns - 1, numRows, numColumns)) {
      currentByte |= 1;
    }
    return currentByte;
  }

  /**
   * <p>Reads the 8 bits of the special corner condition 2.</p>
   *
   * <p>See ISO 16022:2006, Figure F.4</p>
   *
   * @param numRows Number of rows in the mapping matrix
   * @param numColumns Number of columns in the mapping matrix
   * @return byte from the Corner condition 2
   */
  private int readCorner2(int numRows, int numColumns) {
    int currentByte = 0;
    if (readModule(numRows - 3, 0, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(numRows - 2, 0, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(numRows - 1, 0, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(0, numColumns - 4, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(0, numColumns - 3, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(0, numColumns - 2, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(0, numColumns - 1, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(1, numColumns - 1, numRows, numColumns)) {
      currentByte |= 1;
    }
    return currentByte;
  }

  /**
   * <p>Reads the 8 bits of the special corner condition 3.</p>
   *
   * <p>See ISO 16022:2006, Figure F.5</p>
   *
   * @param numRows Number of rows in the mapping matrix
   * @param numColumns Number of columns in the mapping matrix
   * @return byte from the Corner condition 3
   */
  private int readCorner3(int numRows, int numColumns) {
    int currentByte = 0;
    if (readModule(numRows - 1, 0, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(numRows - 1, numColumns - 1, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(0, numColumns - 3, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(0, numColumns - 2, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(0, numColumns - 1, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(1, numColumns - 3, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(1, numColumns - 2, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(1, numColumns - 1, numRows, numColumns)) {
      currentByte |= 1;
    }
    return currentByte;
  }

  /**
   * <p>Reads the 8 bits of the special corner condition 4.</p>
   *
   * <p>See ISO 16022:2006, Figure F.6</p>
   *
   * @param numRows Number of rows in the mapping matrix
   * @param numColumns Number of columns in the mapping matrix
   * @return byte from the Corner condition 4
   */
  private int readCorner4(int numRows, int numColumns) {
    int currentByte = 0;
    if (readModule(numRows - 3, 0, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(numRows - 2, 0, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(numRows - 1, 0, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(0, numColumns - 2, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(0, numColumns - 1, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(1, numColumns - 1, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(2, numColumns - 1, numRows, numColumns)) {
      currentByte |= 1;
    }
    currentByte <<= 1;
    if (readModule(3, numColumns - 1, numRows, numColumns)) {
      currentByte |= 1;
    }
    return currentByte;
  }

  /**
   * <p>Extracts the data region from a {@link BitMatrix} that contains
   * alignment patterns.</p>
   *
   * @param bitMatrix Original {@link BitMatrix} with alignment patterns
   * @return BitMatrix that has the alignment patterns removed
   */
  private BitMatrix extractDataRegion(BitMatrix bitMatrix) {
    int symbolSizeRows = version.getSymbolSizeRows();
    int symbolSizeColumns = version.getSymbolSizeColumns();

    if (bitMatrix.getHeight() != symbolSizeRows) {
      throw new IllegalArgumentException("Dimension of bitMatrix must match the version size");
    }

    int dataRegionSizeRows = version.getDataRegionSizeRows();
    int dataRegionSizeColumns = version.getDataRegionSizeColumns();

    int numDataRegionsRow = symbolSizeRows / dataRegionSizeRows;
    int numDataRegionsColumn = symbolSizeColumns / dataRegionSizeColumns;

    int sizeDataRegionRow = numDataRegionsRow * dataRegionSizeRows;
    int sizeDataRegionColumn = numDataRegionsColumn * dataRegionSizeColumns;

    BitMatrix bitMatrixWithoutAlignment = new BitMatrix(sizeDataRegionColumn, sizeDataRegionRow);
    for (int dataRegionRow = 0; dataRegionRow < numDataRegionsRow; ++dataRegionRow) {
      int dataRegionRowOffset = dataRegionRow * dataRegionSizeRows;
      for (int dataRegionColumn = 0; dataRegionColumn < numDataRegionsColumn; ++dataRegionColumn) {
        int dataRegionColumnOffset = dataRegionColumn * dataRegionSizeColumns;
        for (int i = 0; i < dataRegionSizeRows; ++i) {
          // The "+ 2" skips each region's one-module alignment border on both sides;
          // the "+ 1" skips the leading border row/column of the current region.
          int readRowOffset = dataRegionRow * (dataRegionSizeRows + 2) + 1 + i;
          int writeRowOffset = dataRegionRowOffset + i;
          for (int j = 0; j < dataRegionSizeColumns; ++j) {
            int readColumnOffset = dataRegionColumn * (dataRegionSizeColumns + 2) + 1 + j;
            if (bitMatrix.get(readColumnOffset, readRowOffset)) {
              int writeColumnOffset = dataRegionColumnOffset + j;
              bitMatrixWithoutAlignment.set(writeColumnOffset, writeRowOffset);
            }
          }
        }
      }
    }
    return bitMatrixWithoutAlignment;
  }
}
| |
/*
* Mavuno: A Hadoop-Based Text Mining Toolkit
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package edu.isi.mavuno.extract;
import java.io.BufferedWriter;
import java.io.IOException;
import java.util.Locale;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.log4j.Logger;

import edu.isi.mavuno.util.ContextPatternStatsWritable;
import edu.isi.mavuno.util.ContextPatternWritable;
import edu.isi.mavuno.util.MavunoUtils;
/**
* @author metzler
*
*/
/**
 * Hadoop tool that projects (context, pattern) statistics onto a configurable
 * split key and writes the aggregated weights into a sequence of numbered
 * ".examples" files of bounded size.
 *
 * @author metzler
 */
public class Split extends Configured implements Tool {

	private static final Logger sLogger = Logger.getLogger(Split.class);

	public Split(Configuration conf) {
		super(conf);
	}

	/**
	 * Projects each input key onto the configured split key (pattern, context,
	 * or both) and emits the record's weight as a {@link DoubleWritable}.
	 */
	private static class MyMapper extends Mapper<ContextPatternWritable, Writable, ContextPatternWritable, DoubleWritable> {

		private final ContextPatternWritable mContext = new ContextPatternWritable();
		private final DoubleWritable mResult = new DoubleWritable();

		// which parts of the incoming key participate in the split key
		private boolean mIncludePattern = true;
		private boolean mIncludeContext = true;

		@Override
		public void setup(Mapper<ContextPatternWritable, Writable, ContextPatternWritable, DoubleWritable>.Context context) {
			Configuration conf = context.getConfiguration();

			// fail with a clear message when the required parameter is absent
			// (previously this threw an uninformative NullPointerException)
			String rawSplitKey = conf.get("Mavuno.Split.SplitKey");
			if(rawSplitKey == null) {
				throw new RuntimeException("Missing required parameter Mavuno.Split.SplitKey");
			}
			String splitKey = rawSplitKey.toLowerCase();

			if("pattern".equals(splitKey)) { // split on patterns
				mIncludePattern = true;
				mIncludeContext = false;
			}
			else if("context".equals(splitKey)) { // split on contexts
				mIncludePattern = false;
				mIncludeContext = true;
			}
			else if("pattern+context".equals(splitKey)) { // split on (pattern, context) pairs
				mIncludePattern = true;
				mIncludeContext = true;
			}
			else {
				throw new RuntimeException("Illegal SplitKey used in Split -- " + splitKey);
			}
		}

		@Override
		public void map(ContextPatternWritable key, Writable value, Mapper<ContextPatternWritable, Writable, ContextPatternWritable, DoubleWritable>.Context context) throws IOException, InterruptedException {
			// construct the output key from the requested portions of the input key
			// NOTE(review): mContext is reused across calls; when a portion is
			// excluded it retains the previous record's value -- confirm that
			// ContextPatternWritable's setters/defaults make this safe
			mContext.setId(key.getId());
			if(mIncludePattern) {
				mContext.setPattern(key.getPattern());
			}
			if(mIncludeContext) {
				mContext.setContext(key.getContext());
			}

			// reset before inspecting the value type so an unrecognized type
			// emits 0.0 instead of silently reusing the previous record's weight
			mResult.set(0.0);
			if(value instanceof DoubleWritable) {
				mResult.set(((DoubleWritable)value).get());
			}
			else if(value instanceof ContextPatternStatsWritable) {
				mResult.set(((ContextPatternStatsWritable)value).weight);
			}

			// output result
			context.write(mContext, mResult);
		}
	}

	/**
	 * Sums the weights for each key and appends "key&lt;TAB&gt;sum" lines to
	 * numbered ".examples" files, rolling over to a new file whenever the
	 * current split reaches {@link #MAX_BYTES_PER_SPLIT}.
	 */
	private static class MyReducer extends Reducer<ContextPatternWritable, DoubleWritable, ContextPatternWritable, DoubleWritable> {

		// approximate upper bound on the bytes written to a single split file
		private static final int MAX_BYTES_PER_SPLIT = 200 * 1024 * 1024;

		private String mOutputPath = null;
		private BufferedWriter mPatternSplitWriter = null;
		private int mBytesProcessed;
		private int mCurrentSplit;

		@Override
		public void setup(Reducer<ContextPatternWritable, DoubleWritable, ContextPatternWritable, DoubleWritable>.Context context) throws IOException, InterruptedException {
			Configuration conf = context.getConfiguration();
			mOutputPath = conf.get("Mavuno.Split.OutputPath", null);
			mBytesProcessed = 0;
			mCurrentSplit = 0;
			mPatternSplitWriter = MavunoUtils.getBufferedWriter(conf, mOutputPath + "/" + mCurrentSplit + ".examples");
		}

		@Override
		public void reduce(ContextPatternWritable key, Iterable<DoubleWritable> values, Reducer<ContextPatternWritable, DoubleWritable, ContextPatternWritable, DoubleWritable>.Context context) throws IOException, InterruptedException {
			// roll over to a new split file once the current one is full
			if(mBytesProcessed >= MAX_BYTES_PER_SPLIT) {
				Configuration conf = context.getConfiguration();
				if(mPatternSplitWriter != null) {
					mPatternSplitWriter.close();
				}
				mBytesProcessed = 0;
				mCurrentSplit++;
				mPatternSplitWriter = MavunoUtils.getBufferedWriter(conf, mOutputPath + "/" + mCurrentSplit + ".examples");
			}

			double sum = 0.0;
			for(DoubleWritable value : values) {
				sum += value.get();
			}

			mPatternSplitWriter.write(key.toString());
			mPatternSplitWriter.write('\t');
			mPatternSplitWriter.write(Double.toString(sum));
			mPatternSplitWriter.write('\n');

			// NOTE(review): only the key's length is counted toward the split
			// size, so actual files exceed MAX_BYTES_PER_SPLIT by the weight,
			// tab, and newline bytes; preserved as-is
			mBytesProcessed += key.getLength();
		}

		@Override
		public void cleanup(Reducer<ContextPatternWritable, DoubleWritable, ContextPatternWritable, DoubleWritable>.Context context) throws IOException, InterruptedException {
			if(mPatternSplitWriter != null) {
				mPatternSplitWriter.close();
			}
		}
	}

	/* (non-Javadoc)
	 * @see org.apache.hadoop.util.Tool#run(java.lang.String[])
	 */
	@Override
	public int run(String[] args) throws ClassNotFoundException, InterruptedException, IOException {
		MavunoUtils.readParameters(args, "Mavuno.Split", getConf());
		return run();
	}

	/**
	 * Configures and runs the split job using parameters already present in
	 * this tool's {@link Configuration}.
	 *
	 * @return 0 if the job succeeds, 1 if it fails
	 */
	public int run() throws ClassNotFoundException, InterruptedException, IOException {
		Configuration conf = getConf();

		String inputPath = MavunoUtils.getRequiredParam("Mavuno.Split.InputPath", conf);
		String outputPath = MavunoUtils.getRequiredParam("Mavuno.Split.OutputPath", conf);
		String splitKey = MavunoUtils.getRequiredParam("Mavuno.Split.SplitKey", conf);

		sLogger.info("Tool name: Split");
		sLogger.info(" - Input path: " + inputPath);
		sLogger.info(" - Output path: " + outputPath);
		sLogger.info(" - Split key: " + splitKey);

		Job job = new Job(conf);
		job.setJobName("Split");
		job.setJarByClass(Split.class);

		MavunoUtils.recursivelyAddInputPaths(job, inputPath);
		FileOutputFormat.setOutputPath(job, new Path(outputPath));

		job.setInputFormatClass(SequenceFileInputFormat.class);
		job.setOutputFormatClass(TextOutputFormat.class);

		job.setMapOutputKeyClass(ContextPatternWritable.class);
		job.setSortComparatorClass(ContextPatternWritable.Comparator.class);
		job.setPartitionerClass(ContextPatternWritable.FullPartitioner.class);
		job.setMapOutputValueClass(DoubleWritable.class);

		job.setOutputKeyClass(ContextPatternWritable.class);
		job.setOutputValueClass(DoubleWritable.class);

		job.setMapperClass(MyMapper.class);
		job.setReducerClass(MyReducer.class);

		// a single reducer so the split files are numbered consecutively from 0
		job.setNumReduceTasks(1);

		// propagate job failure to the caller instead of always returning 0
		return job.waitForCompletion(true) ? 0 : 1;
	}
}
| |
package org.apache.streams.filters.test;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Sets;
import org.apache.streams.core.StreamsDatum;
import org.apache.streams.filters.VerbDefinitionDropFilter;
import org.apache.streams.filters.VerbDefinitionKeepFilter;
import org.apache.streams.jackson.StreamsJacksonMapper;
import org.apache.streams.pojo.json.Activity;
import org.apache.streams.verbs.VerbDefinition;
import org.apache.streams.verbs.VerbDefinitionResolver;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import java.util.List;
/**
 * Tests for {@link org.apache.streams.verbs.VerbDefinitionResolver}
*/
public class VerbDefinitionFilterTest {
ObjectMapper mapper = StreamsJacksonMapper.getInstance();
/**
* Test verb match filter alone
*/
@Test
public void testVerbMatchFilter() throws Exception {
VerbDefinition definition = mapper.readValue(VerbDefinitionFilterTest.class.getResourceAsStream("/post.json"), VerbDefinition.class);
VerbDefinitionKeepFilter filter = new VerbDefinitionKeepFilter(Sets.newHashSet(definition));
filter.prepare(null);
StreamsDatum datum1 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"notpost\"}\n", Activity.class));
List<StreamsDatum> result1 = filter.process(datum1);
assert result1.size() == 0;
StreamsDatum datum2 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"post\"}\n", Activity.class));
List<StreamsDatum> result2 = filter.process(datum2);
assert result2.size() == 1;
}
/**
* Test provider filter, if provider has wrong type it should not pass
*/
@Test
public void testProviderFilter() throws Exception {
VerbDefinition definition = mapper.readValue(VerbDefinitionFilterTest.class.getResourceAsStream("/provider.json"), VerbDefinition.class);
VerbDefinitionKeepFilter filter = new VerbDefinitionKeepFilter(Sets.newHashSet(definition));
filter.prepare(null);
StreamsDatum datum1 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"provider\":{\"id\":\"providerId\",\"objectType\":\"product\"}}\n", Activity.class));
List<StreamsDatum> result1 = filter.process(datum1);
assert result1.size() == 0;
StreamsDatum datum2 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"provider\":{\"id\":\"providerId\",\"objectType\":\"application\"}}\n\n", Activity.class));
List<StreamsDatum> result2 = filter.process(datum2);
assert result2.size() == 1;
}
/**
* Test actor filter, if actor isn't present it should not pass
*/
@Test
public void testActorFilter() throws Exception {
VerbDefinition definition = mapper.readValue(VerbDefinitionFilterTest.class.getResourceAsStream("/actor.json"), VerbDefinition.class);
VerbDefinitionKeepFilter filter = new VerbDefinitionKeepFilter(Sets.newHashSet(definition));
filter.prepare(null);
StreamsDatum datum1 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"actor\":{\"id\":\"actorId\",\"objectType\":\"page\"}}\n", Activity.class));
List<StreamsDatum> result1 = filter.process(datum1);
assert result1.size() == 0;
StreamsDatum datum2 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"actor\":{\"id\":\"actorId\",\"objectType\":\"person\"}}}\n\n", Activity.class));
List<StreamsDatum> result2 = filter.process(datum2);
assert result2.size() == 1;
}
/**
* Test object filter, if object doesn't have a type it should not pass
*/
@Test
public void testObjectFilter() throws Exception {
VerbDefinition definition = mapper.readValue(VerbDefinitionFilterTest.class.getResourceAsStream("/object.json"), VerbDefinition.class);
VerbDefinitionKeepFilter filter = new VerbDefinitionKeepFilter(Sets.newHashSet(definition));
filter.prepare(null);
StreamsDatum datum1 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"object\":{\"id\":\"objectId\"}}\n", Activity.class));
List<StreamsDatum> result1 = filter.process(datum1);
assert result1.size() == 0;
StreamsDatum datum2 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"object\":{\"id\":\"objectId\",\"objectType\":\"task\"}}}\n\n", Activity.class));
List<StreamsDatum> result2 = filter.process(datum2);
assert result2.size() == 1;
}
/**
* Test actor and object filter together
*/
@Test
public void testMultiFilter() throws Exception {
VerbDefinition definition = mapper.readValue(VerbDefinitionFilterTest.class.getResourceAsStream("/follow.json"), VerbDefinition.class);
VerbDefinitionKeepFilter filter = new VerbDefinitionKeepFilter(Sets.newHashSet(definition));
filter.prepare(null);
StreamsDatum datum1 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"follow\",\"actor\":{\"id\":\"actorId\",\"objectType\":\"page\"}}}\n", Activity.class));
List<StreamsDatum> result1 = filter.process(datum1);
assert result1.size() == 0;
StreamsDatum datum2 = new StreamsDatum(mapper.readValue("{\"id\":\"2\",\"verb\":\"follow\",\"object\":{\"id\":\"objectId\",\"objectType\":\"page\"}}}\n", Activity.class));
List<StreamsDatum> result2 = filter.process(datum2);
assert result2.size() == 0;
StreamsDatum datum3 = new StreamsDatum(mapper.readValue("{\"id\":\"3\",\"verb\":\"follow\",\"actor\":{\"id\":\"actorId\",\"objectType\":\"page\"},\"object\":{\"id\":\"objectId\",\"objectType\":\"page\"}}}\n", Activity.class));
List<StreamsDatum> result3 = filter.process(datum3);
assert result3.size() == 1;
}
/**
* Test targetRequired
*/
@Test
public void testTargetRequired() throws Exception {
VerbDefinition definition = mapper.readValue(VerbDefinitionFilterTest.class.getResourceAsStream("/targetrequired.json"), VerbDefinition.class);
VerbDefinitionKeepFilter filter = new VerbDefinitionKeepFilter(Sets.newHashSet(definition));
filter.prepare(null);
StreamsDatum datum1 = new StreamsDatum(mapper.readValue("{\"id\":\"id\",\"verb\":\"post\",\"object\":{\"id\":\"objectId\",\"objectType\":\"task\"}}\n", Activity.class));
List<StreamsDatum> result1 = filter.process(datum1);
assert result1.size() == 0;
StreamsDatum datum2 = new StreamsDatum(mapper.readValue("{\"id\":\"id\",\"verb\":\"post\",\"target\":{\"id\":\"targetId\",\"objectType\":\"group\"}}\n", Activity.class));
List<StreamsDatum> result2 = filter.process(datum2);
assert result2.size() == 1;
}
/**
* Test that wildcard verb definition matches every item
*/
@Test
public void testAllWildcard() throws Exception {
VerbDefinition definition = mapper.readValue(VerbDefinitionFilterTest.class.getResourceAsStream("/post.json"), VerbDefinition.class);
VerbDefinitionKeepFilter filter = new VerbDefinitionKeepFilter(Sets.newHashSet(definition));
filter.prepare(null);
StreamsDatum datum1 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"notpost\"}\n", Activity.class));
List<StreamsDatum> result1 = filter.process(datum1);
assert result1.size() == 0;
StreamsDatum datum2 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"post\"}\n", Activity.class));
List<StreamsDatum> result2 = filter.process(datum2);
assert result2.size() == 1;
StreamsDatum datum3 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"provider\":{\"id\":\"providerId\",\"objectType\":\"product\"}}\n", Activity.class));
List<StreamsDatum> result3 = filter.process(datum3);
assert result3.size() == 1;
StreamsDatum datum4 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"provider\":{\"id\":\"providerId\",\"objectType\":\"application\"}}\n\n", Activity.class));
List<StreamsDatum> result4 = filter.process(datum4);
assert result4.size() == 1;
StreamsDatum datum5 = new StreamsDatum(mapper.readValue("{\"id\":\"id\",\"verb\":\"post\",\"object\":{\"id\":\"objectId\",\"objectType\":\"task\"}}\n", Activity.class));
List<StreamsDatum> result5 = filter.process(datum5);
assert result5.size() == 1;
StreamsDatum datum6 = new StreamsDatum(mapper.readValue("{\"id\":\"id\",\"verb\":\"post\",\"target\":{\"id\":\"targetId\",\"objectType\":\"group\"}}\n", Activity.class));
List<StreamsDatum> result6 = filter.process(datum6);
assert result6.size() == 1;
StreamsDatum datum7 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"actor\":{\"id\":\"actorId\",\"objectType\":\"page\"}}\n", Activity.class));
List<StreamsDatum> result7 = filter.process(datum7);
assert result7.size() == 1;
}
/**
* Test that multiple verb definitions chain properly
*/
@Test
public void testAllMultipleDefinition() throws Exception {
VerbDefinition provider = mapper.readValue(VerbDefinitionFilterTest.class.getResourceAsStream("/provider.json"), VerbDefinition.class);
VerbDefinition actor = mapper.readValue(VerbDefinitionFilterTest.class.getResourceAsStream("/actor.json"), VerbDefinition.class);
VerbDefinition object = mapper.readValue(VerbDefinitionFilterTest.class.getResourceAsStream("/object.json"), VerbDefinition.class);
VerbDefinition target = mapper.readValue(VerbDefinitionFilterTest.class.getResourceAsStream("/targetrequired.json"), VerbDefinition.class);
VerbDefinition follow = mapper.readValue(VerbDefinitionFilterTest.class.getResourceAsStream("/follow.json"), VerbDefinition.class);
VerbDefinitionKeepFilter filter = new VerbDefinitionKeepFilter(Sets.newHashSet(provider,actor,object,target,follow));
filter.prepare(null);
StreamsDatum datum1 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"notpost\"}\n", Activity.class));
List<StreamsDatum> result1 = filter.process(datum1);
assert result1.size() == 0;
StreamsDatum datum2 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"post\"}\n", Activity.class));
List<StreamsDatum> result2 = filter.process(datum2);
assert result2.size() == 1;
StreamsDatum datum3 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"provider\":{\"id\":\"providerId\",\"objectType\":\"product\"}}\n", Activity.class));
List<StreamsDatum> result3 = filter.process(datum3);
assert result3.size() == 1;
StreamsDatum datum4 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"provider\":{\"id\":\"providerId\",\"objectType\":\"application\"}}\n\n", Activity.class));
List<StreamsDatum> result4 = filter.process(datum4);
assert result4.size() == 1;
StreamsDatum datum5 = new StreamsDatum(mapper.readValue("{\"id\":\"id\",\"verb\":\"post\",\"object\":{\"id\":\"objectId\",\"objectType\":\"task\"}}\n", Activity.class));
List<StreamsDatum> result5 = filter.process(datum5);
assert result5.size() == 1;
StreamsDatum datum6 = new StreamsDatum(mapper.readValue("{\"id\":\"id\",\"verb\":\"post\",\"target\":{\"id\":\"targetId\",\"objectType\":\"group\"}}\n", Activity.class));
List<StreamsDatum> result6 = filter.process(datum6);
assert result6.size() == 1;
StreamsDatum datum7 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"actor\":{\"id\":\"actorId\",\"objectType\":\"page\"}}\n", Activity.class));
List<StreamsDatum> result7 = filter.process(datum7);
assert result7.size() == 1;
StreamsDatum datum9 = new StreamsDatum(mapper.readValue("{\"id\":\"3\",\"verb\":\"follow\",\"actor\":{\"id\":\"actorId\",\"objectType\":\"page\"},\"object\":{\"id\":\"objectId\",\"objectType\":\"page\"}}}\n", Activity.class));
List<StreamsDatum> result9 = filter.process(datum9);
assert result9.size() == 1;
}
/**
* Test verb drop filter alone
*/
@Test
public void testVerbDropFilter() throws Exception {
VerbDefinition definition = mapper.readValue(VerbDefinitionFilterTest.class.getResourceAsStream("/post.json"), VerbDefinition.class);
VerbDefinitionDropFilter filter = new VerbDefinitionDropFilter(Sets.newHashSet(definition));
filter.prepare(null);
StreamsDatum datum1 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"notpost\"}\n", Activity.class));
List<StreamsDatum> result1 = filter.process(datum1);
assert result1.size() == 1;
StreamsDatum datum2 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"post\"}\n", Activity.class));
List<StreamsDatum> result2 = filter.process(datum2);
assert result2.size() == 0;
}
/**
* Test that wildcard verb definition will drop every item
*/
@Test
public void testDropAllWildcard() throws Exception {
VerbDefinition definition = mapper.readValue(VerbDefinitionFilterTest.class.getResourceAsStream("/post.json"), VerbDefinition.class);
VerbDefinitionDropFilter filter = new VerbDefinitionDropFilter(Sets.newHashSet(definition));
filter.prepare(null);
StreamsDatum datum1 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"notpost\"}\n", Activity.class));
List<StreamsDatum> result1 = filter.process(datum1);
assert result1.size() == 1;
StreamsDatum datum2 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"post\"}\n", Activity.class));
List<StreamsDatum> result2 = filter.process(datum2);
assert result2.size() == 0;
StreamsDatum datum3 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"provider\":{\"id\":\"providerId\",\"objectType\":\"product\"}}\n", Activity.class));
List<StreamsDatum> result3 = filter.process(datum3);
assert result3.size() == 0;
StreamsDatum datum4 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"provider\":{\"id\":\"providerId\",\"objectType\":\"application\"}}\n\n", Activity.class));
List<StreamsDatum> result4 = filter.process(datum4);
assert result4.size() == 0;
StreamsDatum datum5 = new StreamsDatum(mapper.readValue("{\"id\":\"id\",\"verb\":\"post\",\"object\":{\"id\":\"objectId\",\"objectType\":\"task\"}}\n", Activity.class));
List<StreamsDatum> result5 = filter.process(datum5);
assert result5.size() == 0;
StreamsDatum datum6 = new StreamsDatum(mapper.readValue("{\"id\":\"id\",\"verb\":\"post\",\"target\":{\"id\":\"targetId\",\"objectType\":\"group\"}}\n", Activity.class));
List<StreamsDatum> result6 = filter.process(datum6);
assert result6.size() == 0;
StreamsDatum datum7 = new StreamsDatum(mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"actor\":{\"id\":\"actorId\",\"objectType\":\"page\"}}\n", Activity.class));
List<StreamsDatum> result7 = filter.process(datum7);
assert result7.size() == 0;
}
}
| |
/*
* Copyright 2005 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.core.rule;
import org.drools.core.WorkingMemory;
import org.drools.core.base.ClassFieldAccessorCache;
import org.drools.core.base.ClassFieldAccessorStore;
import org.drools.core.base.ClassFieldReader;
import org.drools.core.base.ClassObjectType;
import org.drools.core.base.FieldFactory;
import org.drools.core.common.InternalFactHandle;
import org.drools.core.common.InternalWorkingMemory;
import org.drools.core.impl.InternalKnowledgeBase;
import org.drools.core.impl.StatefulKnowledgeSessionImpl;
import org.drools.core.reteoo.LeftTupleImpl;
import org.drools.core.reteoo.RightTupleImpl;
import org.drools.core.rule.PredicateConstraint.PredicateContextEntry;
import org.drools.core.rule.constraint.MvelConstraint;
import org.drools.core.spi.InternalReadAccessor;
import org.drools.core.spi.PredicateExpression;
import org.drools.core.spi.Tuple;
import org.drools.core.test.model.Cheese;
import org.junit.Before;
import org.junit.Test;
import org.kie.internal.KnowledgeBaseFactory;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
 * Tests evaluation of field constraints -- literal (String), primitive
 * literal (int), and predicate constraints -- directly against facts
 * inserted into a stateful knowledge session.
 */
public class FieldConstraintTest {
    // builds and caches field readers (e.g. Cheese.type, Cheese.price)
    ClassFieldAccessorStore store = new ClassFieldAccessorStore();

    @Before
    public void setUp() throws Exception {
        store.setClassFieldAccessorCache( new ClassFieldAccessorCache( Thread.currentThread().getContextClassLoader() ) );
        // wire accessors eagerly so reader construction errors surface in setup
        store.setEagerWire( true );
    }

    public FieldConstraintTest() {
    }

    /**
     * <pre>
     *
     *
     * ( Cheese (type "cheddar") )
     *
     *
     * </pre>
     *
     * This is currently the same as using a ReturnValueConstraint just that it
     * doesn't need any requiredDeclarations
     */
    @Test
    public void testLiteralConstraint() {
        InternalKnowledgeBase kBase = (InternalKnowledgeBase) KnowledgeBaseFactory.newKnowledgeBase();
        StatefulKnowledgeSessionImpl ksession = (StatefulKnowledgeSessionImpl)kBase.newStatefulKnowledgeSession();

        // reader for the Cheese.type field
        final ClassFieldReader extractor = store.getReader(Cheese.class,
                                                           "type");

        final MvelConstraint constraint = new MvelConstraintTestUtil( "type == \"cheddar\"",
                                                                      FieldFactory.getInstance().getFieldValue( "cheddar" ),
                                                                      extractor );

        final Cheese cheddar = new Cheese( "cheddar",
                                           5 );

        final InternalFactHandle cheddarHandle = (InternalFactHandle) ksession.insert( cheddar );

        // check constraint: matching type is allowed
        assertTrue( constraint.isAllowed( cheddarHandle,
                                          ksession ) );

        final Cheese stilton = new Cheese( "stilton",
                                           5 );

        final InternalFactHandle stiltonHandle = (InternalFactHandle) ksession.insert( stilton );

        // check constraint: non-matching type is rejected
        assertFalse( constraint.isAllowed( stiltonHandle,
                                           ksession ) );
    }

    /**
     * <pre>
     *
     *
     * Cheese( price == 5 )
     *
     *
     * </pre>
     */
    @Test
    public void testPrimitiveLiteralConstraint() {
        InternalKnowledgeBase kBase = (InternalKnowledgeBase) KnowledgeBaseFactory.newKnowledgeBase();
        StatefulKnowledgeSessionImpl ksession = (StatefulKnowledgeSessionImpl)kBase.newStatefulKnowledgeSession();

        // reader for the primitive int Cheese.price field
        final ClassFieldReader extractor = store.getReader(Cheese.class,
                                                           "price");

        final MvelConstraint constraint = new MvelConstraintTestUtil( "price == 5",
                                                                      FieldFactory.getInstance().getFieldValue( 5 ),
                                                                      extractor );

        final Cheese cheddar = new Cheese( "cheddar",
                                           5 );

        final InternalFactHandle cheddarHandle = (InternalFactHandle) ksession.insert( cheddar );

        // check constraint: price 5 matches
        assertTrue( constraint.isAllowed( cheddarHandle,
                                          ksession ) );

        final Cheese stilton = new Cheese( "stilton",
                                           10 );

        final InternalFactHandle stiltonHandle = (InternalFactHandle) ksession.insert( stilton );

        // check constraint: price 10 does not match
        assertFalse(constraint.isAllowed(stiltonHandle,
                                         ksession));
    }

    /**
     * <pre>
     *
     *
     * (Cheese (price ?price1 )
     * (Cheese (price ?price2&:(= ?price2 (* 2 ?price1) )
     *
     *
     * </pre>
     */
    @Test
    public void testPredicateConstraint() {
        InternalKnowledgeBase kBase = (InternalKnowledgeBase) KnowledgeBaseFactory.newKnowledgeBase();
        StatefulKnowledgeSessionImpl ksession = (StatefulKnowledgeSessionImpl)kBase.newStatefulKnowledgeSession();

        final InternalReadAccessor priceExtractor = store.getReader( Cheese.class,
                                                                     "price" );

        Pattern pattern = new Pattern( 0,
                                       new ClassObjectType( Cheese.class ) );

        // Bind the extractor to a declaration
        // Declarations know the pattern they derive their value from
        final Declaration price1Declaration = new Declaration( "price1",
                                                               priceExtractor,
                                                               pattern );

        pattern = new Pattern( 1,
                               new ClassObjectType( Cheese.class ) );

        // Bind the extractor to a declaration
        // Declarations know the pattern they derive their value from
        final Declaration price2Declaration = new Declaration( "price2",
                                                               priceExtractor,
                                                               pattern );

        // predicate: price2 must be exactly twice price1
        final PredicateExpression evaluator = new PredicateExpression() {

            private static final long serialVersionUID = 510l;

            public boolean evaluate(InternalFactHandle handle,
                                    Tuple tuple,
                                    Declaration[] previousDeclarations,
                                    Declaration[] localDeclarations,
                                    WorkingMemory workingMemory,
                                    Object context) {
                // price1 comes from the left tuple, price2 from the candidate fact
                int price1 = previousDeclarations[0].getIntValue( (InternalWorkingMemory) workingMemory,
                                                                  tuple.getObject( previousDeclarations[0] ) );
                int price2 = localDeclarations[0].getIntValue( (InternalWorkingMemory) workingMemory,
                                                               handle.getObject() );

                return (price2 == (price1 * 2));
            }

            public Object createContext() {
                return null;
            }

            public void readExternal(ObjectInput in) throws IOException,
                                                    ClassNotFoundException {
            }

            public void writeExternal(ObjectOutput out) throws IOException {
            }
        };

        final PredicateConstraint constraint1 = new PredicateConstraint( evaluator,
                                                                         new Declaration[]{price1Declaration},
                                                                         new Declaration[]{price2Declaration},
                                                                         new String[]{},
                                                                         new String[]{});

        final Cheese cheddar0 = new Cheese( "cheddar",
                                            5 );
        final InternalFactHandle f0 = (InternalFactHandle) ksession.insert( cheddar0 );

        LeftTupleImpl tuple = new LeftTupleImpl( f0,
                                                 null,
                                                 true );

        final Cheese cheddar1 = new Cheese( "cheddar",
                                            10 );

        final InternalFactHandle f1 = (InternalFactHandle) ksession.insert( cheddar1 );

        // extend the tuple with the second fact (price 10 == 2 * 5)
        tuple = new LeftTupleImpl( tuple,
                                   new RightTupleImpl( f1, null ),
                                   null,
                                   true );

        final PredicateContextEntry context = (PredicateContextEntry) constraint1.createContextEntry();
        context.updateFromTuple(ksession,
                                tuple);

        assertTrue( constraint1.isAllowedCachedLeft( context,
                                                     f1 ) );
    }
}
| |
/*
* Druid - a distributed column store.
* Copyright 2012 - 2015 Metamarkets Group Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.druid.segment;
import com.google.common.base.Function;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.metamx.common.ISE;
import com.metamx.common.guava.CloseQuietly;
import com.metamx.common.logger.Logger;
import io.druid.segment.column.BitmapIndex;
import io.druid.segment.column.Column;
import io.druid.segment.column.ColumnCapabilities;
import io.druid.segment.column.ComplexColumn;
import io.druid.segment.column.DictionaryEncodedColumn;
import io.druid.segment.column.GenericColumn;
import io.druid.segment.column.IndexedFloatsGenericColumn;
import io.druid.segment.column.IndexedLongsGenericColumn;
import io.druid.segment.column.ValueType;
import io.druid.segment.data.ArrayBasedIndexedInts;
import io.druid.segment.data.BitmapCompressedIndexedInts;
import io.druid.segment.data.EmptyIndexedInts;
import io.druid.segment.data.Indexed;
import io.druid.segment.data.IndexedInts;
import io.druid.segment.data.IndexedIterable;
import io.druid.segment.data.ListIndexed;
import org.joda.time.Interval;
import javax.annotation.Nullable;
import java.io.Closeable;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Set;
/**
*/
public class QueryableIndexIndexableAdapter implements IndexableAdapter
{
private static final Logger log = new Logger(QueryableIndexIndexableAdapter.class);
private final int numRows;
private final QueryableIndex input;
private final List<String> availableDimensions;
public QueryableIndexIndexableAdapter(QueryableIndex input)
{
this.input = input;
numRows = input.getNumRows();
// It appears possible that the dimensions have some columns listed which do not have a DictionaryEncodedColumn
// This breaks current logic, but should be fine going forward. This is a work-around to make things work
// in the current state. This code shouldn't be needed once github tracker issue #55 is finished.
this.availableDimensions = Lists.newArrayList();
for (String dim : input.getAvailableDimensions()) {
final Column col = input.getColumn(dim);
if (col == null) {
log.warn("Wtf!? column[%s] didn't exist!?!?!?", dim);
} else if (col.getDictionaryEncoding() != null) {
availableDimensions.add(dim);
} else {
log.info("No dictionary on dimension[%s]", dim);
}
}
}
@Override
public Interval getDataInterval()
{
return input.getDataInterval();
}
@Override
public int getNumRows()
{
return numRows;
}
@Override
public Indexed<String> getDimensionNames()
{
return new ListIndexed<>(availableDimensions, String.class);
}
@Override
public Indexed<String> getMetricNames()
{
final Set<String> columns = Sets.newLinkedHashSet(input.getColumnNames());
final HashSet<String> dimensions = Sets.newHashSet(getDimensionNames());
return new ListIndexed<>(
Lists.newArrayList(Sets.difference(columns, dimensions)),
String.class
);
}
@Override
public Indexed<String> getDimValueLookup(String dimension)
{
final Column column = input.getColumn(dimension);
if (column == null) {
return null;
}
final DictionaryEncodedColumn dict = column.getDictionaryEncoding();
if (dict == null) {
return null;
}
return new Indexed<String>()
{
@Override
public Class<? extends String> getClazz()
{
return String.class;
}
@Override
public int size()
{
return dict.getCardinality();
}
@Override
public String get(int index)
{
return dict.lookupName(index);
}
@Override
public int indexOf(String value)
{
return dict.lookupId(value);
}
@Override
public Iterator<String> iterator()
{
return IndexedIterable.create(this).iterator();
}
};
}
@Override
public Iterable<Rowboat> getRows()
{
  // Streams every row of the underlying index as a Rowboat carrying
  // (timestamp, per-dimension dictionary-id arrays, metric values, row number).
  return new Iterable<Rowboat>()
  {
    @Override
    public Iterator<Rowboat> iterator()
    {
      return new Iterator<Rowboat>()
      {
        // Generic column backing the __time column; closed once iteration completes.
        final GenericColumn timestamps = input.getColumn(Column.TIME_COLUMN_NAME).getGenericColumn();
        // One entry per metric: a GenericColumn for FLOAT/LONG, a ComplexColumn for COMPLEX.
        final Object[] metrics;
        // One dictionary-encoded column per dimension, in getDimensionNames() order.
        final DictionaryEncodedColumn[] dictionaryEncodedColumns;
        final int numMetrics = getMetricNames().size();
        int currRow = 0;
        // Guards the one-time close of the column resources in hasNext().
        boolean done = false;

        // Instance initializer: open every dimension and metric column up front.
        {
          this.dictionaryEncodedColumns = FluentIterable
              .from(getDimensionNames())
              .transform(
                  new Function<String, DictionaryEncodedColumn>()
                  {
                    @Override
                    public DictionaryEncodedColumn apply(String dimName)
                    {
                      return input.getColumn(dimName)
                                  .getDictionaryEncoding();
                    }
                  }
              ).toArray(DictionaryEncodedColumn.class);
          final Indexed<String> availableMetrics = getMetricNames();
          metrics = new Object[availableMetrics.size()];
          for (int i = 0; i < metrics.length; ++i) {
            final Column column = input.getColumn(availableMetrics.get(i));
            final ValueType type = column.getCapabilities().getType();
            switch (type) {
              case FLOAT:
              case LONG:
                metrics[i] = column.getGenericColumn();
                break;
              case COMPLEX:
                metrics[i] = column.getComplexColumn();
                break;
              default:
                throw new ISE("Cannot handle type[%s]", type);
            }
          }
        }

        @Override
        public boolean hasNext()
        {
          final boolean hasNext = currRow < numRows;
          // On exhaustion, release every column resource exactly once.
          if (!hasNext && !done) {
            CloseQuietly.close(timestamps);
            for (Object metric : metrics) {
              if (metric instanceof Closeable) {
                CloseQuietly.close((Closeable) metric);
              }
            }
            for (Object dimension : dictionaryEncodedColumns) {
              if (dimension instanceof Closeable) {
                CloseQuietly.close((Closeable) dimension);
              }
            }
            done = true;
          }
          return hasNext;
        }

        @Override
        public Rowboat next()
        {
          if (!hasNext()) {
            throw new NoSuchElementException();
          }
          // Gather the dictionary ids for each dimension; single-value rows are
          // wrapped so every dimension uniformly yields an int[] of values.
          final int[][] dims = new int[dictionaryEncodedColumns.length][];
          int dimIndex = 0;
          for (final DictionaryEncodedColumn dict : dictionaryEncodedColumns) {
            final IndexedInts dimVals;
            if (dict.hasMultipleValues()) {
              dimVals = dict.getMultiValueRow(currRow);
            } else {
              dimVals = new ArrayBasedIndexedInts(new int[]{dict.getSingleValueRow(currRow)});
            }
            int[] theVals = new int[dimVals.size()];
            for (int j = 0; j < theVals.length; ++j) {
              theVals[j] = dimVals.get(j);
            }
            dims[dimIndex++] = theVals;
          }
          // Extract each metric using the concrete column type opened above.
          // NOTE(review): a metric of an unexpected runtime type leaves a null slot.
          Object[] metricArray = new Object[numMetrics];
          for (int i = 0; i < metricArray.length; ++i) {
            if (metrics[i] instanceof IndexedFloatsGenericColumn) {
              metricArray[i] = ((GenericColumn) metrics[i]).getFloatSingleValueRow(currRow);
            } else if (metrics[i] instanceof IndexedLongsGenericColumn) {
              metricArray[i] = ((GenericColumn) metrics[i]).getLongSingleValueRow(currRow);
            } else if (metrics[i] instanceof ComplexColumn) {
              metricArray[i] = ((ComplexColumn) metrics[i]).getRowValue(currRow);
            }
          }
          final Rowboat retVal = new Rowboat(
              timestamps.getLongSingleValueRow(currRow), dims, metricArray, currRow
          );
          ++currRow;
          return retVal;
        }

        @Override
        public void remove()
        {
          // Read-only view over an immutable index.
          throw new UnsupportedOperationException();
        }
      };
    }
  };
}
@Override
public IndexedInts getBitmapIndex(String dimension, String value)
{
  // Resolve the dimension's bitmap for the given value; an unknown
  // dimension or a dimension without bitmap indexes yields an empty result.
  final Column dimColumn = input.getColumn(dimension);
  if (dimColumn != null) {
    final BitmapIndex bitmapIndex = dimColumn.getBitmapIndex();
    if (bitmapIndex != null) {
      return new BitmapCompressedIndexedInts(bitmapIndex.getBitmap(value));
    }
  }
  return new EmptyIndexedInts();
}
@Override
public String getMetricType(String metric)
{
  // Translate the stored column's ValueType into the metric-type name:
  // "float"/"long" for primitives, the registered type name for complex columns.
  final Column metricColumn = input.getColumn(metric);
  final ValueType valueType = metricColumn.getCapabilities().getType();
  if (valueType == ValueType.FLOAT) {
    return "float";
  } else if (valueType == ValueType.LONG) {
    return "long";
  } else if (valueType == ValueType.COMPLEX) {
    return metricColumn.getComplexColumn().getTypeName();
  } else {
    throw new ISE("Unknown type[%s]", valueType);
  }
}
@Override
public ColumnCapabilities getCapabilities(String column)
{
  // Straight delegation to the wrapped index's column metadata.
  final Column underlying = input.getColumn(column);
  return underlying.getCapabilities();
}
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.bookkeeper.bookie;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This is the file handle for a ledger's index file that maps entry ids to location.
* It is used by LedgerCache.
*
* <p>
* Ledger index file is made of a header and several fixed-length index pages, which records the offsets of data stored in entry loggers
* <pre><header><index pages></pre>
* <b>Header</b> is formated as below:
* <pre><magic bytes><len of master key><master key></pre>
* <ul>
* <li>magic bytes: 8 bytes, 'BKLE\0\0\0\0'
* <li>len of master key: indicates length of master key. -1 means no master key stored in header.
* <li>master key: master key
* </ul>
* <b>Index page</b> is a fixed-length page, which contains serveral entries which point to the offsets of data stored in entry loggers.
* </p>
*/
class FileInfo {
    static Logger LOG = LoggerFactory.getLogger(FileInfo.class);
    // Sentinel written into the header's key-length slot when no master key is stored.
    static final int NO_MASTER_KEY = -1;
    // Channel over the ledger index file, opened in "rws" (synchronous) mode.
    private FileChannel fc;
    // The ledger index file on disk.
    private final File lf;
    /**
     * The fingerprint of a ledger index file: the magic bytes 'BKLE\0\0\0\0'.
     */
    private byte header[] = "BKLE\0\0\0\0".getBytes();
    // Bytes reserved at the front of the file for the header
    // (magic + master-key length + master key); index data starts here.
    static final long START_OF_DATA = 1024;
    // Current length of the backing file in bytes, header region included.
    private long size;
    // Number of outstanding use() calls; the channel is only closed
    // once this drops back to zero after close() has been requested.
    private int useCount;
    // Set by close(); defers the actual channel close until useCount hits zero.
    private boolean isClosed;

    /**
     * Opens the index file, creating and initializing it (magic bytes followed
     * by a NO_MASTER_KEY length marker) when the file is empty.
     *
     * @param lf ledger index file
     * @throws IOException on any file-system error
     */
    public FileInfo(File lf) throws IOException {
        this.lf = lf;
        fc = new RandomAccessFile(lf, "rws").getChannel();
        size = fc.size();
        if (size == 0) {
            fc.write(ByteBuffer.wrap(header));
            // write NO_MASTER_KEY, which means there is no master key
            ByteBuffer buf = ByteBuffer.allocate(4);
            buf.putInt(NO_MASTER_KEY);
            buf.flip();
            fc.write(buf);
        }
    }

    /**
     * Write master key to index file header.
     * The key (4-byte length prefix + bytes) is stored directly after the
     * magic bytes and must fit inside the reserved header region.
     *
     * @param masterKey master key to store
     * @return void
     * @throws IOException if the key is null or too large, or on write failure
     */
    synchronized public void writeMasterKey(byte[] masterKey) throws IOException {
        // write master key
        if (masterKey == null ||
            masterKey.length + 4 + header.length > START_OF_DATA) {
            throw new IOException("master key is more than " + (START_OF_DATA - 4 - header.length));
        }
        int len = masterKey.length;
        ByteBuffer lenBuf = ByteBuffer.allocate(4);
        lenBuf.putInt(len);
        lenBuf.flip();
        // key length goes right after the magic bytes, then the key itself
        fc.position(header.length);
        fc.write(lenBuf);
        fc.write(ByteBuffer.wrap(masterKey));
    }

    /**
     * Read master key from the index file header.
     *
     * @return master key. null means no master key stored in index header
     * @throws IOException on short reads or any file-system error
     */
    synchronized public byte[] readMasterKey() throws IOException {
        // first the 4-byte length stored after the magic bytes
        ByteBuffer lenBuf = ByteBuffer.allocate(4);
        int total = readAbsolute(lenBuf, header.length);
        if (total != 4) {
            throw new IOException("Short read during reading master key length");
        }
        lenBuf.rewind();
        int len = lenBuf.getInt();
        if (len == NO_MASTER_KEY) {
            return null;
        }
        byte[] masterKey = new byte[len];
        total = readAbsolute(ByteBuffer.wrap(masterKey), header.length + 4);
        if (total != len) {
            throw new IOException("Short read during reading master key");
        }
        return masterKey;
    }

    /**
     * Size of the data region (file size minus the reserved header),
     * never negative.
     */
    synchronized public long size() {
        long rc = size-START_OF_DATA;
        if (rc < 0) {
            rc = 0;
        }
        return rc;
    }

    /**
     * Read from the data region; {@code position} is relative to START_OF_DATA.
     */
    synchronized public int read(ByteBuffer bb, long position) throws IOException {
        return readAbsolute(bb, position + START_OF_DATA);
    }

    /**
     * Fill {@code bb} completely starting at absolute file offset {@code start},
     * looping over partial reads. Throws on EOF before the buffer is full.
     */
    private int readAbsolute(ByteBuffer bb, long start) throws IOException {
        int total = 0;
        while(bb.remaining() > 0) {
            int rc = fc.read(bb, start);
            if (rc <= 0) {
                throw new IOException("Short read");
            }
            total += rc;
            // should move read position
            start += rc;
        }
        return total;
    }

    /**
     * Mark the handle closed; the channel is closed now only if no caller
     * still holds it via use(), otherwise release() performs the close.
     */
    synchronized public void close() throws IOException {
        isClosed = true;
        if (useCount == 0) {
            fc.close();
        }
    }

    /**
     * Gathering write into the data region; {@code position} is relative to
     * START_OF_DATA. Loops until the last buffer is drained, and updates the
     * cached file size even if the write fails part-way.
     */
    synchronized public long write(ByteBuffer[] buffs, long position) throws IOException {
        long total = 0;
        try {
            fc.position(position+START_OF_DATA);
            while(buffs[buffs.length-1].remaining() > 0) {
                long rc = fc.write(buffs);
                if (rc <= 0) {
                    throw new IOException("Short write");
                }
                total += rc;
            }
        } finally {
            // account for whatever was written, even on failure
            long newsize = position+START_OF_DATA+total;
            if (newsize > size) {
                size = newsize;
            }
        }
        return total;
    }

    /** Increment the usage count; pairs with release(). */
    synchronized public void use() {
        useCount++;
    }

    /** Decrement the usage count, closing the channel if close() was requested. */
    synchronized public void release() {
        useCount--;
        if (isClosed && useCount == 0) {
            try {
                fc.close();
            } catch (IOException e) {
                LOG.error("Error closing file channel", e);
            }
        }
    }

    /**
     * Getter to a handle on the actual ledger index file.
     * This is used when we are deleting a ledger and want to physically remove the index file.
     */
    File getFile() {
        return lf;
    }
}
| |
/*
* Copyright (C) 2011 Stefan Willinger
* wis775@users.sourceforge.net
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package net.sourceforge.squirrel_sql.client.preferences;
import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.util.ArrayList;
import java.util.List;
import javax.swing.BorderFactory;
import javax.swing.DefaultListModel;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.JTextArea;
import javax.swing.JTextField;
import javax.swing.ListSelectionModel;
import javax.swing.SwingConstants;
import javax.swing.SwingUtilities;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import net.sourceforge.squirrel_sql.client.IApplication;
import net.sourceforge.squirrel_sql.fw.gui.action.wikiTable.GenericWikiTableConfigurationBean;
import net.sourceforge.squirrel_sql.fw.gui.action.wikiTable.IWikiTableConfiguration;
import net.sourceforge.squirrel_sql.fw.gui.action.wikiTable.IWikiTableConfigurationFactory;
import net.sourceforge.squirrel_sql.fw.gui.action.wikiTable.WikiTableConfigurationFactory;
import net.sourceforge.squirrel_sql.fw.util.StringManager;
import net.sourceforge.squirrel_sql.fw.util.StringManagerFactory;
import org.apache.commons.lang.StringUtils;
/**
* Preferences panel for WIKI table configurations
* @author Stefan Willinger
*
*/
public class WikiTablePreferencesPanel extends JPanel {
    private static final long serialVersionUID = 1L;

    /** Internationalized strings for this class. */
    private static final StringManager s_stringMgr =
        StringManagerFactory.getStringManager(WikiTablePreferencesPanel.class);

    private final Insets LABEL_INSETS = new Insets(2, 28, 6, 0);
    private final Insets FIELD_INSETS = new Insets(2, 8, 6, 28);

    private IWikiTableConfigurationFactory wikiTableConfigFactory = WikiTableConfigurationFactory.getInstance();

    private IApplication application;

    // Components of the detail panel
    private JLabel nameLable = null;
    private JTextField name = null;
    private JLabel tableStartTagLabel = null;
    private JTextField tableStartTag = null;
    private JLabel headerStartTagLabel = null;
    private JTextField headerStartTag = null;
    private JLabel headerCellTagLabel = null;
    private JTextField headerCellTag = null;
    private JLabel headerEndTagLabel = null;
    private JTextField headerEndTag = null;
    private JLabel rowStartTagLabel = null;
    private JTextField rowStartTag = null;
    private JLabel cellTagLabel = null;
    private JTextField cellTag = null;
    private JLabel rowEndTagLabel = null;
    private JTextField rowEndTag = null;
    private JLabel tableEndTagLabel = null;
    private JTextField tableEndTag = null;
    private JLabel noWikiTagLabel = null;
    private JTextField noWikiTag = null;

    private DefaultListModel wikiConfigListModel = null;
    private JList wikiConfigList;
    private JButton newButton;
    private JButton copyButton;
    private JButton deleteButton;
    private JTable exampleTable;
    private JTextArea exampleText;
    // The configuration currently shown in the detail panel; null when none selected.
    private IWikiTableConfiguration currentConfigurationInView;
    private JCheckBox enabled;

    public WikiTablePreferencesPanel()
    {
        super(new GridBagLayout());
        createUserInterface();
    }

    /**
     * Creates the user Interface: overview list (left), note panel below it,
     * detail editor (middle) and live example (right), all inside a scroll pane.
     */
    private void createUserInterface()
    {
        JPanel jp = new JPanel(new GridBagLayout());
        GridBagConstraints gbc = new GridBagConstraints();
        gbc.fill = GridBagConstraints.BOTH;
        gbc.insets = new Insets(4, 4, 4, 4);
        gbc.gridx = 0;
        gbc.gridy = 0;
        gbc.weightx = 0;
        gbc.weighty = 1;
        jp.add(createOverviewPanel(), gbc);
        gbc.gridy = 1;
        gbc.weighty = 0;
        gbc.fill = GridBagConstraints.HORIZONTAL;
        jp.add(createNotePanel(), gbc);
        gbc.weightx = 1;
        gbc.weighty = 1;
        gbc.gridheight=2;
        gbc.gridy = 0;
        gbc.gridx =1;
        gbc.fill = GridBagConstraints.BOTH;
        jp.add(createDetailPanel(), gbc);
        gbc.weightx = 1;
        gbc.weighty = 1;
        gbc.gridheight=2;
        gbc.gridy = 0;
        gbc.gridx =2;
        gbc.fill = GridBagConstraints.BOTH;
        jp.add(createExamplePanel(), gbc);
        JScrollPane sp = new JScrollPane(jp);
        gbc = new GridBagConstraints();
        gbc.fill=GridBagConstraints.BOTH;
        gbc.weightx=1;
        gbc.weighty=1;
        add(sp,gbc);
    }

    /**
     * Create the component, which shows a example.
     * The example contains a {@link JTable} and a {@link JTextField}. If the user change a element of the configuration, then the example will be updated.
     * So we can provide a feedback of the result immediately.
     */
    private Component createExamplePanel() {
        JPanel jp = new JPanel(new GridBagLayout());
        jp.setBorder(BorderFactory.createTitledBorder(s_stringMgr.getString("WikiTablePreferencesPanel.titleExample"))); //$NON-NLS-1$
        GridBagConstraints gbc = new GridBagConstraints();
        String[][] rowData = new String[][]{
                {s_stringMgr.getString("WikiTablePreferencesPanel.austria"), s_stringMgr.getString("WikiTablePreferencesPanel.vienna")}, //$NON-NLS-1$ //$NON-NLS-2$
                {s_stringMgr.getString("WikiTablePreferencesPanel.italy"), s_stringMgr.getString("WikiTablePreferencesPanel.rome")} //$NON-NLS-1$ //$NON-NLS-2$
        };
        String[] columnNames = new String[]{s_stringMgr.getString("WikiTablePreferencesPanel.country"), s_stringMgr.getString("WikiTablePreferencesPanel.captial")}; //$NON-NLS-1$ //$NON-NLS-2$
        exampleTable = new JTable(rowData, columnNames);
        exampleTable.setMinimumSize(new Dimension(10,10));
        JScrollPane sp = new JScrollPane(exampleTable);
        sp.setPreferredSize(new Dimension(100,50));
        gbc.fill=GridBagConstraints.BOTH;
        // give the table a small share of the vertical space
        gbc.weighty=0.1;
        jp.add(sp, gbc);
        gbc.gridx=0;
        gbc.gridy=1;
        gbc.fill=GridBagConstraints.HORIZONTAL;
        gbc.weighty=0;
        gbc.weightx=0;
        jp.add(new JLabel(s_stringMgr.getString("WikiTablePreferencesPanel.titleResultExample")), gbc); //$NON-NLS-1$
        exampleText = new JTextArea(15,20);
        exampleText.setWrapStyleWord(true);
        exampleText.setEditable(false);
        exampleText.setLineWrap(false);
        gbc.gridy=2;
        gbc.fill=GridBagConstraints.BOTH;
        gbc.weighty=1;
        gbc.weightx=1;
        sp = new JScrollPane(exampleText);
        jp.add(sp, gbc);
        return jp;
    }

    /**
     * Create the panel with a small help text for the user.
     * This panel indicates, which variables are allowed.
     */
    private Component createNotePanel() {
        JPanel jp = new JPanel();
        jp.setBorder(BorderFactory.createTitledBorder("Note")); //$NON-NLS-1$
        String text = "<html><body>"+ //$NON-NLS-1$
            s_stringMgr.getString("WikiTablePreferencesPanel.hintValueVariable") + //$NON-NLS-1$
            "<br />" + //$NON-NLS-1$
            s_stringMgr.getString("WikiTablePreferencesPanel.hintNewLine") + //$NON-NLS-1$
            "</body></html>"; //$NON-NLS-1$
        JLabel label = new JLabel(text);
        jp.add(label);
        return jp;
    }

    /**
     * The overview panel contains a list of all available configurations.
     * There are some methods provided for creating, copying or deleting configurations.
     * @return the overview component
     */
    private Component createOverviewPanel() {
        JPanel jp = new JPanel(new BorderLayout());
        jp.setBorder(BorderFactory.createTitledBorder(s_stringMgr.getString("WikiTablePreferencesPanel.titleOverview"))); //$NON-NLS-1$
        wikiConfigListModel = new DefaultListModel();
        wikiConfigList = new JList(wikiConfigListModel);
        wikiConfigList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
        wikiConfigList.addListSelectionListener(new ListSelectionListener() {
            @Override
            public void valueChanged(ListSelectionEvent e) {
                // defer until the selection change is fully processed
                SwingUtilities.invokeLater(new Runnable() {
                    @Override
                    public void run() {
                        IWikiTableConfiguration selectedValue = (IWikiTableConfiguration) wikiConfigList.getSelectedValue();
                        showDataFor(selectedValue);
                        // copying only makes sense when something is selected
                        copyButton.setEnabled(selectedValue != null);
                    }
                });
            }
        });
        JScrollPane sp = new JScrollPane(wikiConfigList);
        JPanel buttonPanel = new JPanel(new FlowLayout());
        newButton = new JButton(s_stringMgr.getString("WikiTablePreferencesPanel.new")); //$NON-NLS-1$
        newButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                IWikiTableConfiguration newConfig = new GenericWikiTableConfigurationBean();
                addNewConfigToList(newConfig);
            }
        });
        buttonPanel.add(newButton);
        copyButton = new JButton(s_stringMgr.getString("WikiTablePreferencesPanel.copy")); //$NON-NLS-1$
        copyButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                IWikiTableConfiguration newConfig = ((IWikiTableConfiguration)wikiConfigList.getSelectedValue()).copyAsUserSpecific();
                // find the first numeric suffix that makes the name unique
                int suffix = 0;
                String originalName = newConfig.getName();
                do{
                    suffix++;
                    newConfig.setName(originalName+"_"+suffix); //$NON-NLS-1$
                }while(!isUniqueName(newConfig));
                addNewConfigToList(newConfig);
            }
        });
        buttonPanel.add(copyButton);
        deleteButton = new JButton(s_stringMgr.getString("WikiTablePreferencesPanel.delete")); //$NON-NLS-1$
        deleteButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                int selectedIndex = wikiConfigList.getSelectedIndex();
                // we always have at least one item - the built-in configuration -
                // so fall back to selecting it before removing the old selection
                wikiConfigList.setSelectedIndex(0);
                wikiConfigListModel.remove(selectedIndex);
            }
        });
        buttonPanel.add(deleteButton);
        jp.add(sp, BorderLayout.CENTER);
        jp.add(buttonPanel, BorderLayout.SOUTH);
        return jp;
    }

    /**
     * The detail panel contains all information of a specific configuration.
     * The user can edit the configuration in this panel.
     */
    private JPanel createDetailPanel(){
        JPanel jp = new JPanel(new GridBagLayout());
        jp.setBorder(BorderFactory.createTitledBorder(s_stringMgr.getString("WikiTablePreferencesPanel.titleDetailPanel"))); //$NON-NLS-1$
        jp.setMinimumSize(new Dimension(400,50));
        final GridBagConstraints gbc = new GridBagConstraints();
        int row= 0;
        setLabelConstraints(gbc, row);
        nameLable = new JLabel(s_stringMgr.getString("WikiTablePreferencesPanel.name"), SwingConstants.RIGHT); //$NON-NLS-1$
        jp.add(nameLable, gbc);
        setFieldConstraints(gbc, row);
        name = new JTextField(30);
        jp.add(name, gbc);
        setLabelConstraints(gbc, ++row);
        tableStartTagLabel = new JLabel(s_stringMgr.getString("WikiTablePreferencesPanel.tableStartTag"), SwingConstants.RIGHT); //$NON-NLS-1$
        jp.add(tableStartTagLabel, gbc);
        setFieldConstraints(gbc, row);
        tableStartTag = new JTextField(30);
        jp.add(tableStartTag, gbc);
        setLabelConstraints(gbc, ++row);
        headerStartTagLabel = new JLabel(s_stringMgr.getString("WikiTablePreferencesPanel.headerStartTag"), SwingConstants.RIGHT); //$NON-NLS-1$
        jp.add(headerStartTagLabel, gbc);
        setFieldConstraints(gbc, row);
        headerStartTag = new JTextField(30);
        jp.add(headerStartTag, gbc);
        setLabelConstraints(gbc, ++row);
        headerCellTagLabel = new JLabel(s_stringMgr.getString("WikiTablePreferencesPanel.headerCellTag"), SwingConstants.RIGHT); //$NON-NLS-1$
        jp.add(headerCellTagLabel, gbc);
        setFieldConstraints(gbc, row);
        headerCellTag = new JTextField(30);
        jp.add(headerCellTag, gbc);
        setLabelConstraints(gbc, ++row);
        headerEndTagLabel = new JLabel(s_stringMgr.getString("WikiTablePreferencesPanel.headerEndTag"), SwingConstants.RIGHT); //$NON-NLS-1$
        jp.add(headerEndTagLabel, gbc);
        setFieldConstraints(gbc, row);
        headerEndTag = new JTextField(30);
        jp.add(headerEndTag, gbc);
        setLabelConstraints(gbc, ++row);
        rowStartTagLabel = new JLabel(s_stringMgr.getString("WikiTablePreferencesPanel.rowStartTag"), SwingConstants.RIGHT); //$NON-NLS-1$
        jp.add(rowStartTagLabel, gbc);
        setFieldConstraints(gbc, row);
        rowStartTag = new JTextField(30);
        jp.add(rowStartTag, gbc);
        setLabelConstraints(gbc, ++row);
        cellTagLabel = new JLabel(s_stringMgr.getString("WikiTablePreferencesPanel.cellTag"), SwingConstants.RIGHT); //$NON-NLS-1$
        jp.add(cellTagLabel, gbc);
        setFieldConstraints(gbc, row);
        cellTag = new JTextField(30);
        jp.add(cellTag, gbc);
        setLabelConstraints(gbc, ++row);
        rowEndTagLabel = new JLabel(s_stringMgr.getString("WikiTablePreferencesPanel.rowEndTag"), SwingConstants.RIGHT); //$NON-NLS-1$
        jp.add(rowEndTagLabel, gbc);
        setFieldConstraints(gbc, row);
        rowEndTag = new JTextField(30);
        jp.add(rowEndTag, gbc);
        setLabelConstraints(gbc, ++row);
        tableEndTagLabel = new JLabel(s_stringMgr.getString("WikiTablePreferencesPanel.tableEndTag"), SwingConstants.RIGHT); //$NON-NLS-1$
        jp.add(tableEndTagLabel, gbc);
        setFieldConstraints(gbc, row);
        tableEndTag = new JTextField(30);
        jp.add(tableEndTag, gbc);
        setLabelConstraints(gbc, ++row);
        noWikiTagLabel = new JLabel(s_stringMgr.getString("WikiTablePreferencesPanel.noWikiTag"), SwingConstants.RIGHT); //$NON-NLS-1$
        jp.add(noWikiTagLabel, gbc);
        setFieldConstraints(gbc, row);
        noWikiTag = new JTextField(30);
        jp.add(noWikiTag, gbc);
        setLabelConstraints(gbc, ++row);
        // use a local so the noWikiTagLabel field is not clobbered by the "enabled" label
        JLabel enabledLabel = new JLabel(s_stringMgr.getString("WikiTablePreferencesPanel.enabled"), SwingConstants.RIGHT); //$NON-NLS-1$
        jp.add(enabledLabel, gbc);
        setFieldConstraints(gbc, row);
        enabled = new JCheckBox();
        jp.add(enabled, gbc);
        addFocusLostListeners();
        return jp;
    }

    /**
     * Adds a {@link FocusListener} for the focus lost event to all input fields of the detail panel.
     * This listener updates the configuration and the example.
     */
    private void addFocusLostListeners() {
        name.addFocusListener(new DetailConfigFocusLostListener(){
            @Override
            public void setValue(IWikiTableConfiguration config) throws IllegalArgumentException {
                String newName = name.getText();
                config.setName(newName);
                if(!isUniqueName(config)){
                    throw new IllegalArgumentException(s_stringMgr.getString("WikiTablePreferencesPanel.errorConfigNotUnique")); //$NON-NLS-1$
                }
            }
        });
        tableStartTag.addFocusListener(new DetailConfigFocusLostListener(){
            @Override
            public void setValue(IWikiTableConfiguration config) throws IllegalArgumentException {
                config.setTableStartTag(tableStartTag.getText());
            }
        });
        headerStartTag.addFocusListener(new DetailConfigFocusLostListener(){
            @Override
            public void setValue(IWikiTableConfiguration config) throws IllegalArgumentException {
                config.setHeaderStartTag(headerStartTag.getText());
            }
        });
        headerCellTag.addFocusListener(new DetailConfigFocusLostListener(){
            @Override
            public void setValue(IWikiTableConfiguration config) throws IllegalArgumentException {
                config.setHeaderCell(headerCellTag.getText());
            }
        });
        headerEndTag.addFocusListener(new DetailConfigFocusLostListener(){
            @Override
            public void setValue(IWikiTableConfiguration config) throws IllegalArgumentException {
                config.setHeaderEndTag(headerEndTag.getText());
            }
        });
        rowStartTag.addFocusListener(new DetailConfigFocusLostListener(){
            @Override
            public void setValue(IWikiTableConfiguration config) throws IllegalArgumentException {
                config.setRowStartTag(rowStartTag.getText());
            }
        });
        cellTag.addFocusListener(new DetailConfigFocusLostListener(){
            @Override
            public void setValue(IWikiTableConfiguration config) throws IllegalArgumentException {
                config.setDataCell(cellTag.getText());
            }
        });
        rowEndTag.addFocusListener(new DetailConfigFocusLostListener(){
            @Override
            public void setValue(IWikiTableConfiguration config) throws IllegalArgumentException {
                config.setRowEndTag(rowEndTag.getText());
            }
        });
        tableEndTag.addFocusListener(new DetailConfigFocusLostListener(){
            @Override
            public void setValue(IWikiTableConfiguration config) throws IllegalArgumentException {
                config.setTableEndTag(tableEndTag.getText());
            }
        });
        noWikiTag.addFocusListener(new DetailConfigFocusLostListener(){
            @Override
            public void setValue(IWikiTableConfiguration config) throws IllegalArgumentException {
                config.setNoWikiTag(noWikiTag.getText());
            }
        });
        enabled.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                IWikiTableConfiguration config = currentConfigurationInView;
                config.setEnabled(enabled.isSelected());
            }
        });
    }

    /** Configure {@code gbc} for a right-aligned label in column 0 of the given row. */
    private void setLabelConstraints(GridBagConstraints gbc, int gridy)
    {
        gbc.gridx = 0;
        gbc.gridy = gridy;
        gbc.weightx = 0;
        gbc.insets = LABEL_INSETS;
        gbc.fill = GridBagConstraints.NONE;
        gbc.anchor = GridBagConstraints.EAST;
    }

    /** Configure {@code gbc} for a stretching input field in column 1 of the given row. */
    private void setFieldConstraints(GridBagConstraints gbc, int gridy)
    {
        gbc.gridx = 1;
        gbc.gridy = gridy;
        gbc.weightx = 1;
        gbc.insets = FIELD_INSETS;
        gbc.fill = GridBagConstraints.HORIZONTAL;
        gbc.anchor = GridBagConstraints.WEST;
    }

    public IApplication getApplication() {
        return application;
    }

    public void setApplication(IApplication app) {
        this.application = app;
    }

    /**
     * Adds all available configurations to this page and selects the first one.
     * @param squirrelPreferences the application preferences (currently unused)
     */
    public void loadData(SquirrelPreferences squirrelPreferences) {
        List<IWikiTableConfiguration> configurations = wikiTableConfigFactory.getConfigurations();
        for (IWikiTableConfiguration configuration : configurations) {
            wikiConfigListModel.addElement(configuration);
        }
        wikiConfigList.setSelectedIndex(0);
        IWikiTableConfiguration selectedValue = (IWikiTableConfiguration)wikiConfigList.getSelectedValue();
        showDataFor(selectedValue);
        if(selectedValue == null){
            this.copyButton.setEnabled(false);
        }
    }

    /**
     * Shows the data of a specific configuration in the detail panel.
     * A null selection clears the current configuration and disables the inputs.
     * @param selectedValue the configuration to show, may be null
     */
    private void showDataFor(IWikiTableConfiguration selectedValue) {
        if(selectedValue != null){
            this.currentConfigurationInView = selectedValue;
            fillDetailPane(selectedValue);
            showExample(selectedValue);
        }else{
            this.currentConfigurationInView = null;
            enableInputFields(false);
        }
    }

    /**
     * Enable or disable all input fields of the detail panel.
     * @param editable true to make the fields editable
     */
    private void enableInputFields(boolean editable) {
        this.name.setEnabled(editable);
        this.tableStartTag.setEnabled(editable);
        this.headerStartTag.setEnabled(editable);
        this.headerCellTag.setEnabled(editable);
        this.headerEndTag.setEnabled(editable);
        this.rowStartTag.setEnabled(editable);
        this.cellTag.setEnabled(editable);
        this.rowEndTag.setEnabled(editable);
        this.tableEndTag.setEnabled(editable);
        this.noWikiTag.setEnabled(editable);
        this.deleteButton.setEnabled(editable);
        // the "enabled" checkbox may be toggled for any configuration, even read-only ones
        this.enabled.setEnabled(currentConfigurationInView != null);
    }

    /**
     * Shows the example for a specific configuration by transforming
     * the fully-selected example table with the configuration's transformer.
     */
    private void showExample(IWikiTableConfiguration selectedValue) {
        // select all rows
        exampleTable.changeSelection(0, 0, false, false);
        exampleTable.changeSelection(exampleTable.getRowCount()-1, exampleTable.getColumnCount()-1, true, true);
        String example = selectedValue.createTransformer().transform(this.exampleTable);
        this.exampleText.setText(example);
    }

    /**
     * Fills the detail pane with the values of the selected configuration.
     * @param config the configuration to display
     */
    private void fillDetailPane(IWikiTableConfiguration config) {
        this.name.setText(config.getName());
        this.tableStartTag.setText(config.getTableStartTag());
        this.headerStartTag.setText(config.getHeaderStartTag());
        this.headerCellTag.setText(config.getHeaderCell());
        this.headerEndTag.setText(config.getHeaderEndTag());
        this.rowStartTag.setText(config.getRowStartTag());
        this.cellTag.setText(config.getDataCell());
        this.rowEndTag.setText(config.getRowEndTag());
        this.tableEndTag.setText(config.getTableEndTag());
        this.noWikiTag.setText(config.getNoWikiTag());
        this.enabled.setSelected(config.isEnabled());
        enableInputFields(!config.isReadOnly());
    }

    /**
     * Adds a new configuration to the list and selects it.
     */
    private void addNewConfigToList(IWikiTableConfiguration newConfig) {
        wikiConfigListModel.addElement(newConfig);
        wikiConfigList.setSelectedValue(newConfig, true);
    }

    /**
     * Checks, if the name of a configuration is unique in the system.
     * The identity check skips the configuration under validation itself,
     * so renaming a configuration to its own name is still considered unique.
     */
    private boolean isUniqueName(IWikiTableConfiguration config) {
        boolean unique;
        Object[] configArray = wikiConfigListModel.toArray();
        unique = true;
        for (Object conf : configArray) {
            // BUGFIX: compare against the candidate, not the element with itself
            // (the old check "conf != conf" was always false, so duplicates
            // were never detected)
            if(conf != config){
                if(StringUtils.equalsIgnoreCase(config.getName(), ((IWikiTableConfiguration) conf).getName())){
                    unique = false;
                }
            }
        }
        return unique;
    }

    /**
     * Focus-lost listener that pushes the edited field value into the current
     * configuration and refreshes the example; on validation failure it shows
     * an error dialog and returns the focus to the offending field.
     */
    private abstract class DetailConfigFocusLostListener implements FocusListener{
        /* (non-Javadoc)
         * @see java.awt.event.FocusListener#focusGained(java.awt.event.FocusEvent)
         */
        @Override
        public void focusGained(FocusEvent envent) {
            // nothing
        }

        /* (non-Javadoc)
         * @see java.awt.event.FocusListener#focusLost(java.awt.event.FocusEvent)
         */
        @Override
        public void focusLost(final FocusEvent event) {
            if(!event.isTemporary()){
                IWikiTableConfiguration config = currentConfigurationInView;
                try{
                    setValue(config);
                }catch (IllegalArgumentException e) {
                    application.showErrorDialog(e.getMessage());
                    // put the focus back so the user can correct the input
                    SwingUtilities.invokeLater(new Runnable() {
                        @Override
                        public void run() {
                            event.getComponent().requestFocus();
                        }
                    });
                }
                showExample(config);
            }
        }

        /**
         * Transfers the field's current text into the configuration.
         * @throws IllegalArgumentException if the value is not acceptable
         */
        public abstract void setValue(IWikiTableConfiguration config) throws IllegalArgumentException;
    }

    /**
     * Puts the configurations back into the {@link IWikiTableConfigurationFactory},
     * splitting them into user-specific and built-in (read-only) ones.
     * @see IWikiTableConfigurationFactory#replaceUserSpecificConfigurations(List)
     */
    public void applyChanges() {
        Object[] array = wikiConfigListModel.toArray();
        List<IWikiTableConfiguration> userSpecific = new ArrayList<IWikiTableConfiguration>();
        List<IWikiTableConfiguration> buildIn = new ArrayList<IWikiTableConfiguration>();
        for (Object object : array) {
            IWikiTableConfiguration config = (IWikiTableConfiguration) object;
            if(config.isReadOnly() == false){
                userSpecific.add(config);
            }else{
                buildIn.add(config);
            }
        }
        wikiTableConfigFactory.replaceUserSpecificConfigurations(userSpecific);
        wikiTableConfigFactory.replaceBuilInConfiguration(buildIn);
    }

    public IWikiTableConfigurationFactory getWikiTableConfigFactory() {
        return wikiTableConfigFactory;
    }

    public void setWikiTableConfigFactory(IWikiTableConfigurationFactory wikiTableConfigFactory) {
        this.wikiTableConfigFactory = wikiTableConfigFactory;
    }
}
| |
package ee.pri.bcup.client.pool.model;
import java.io.Serializable;
import java.util.ArrayList;
import lombok.Data;
import org.apache.log4j.Logger;
import ee.pri.bcup.client.pool.PoolAppletContext;
import ee.pri.bcup.client.pool.model.hit.HitResult;
import ee.pri.bcup.client.pool.model.hit.HitTimer;
import ee.pri.bcup.client.pool.model.listener.PoolFeedback;
import ee.pri.bcup.client.pool.model.rules.PoolRuleSet;
import ee.pri.bcup.common.message.ListenerScope;
import ee.pri.bcup.common.message.MessageReceiver;
import ee.pri.bcup.common.message.server.pool.ServerPoolEndMessage;
import ee.pri.bcup.common.message.server.pool.ServerPoolSpeedMessage;
import ee.pri.bcup.common.model.GamePartyType;
import ee.pri.bcup.common.model.GameType;
import ee.pri.bcup.common.model.Player;
import ee.pri.bcup.common.model.pool.table.Ball;
import ee.pri.bcup.common.model.pool.table.BallOwnershipKind;
import ee.pri.bcup.common.model.pool.table.DoubleTuple;
import ee.pri.bcup.common.model.pool.table.GameState;
import ee.pri.bcup.common.model.pool.table.HitState;
import ee.pri.bcup.common.model.pool.table.Table;
import ee.pri.bcup.common.model.pool.table.TurnState;
@Data
public class PoolGame implements Serializable {
private static final long serialVersionUID = 1L;
private static final Logger log = Logger.getLogger(PoolGame.class);
private Table table;
private GameState gameState = GameState.START_HIT;
private HitState hitState = HitState.PLACE;
private TurnState turnState = TurnState.LEFT;
private GamePartyType partyType;
private int startTries = 0;
private BallOwnershipKind leftBallsOwnership;
private PoolAppletContext appletContext;
private HitTimer hitTimer;
private Animator animator;
private PoolFeedback poolFeedback;
private boolean hadWhite = false;
private boolean hitTimeout = false;
private PoolSynchronizerHelper synchronizerHelper;
private Player winner;
private PoolRuleSet poolRuleSet = new PoolRuleSet();
private Player left;
private Player right;
private SpinHint spinHint = new SpinHint();
/**
 * Builds a new pool game: creates the table for the given game type,
 * starts the hit timer, wires the animator/feedback helpers, resolves
 * the left/right players from the applet context and registers this
 * instance's message listeners at GAME scope.
 */
public PoolGame(
        final PoolAppletContext appletContext,
        final GamePartyType partyType,
        long maxTimeInSeconds,
        boolean myTurn,
        GameType gameType) {
    log.debug("creating pool game");
    this.appletContext = appletContext;
    this.partyType = partyType;
    this.table = new Table(gameType);
    this.poolFeedback = new PoolFeedback();
    this.hitTimer = new HitTimer(this, maxTimeInSeconds);
    this.hitTimer.start();
    this.animator = new Animator(this);
    turnState = myTurn ? TurnState.LEFT : TurnState.RIGHT;
    // observers watch buddy vs. opponent; players play against the propose partner
    if (partyType == GamePartyType.OBSERVER) {
        left = appletContext.getObserverContext().getBuddy();
        right = appletContext.getObserverContext().getOpponent();
    } else {
        left = appletContext.getPlayer();
        right = appletContext.getProposeContext().getProposePartner();
    }
    // both players start with empty pocketed-ball lists
    left.setBalls(new ArrayList<Ball>());
    right.setBalls(new ArrayList<Ball>());
    appletContext.processListeners(this, ListenerScope.GAME);
}
@MessageReceiver(ServerPoolSpeedMessage.class)
public void receive(ServerPoolSpeedMessage message) {
if (getTurnState() == TurnState.RIGHT || partyType == GamePartyType.OBSERVER) {
setHitState(HitState.OTHER);
Ball whiteBall = table.getWhiteBall();
whiteBall.setLocation(new DoubleTuple(message.getLocationX(), message.getLocationY()));
whiteBall.setSpeed(new DoubleTuple(message.getSpeedX(), message.getSpeedY()));
whiteBall.setHorisontalSpin(message.getHorisontalSpin());
whiteBall.setVerticalSpin(message.getVerticalSpin());
hit();
}
}
@MessageReceiver(ServerPoolEndMessage.class)
public void receive(ServerPoolEndMessage message) {
Player player = appletContext.getPlayer(message.getWinnerPlayerId());
hitTimer.stop();
poolFeedback.gameUserMessage(appletContext.getMessage("pool.field.message.won.other", player), true);
}
public void resetTable() {
table.reset();
left.setBalls(new ArrayList<Ball>());
right.setBalls(new ArrayList<Ball>());
}
public void setHitState(HitState hitState) {
this.hitState = hitState;
}
public void hit() {
table.getWhiteBall().setOnTable(true);
table.getWhiteBall().setVisible(true);
poolFeedback.hit();
animator.hit();
table.cleanHoles();
hitTimer.stop();
}
public boolean isAllLeftBallsIn() {
if (leftBallsOwnership == null) {
return false;
}
for (Ball ball : table.getBalls()) {
if (ball.getType().getKind() == leftBallsOwnership && ball.isOnTable()) {
return false;
}
}
return true;
}
public boolean isAllRightBallsIn() {
if (leftBallsOwnership == null) {
return false;
}
for (Ball ball : table.getBalls()) {
if (ball.getType().getKind() == leftBallsOwnership.getOpposite() && ball.isOnTable()) {
return false;
}
}
return true;
}
public void confirmUserMessage() {
if (gameState == GameState.END) {
appletContext.exitGame();
}
}
/**
* Called by {@link HitTimer} when the hit timeout has occurred.
*/
public void hitTimeoutOccurred() {
hitTimeout = true;
poolRuleSet.execute(appletContext, null);
}
/**
* Called by {@link Animator} when balls have stopped
* moving after hit.
*/
public void ballsStopped() {
HitResult hitResult = new HitResult(this);
poolRuleSet.execute(appletContext, hitResult);
synchronizerHelper.runSynchronizers();
}
public BallOwnershipKind resolveOwenership(Ball ball) {
if (left.getBalls().contains(ball)) {
return leftBallsOwnership;
} else if (right.getBalls().contains(ball)) {
return leftBallsOwnership.getOpposite();
}
return null;
}
/**
* Used in game rules.
*/
public boolean isBallOwnershipDecided() {
return leftBallsOwnership != null;
}
public void destroy() {
hitTimer.stop();
animator.stop();
}
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.jvm.java;
import static com.facebook.buck.jvm.java.JavaBuckConfig.TARGETED_JAVA_VERSION;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeTrue;
import com.facebook.buck.cli.BuckConfig;
import com.facebook.buck.cli.BuckConfigTestUtils;
import com.facebook.buck.cli.FakeBuckConfig;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.parser.NoSuchBuildTargetException;
import com.facebook.buck.testutil.integration.DebuggableTemporaryFolder;
import com.facebook.buck.util.HumanReadableException;
import com.facebook.buck.util.environment.Architecture;
import com.facebook.buck.util.environment.Platform;
import com.google.common.base.Functions;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableMap;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.nio.file.Path;
/**
 * Unit tests for {@link JavaBuckConfig}: resolution of the javac binary and
 * javac jar from a .buckconfig-style [tools] section, and defaulting of
 * source/target levels and per-source-level bootclasspaths.
 */
public class JavaBuckConfigTest {

  @Rule
  public DebuggableTemporaryFolder temporaryFolder = new DebuggableTemporaryFolder();

  private ProjectFilesystem defaultFilesystem;

  @Before
  public void setUpDefaultFilesystem() {
    defaultFilesystem = new ProjectFilesystem(temporaryFolder.getRootPath());
  }

  @Test
  public void whenJavacIsNotSetThenAbsentIsReturned() throws IOException {
    JavaBuckConfig config = createWithDefaultFilesystem(new StringReader(""));
    assertEquals(Optional.absent(), config.getJavacPath());
  }

  @Test
  public void whenJavacExistsAndIsExecutableThenCorrectPathIsReturned() throws IOException {
    File javac = temporaryFolder.newFile();
    assertTrue(javac.setExecutable(true));
    // Backslashes must be escaped so Windows paths survive the ini parser.
    Reader reader = new StringReader(
        Joiner.on('\n').join(
            "[tools]",
            "    javac = " + javac.toPath().toString().replace("\\", "\\\\")));
    JavaBuckConfig config = createWithDefaultFilesystem(reader);
    assertEquals(Optional.of(javac.toPath()), config.getJavacPath());
  }

  @Test
  public void whenJavacDoesNotExistThenHumanReadableExceptionIsThrown() throws IOException {
    String invalidPath = temporaryFolder.getRoot().getAbsolutePath() + "DoesNotExist";
    Reader reader = new StringReader(Joiner.on('\n').join(
        "[tools]",
        "    javac = " + invalidPath.replace("\\", "\\\\")));
    JavaBuckConfig config = createWithDefaultFilesystem(reader);
    try {
      config.getJavacPath();
      fail("Should throw exception as javac file does not exist.");
    } catch (HumanReadableException e) {
      // Fixed: assertEquals takes (expected, actual) — arguments were swapped,
      // which inverts the failure message.
      assertEquals("Javac does not exist: " + invalidPath, e.getHumanReadableErrorMessage());
    }
  }

  @Test
  public void whenJavacIsNotExecutableThenHumanReadableExeceptionIsThrown() throws IOException {
    File javac = temporaryFolder.newFile();
    // Some filesystems (e.g. as root, or on Windows) cannot clear the bit.
    assumeTrue("Should be able to set file non-executable", javac.setExecutable(false));
    Reader reader = new StringReader(Joiner.on('\n').join(
        "[tools]",
        "    javac = " + javac.toPath().toString()));
    JavaBuckConfig config = createWithDefaultFilesystem(reader);
    try {
      config.getJavacPath();
      fail("Should throw exception as javac file is not executable.");
    } catch (HumanReadableException e) {
      // Fixed: expected value goes first in assertEquals.
      assertEquals("Javac is not executable: " + javac.getPath(), e.getHumanReadableErrorMessage());
    }
  }

  @Test
  public void whenJavacJarDoesNotExistThenHumanReadableExceptionIsThrown() throws IOException {
    String invalidPath = temporaryFolder.getRoot().getAbsolutePath() + "DoesNotExist";
    Reader reader = new StringReader(Joiner.on('\n').join(
        "[tools]",
        "    javac_jar = " + invalidPath.replace("\\", "\\\\")));
    JavaBuckConfig config = createWithDefaultFilesystem(reader);
    try {
      config.getJavacJarPath();
      fail("Should throw exception as javac file does not exist.");
    } catch (HumanReadableException e) {
      assertEquals(
          "Overridden tools:javac_jar path not found: " + invalidPath,
          e.getHumanReadableErrorMessage());
    }
  }

  @Test
  public void shouldSetJavaTargetAndSourceVersionFromConfig()
      throws IOException, InterruptedException {
    String sourceLevel = "source-level";
    String targetLevel = "target-level";
    String localConfig = String.format(
        "[java]\nsource_level = %s\ntarget_level = %s",
        sourceLevel,
        targetLevel);
    JavaBuckConfig config = createWithDefaultFilesystem(new StringReader(localConfig));
    JavacOptions options = config.getDefaultJavacOptions();
    assertEquals(sourceLevel, options.getSourceLevel());
    assertEquals(targetLevel, options.getTargetLevel());
  }

  @Test
  public void shouldSetJavaTargetAndSourceVersionDefaultToSaneValues()
      throws IOException, InterruptedException {
    JavaBuckConfig config = createWithDefaultFilesystem(new StringReader(""));
    JavacOptions options = config.getDefaultJavacOptions();
    assertEquals(TARGETED_JAVA_VERSION, options.getSourceLevel());
    assertEquals(TARGETED_JAVA_VERSION, options.getTargetLevel());
  }

  @Test
  public void shouldPopulateTheMapOfSourceLevelToBootclasspath()
      throws IOException, InterruptedException {
    String localConfig = "[java]\nbootclasspath-6 = one.jar\nbootclasspath-7 = two.jar";
    JavaBuckConfig config = createWithDefaultFilesystem(new StringReader(localConfig));
    JavacOptions options = config.getDefaultJavacOptions();
    JavacOptions jse5 = JavacOptions.builder(options).setSourceLevel("5").build();
    JavacOptions jse6 = JavacOptions.builder(options).setSourceLevel("6").build();
    JavacOptions jse7 = JavacOptions.builder(options).setSourceLevel("7").build();
    // No bootclasspath configured for level 5; 6 and 7 pick up their jars.
    assertOptionKeyAbsent(jse5, "bootclasspath");
    assertOptionsContains(jse6, "bootclasspath", "one.jar");
    assertOptionsContains(jse7, "bootclasspath", "two.jar");
  }

  @Test
  public void whenJavacIsNotSetInBuckConfigConfiguredRulesCreateJavaLibraryRuleWithJsr199Javac()
      throws IOException, NoSuchBuildTargetException, InterruptedException {
    BuckConfig buckConfig = FakeBuckConfig.builder().build();
    JavaBuckConfig javaConfig = new JavaBuckConfig(buckConfig);
    JavacOptions javacOptions = javaConfig.getDefaultJavacOptions();
    Javac javac = javacOptions.getJavac();
    assertTrue(javac.getClass().toString(), javac instanceof Jsr199Javac);
  }

  @Test
  public void whenJavacIsSetInBuckConfigConfiguredRulesCreateJavaLibraryRuleWithJavacSet()
      throws IOException, NoSuchBuildTargetException, InterruptedException {
    final File javac = temporaryFolder.newFile();
    // Fixed: the setExecutable result was silently ignored here while the
    // sibling test asserts it; assert for consistency and to catch platforms
    // where the bit cannot be set.
    assertTrue(javac.setExecutable(true));
    ImmutableMap<String, ImmutableMap<String, String>> sections = ImmutableMap.of(
        "tools", ImmutableMap.of("javac", javac.toString()));
    BuckConfig buckConfig = FakeBuckConfig.builder().setSections(sections).build();
    JavaBuckConfig javaConfig = new JavaBuckConfig(buckConfig);
    JavacOptions javacOptions = javaConfig.getDefaultJavacOptions();
    assertEquals(
        javac.toPath(),
        ((ExternalJavac) javacOptions.getJavac()).getPath());
  }

  /** Asserts that visiting {@code options} produces no entry for {@code key}. */
  private void assertOptionKeyAbsent(JavacOptions options, String key) {
    OptionAccumulator optionsConsumer = visitOptions(options);
    assertThat(optionsConsumer.keyVals, not(hasKey(key)));
  }

  /** Asserts that visiting {@code options} produces {@code key=value}. */
  private void assertOptionsContains(
      JavacOptions options,
      String key,
      String value) {
    OptionAccumulator optionsConsumer = visitOptions(options);
    assertThat(optionsConsumer.keyVals, hasEntry(key, value));
  }

  /** Collects all javac options into an accumulator for inspection. */
  private OptionAccumulator visitOptions(JavacOptions options) {
    OptionAccumulator optionsConsumer = new OptionAccumulator();
    options.appendOptionsTo(optionsConsumer, Functions.<Path>identity());
    return optionsConsumer;
  }

  /** Parses {@code reader} as a BuckConfig against the default filesystem. */
  private JavaBuckConfig createWithDefaultFilesystem(Reader reader)
      throws IOException {
    BuckConfig raw = BuckConfigTestUtils.createFromReader(
        reader,
        defaultFilesystem,
        Architecture.detect(),
        Platform.detect(),
        ImmutableMap.copyOf(System.getenv()));
    return new JavaBuckConfig(raw);
  }
}
| |
/*
* Copyright (c) 2008-2010, Zepheira LLC Some rights reserved.
* Copyright (c) 2011 Talis Inc., Some rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* - Neither the name of the openrdf.org nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
*/
package org.openrdf.repository.object.compiler.model;
import java.io.File;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import org.openrdf.annotations.Iri;
import org.openrdf.model.Model;
import org.openrdf.model.Resource;
import org.openrdf.model.URI;
import org.openrdf.model.Value;
import org.openrdf.model.datatypes.XMLDatatypeUtil;
import org.openrdf.model.impl.URIImpl;
import org.openrdf.model.vocabulary.OWL;
import org.openrdf.model.vocabulary.RDF;
import org.openrdf.model.vocabulary.RDFS;
import org.openrdf.repository.object.RDFObject;
import org.openrdf.repository.object.compiler.JavaNameResolver;
import org.openrdf.repository.object.compiler.RDFList;
import org.openrdf.repository.object.compiler.source.JavaMessageBuilder;
import org.openrdf.repository.object.compiler.source.JavaMethodBuilder;
import org.openrdf.repository.object.compiler.source.JavaPropertyBuilder;
import org.openrdf.repository.object.exceptions.ObjectStoreConfigException;
import org.openrdf.repository.object.traits.RDFObjectBehaviour;
import org.openrdf.repository.object.vocabulary.MSG;
/**
* Helper object for traversing the OWL model.
*
* @author James Leigh
*
*/
public class RDFClass extends RDFEntity {
  // owl:Nothing — the empty class; used to filter meaningless restrictions.
  private static final URI NOTHING = new URIImpl(OWL.NAMESPACE + "Nothing");

  public RDFClass(Model model, Resource self) {
    super(model, self);
  }

  /**
   * True when this class denotes a literal datatype: an XSD built-in,
   * rdfs:Literal, rdf:PlainLiteral, or anything explicitly typed
   * rdfs:Datatype.
   */
  public boolean isDatatype() {
    if (self instanceof URI
        && XMLDatatypeUtil.isBuiltInDatatype((URI) self))
      return true;
    if (self.equals(RDFS.LITERAL))
      return true;
    if (self instanceof URI) {
      URI uri = (URI) self;
      if (uri.getNamespace().equals(RDF.NAMESPACE)
          && uri.getLocalName().equals("PlainLiteral"))
        return true;
    }
    return isA(RDFS.DATATYPE);
  }

  public RDFClass getRange(URI pred) {
    return getRange(new RDFProperty(model, pred));
  }

  public RDFClass getRange(RDFProperty property) {
    return getRange(property, true);
  }

  /**
   * Range of {@code property} as seen from this class, defaulting to
   * rdfs:Resource when nothing more specific is declared.
   *
   * @param convariant when true, owl:allValuesFrom restrictions on this
   *        class (and its superclasses) take precedence over rdfs:range
   *        (note: parameter name is a long-standing typo for "covariant")
   */
  public RDFClass getRange(RDFProperty property, boolean convariant) {
    RDFClass type = getRangeOrNull(property, convariant);
    if (type == null)
      return new RDFClass(getModel(), RDFS.RESOURCE);
    return type;
  }

  /**
   * Resolution order: (1) owl:allValuesFrom restrictions here (if
   * convariant), (2) recursion into superclasses, (3) the property's own
   * rdfs:range, (4) ranges inherited via rdfs:subPropertyOf.
   */
  private RDFClass getRangeOrNull(RDFProperty property, boolean convariant) {
    if (convariant) {
      for (RDFClass c : getRDFClasses(RDFS.SUBCLASSOF)) {
        if (c.isA(OWL.RESTRICTION)) {
          if (property.equals(c.getRDFProperty(OWL.ONPROPERTY))) {
            RDFClass type = c.getRDFClass(OWL.ALLVALUESFROM);
            if (type != null) {
              return type;
            }
          }
        }
      }
    }
    for (RDFClass c : getRDFClasses(RDFS.SUBCLASSOF)) {
      // Skip restrictions (handled above), self-cycles, and msg:Message.
      if (c.isA(OWL.RESTRICTION) || c.equals(this) || MSG.MESSAGE.equals(c.getURI()))
        continue;
      RDFClass type = ((RDFClass) c).getRangeOrNull(property, convariant);
      if (type != null) {
        return type;
      }
    }
    for (RDFClass r : property.getRDFClasses(RDFS.RANGE)) {
      // First declared range wins.
      return r;
    }
    for (RDFProperty p : property.getRDFProperties(RDFS.SUBPROPERTYOF)) {
      RDFClass superRange = getRangeOrNull(p, convariant);
      if (superRange != null) {
        return superRange;
      }
    }
    return null;
  }

  public boolean isFunctional(RDFProperty property) {
    return isFunctionalProperty(property);
  }

  /**
   * Merged RDF list of all values of {@code pred} on this resource.
   * When several lists are declared they are unioned in iteration order.
   *
   * NOTE(review): if the first object iterated is not a Resource, "list"
   * stays null and a later iteration NPEs on list.equals(other); a
   * non-Resource object would also CCE in the else branch. Presumably the
   * model guarantees Resource objects here — confirm. The raw (List) cast
   * relies on the underlying list being mutable.
   */
  public List<? extends Value> getList(URI pred) {
    List<? extends Value> list = null;
    for (Value obj : model.filter(self, pred, null).objects()) {
      if (list == null && obj instanceof Resource) {
        list = new RDFList(model, (Resource) obj).asList();
      } else {
        List<? extends Value> other = new RDFList(model, (Resource) obj)
            .asList();
        if (!list.equals(other)) {
          other.removeAll(list);
          ((List) list).addAll(other);
        }
      }
    }
    return list;
  }

  /**
   * All parameter properties of this (message) class, sorted by URI,
   * collected from this class and its superclasses.
   */
  public List<RDFProperty> getParameters() {
    TreeSet<String> set = new TreeSet<String>();
    addParameters(set, new HashSet<Value>());
    List<RDFProperty> list = new ArrayList<RDFProperty>();
    for (String uri : set) {
      list.add(new RDFProperty(model, new URIImpl(uri)));
    }
    return list;
  }

  /**
   * Chooses which of the four msg response properties (objectSet,
   * literalSet, object, literal) this message uses, preferring
   * restrictions with a meaningful (non-Resource, non-Nothing, non-zero
   * cardinality) range, then progressively relaxing; defaults to
   * msg:objectSet.
   */
  public RDFProperty getResponseProperty() {
    Set<RDFProperty> set = new HashSet<RDFProperty>();
    set.add(new RDFProperty(model, MSG.OBJECT_SET));
    set.add(new RDFProperty(model, MSG.LITERAL_SET));
    set.add(new RDFProperty(model, MSG.OBJECT));
    set.add(new RDFProperty(model, MSG.LITERAL));
    // Pass 1: restriction must have a useful range and non-zero cardinality.
    for (RDFClass c : getRestrictions()) {
      RDFProperty property = c.getRDFProperty(OWL.ONPROPERTY);
      String valuesFrom = c.getString(OWL.ALLVALUESFROM);
      if (RDFS.RESOURCE.stringValue().equals(valuesFrom))
        continue;
      if (NOTHING.stringValue().equals(valuesFrom))
        continue;
      BigInteger card = c.getBigInteger(OWL.CARDINALITY);
      if (card != null && 0 == card.intValue())
        continue;
      BigInteger max = c.getBigInteger(OWL.MAXCARDINALITY);
      if (max != null && 0 == max.intValue())
        continue;
      if (set.contains(property))
        return property;
    }
    // Pass 2: ignore cardinality, still require a useful range.
    for (RDFClass c : getRestrictions()) {
      RDFProperty property = c.getRDFProperty(OWL.ONPROPERTY);
      String valuesFrom = c.getString(OWL.ALLVALUESFROM);
      if (RDFS.RESOURCE.stringValue().equals(valuesFrom))
        continue;
      if (set.contains(property))
        return property;
    }
    // Pass 3: any restriction on a response property.
    for (RDFClass c : getRestrictions()) {
      RDFProperty property = c.getRDFProperty(OWL.ONPROPERTY);
      if (set.contains(property))
        return property;
    }
    return new RDFProperty(model, MSG.OBJECT_SET);
  }

  /**
   * True when some (transitive) restriction pins {@code property} to
   * exactly one value (min and max cardinality 1, or cardinality 1).
   */
  public boolean isMinCardinality(RDFProperty property) {
    BigInteger one = BigInteger.valueOf(1);
    for (RDFClass c : getRDFClasses(RDFS.SUBCLASSOF)) {
      if (c.isA(OWL.RESTRICTION)) {
        if (property.equals(c.getRDFProperty(OWL.ONPROPERTY))) {
          if (one.equals(c.getBigInteger(OWL.MAXCARDINALITY))
              && one.equals(c.getBigInteger(OWL.MINCARDINALITY))
              || one.equals(c.getBigInteger(OWL.CARDINALITY))) {
            return true;
          }
        }
      } else if (equals(c)) {
        continue;
      } else if (c.isMinCardinality(property)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Whether this class would generate an empty Java interface.
   *
   * NOTE(review): as written this always returns false — the final return
   * is false even after all emptiness checks pass. The TODO below suggests
   * annotation checking was never finished; presumably the final statement
   * was meant to become "return true" once it is. Confirm before relying
   * on this method.
   */
  public boolean isEmpty(JavaNameResolver resolver) {
    Collection<RDFProperty> properties = getDeclaredProperties();
    if (properties.size() > 1)
      return false;
    if (!properties.isEmpty()) {
      URI uri = properties.iterator().next().getURI();
      if (!MSG.TARGET.equals(uri))
        return false;
    }
    if (!getDeclaredMessages().isEmpty())
      return false;
    // TODO check annotations
    return false;
  }

  /**
   * Generates the Java source for this class into {@code dir}: a concrete
   * value class (String-backed) for datatypes, otherwise an interface with
   * properties and message methods.
   *
   * @return the written source file
   */
  public File generateSourceCode(File dir, JavaNameResolver resolver)
      throws Exception {
    File source = createSourceFile(dir, resolver);
    if (isDatatype()) {
      JavaMessageBuilder builder = new JavaMessageBuilder(source, resolver);
      String pkg = resolver.getPackageName(this.getURI());
      String simple = resolver.getSimpleName(getURI());
      if (pkg == null) {
        builder.imports(simple);
      } else {
        builder.pkg(pkg);
        builder.imports(pkg + '.' + simple);
      }
      classHeader(simple, builder);
      stringConstructor(builder);
      builder.close();
    } else {
      JavaMessageBuilder builder = new JavaMessageBuilder(source, resolver);
      interfaceHeader(builder);
      constants(builder);
      for (RDFProperty prop : getDeclaredProperties()) {
        property(builder, prop);
      }
      for (RDFClass type : getDeclaredMessages()) {
        builder.message(type);
      }
      builder.close();
    }
    return source;
  }

  /** All (inherited) properties that are functional with a datatype range. */
  public List<RDFProperty> getFunctionalDatatypeProperties() {
    List<RDFProperty> list = new ArrayList<RDFProperty>();
    for (RDFProperty property : getProperties()) {
      if (isFunctional(property) && getRange(property).isDatatype()) {
        list.add(property);
      }
    }
    return list;
  }

  /**
   * Message classes whose msg:target restriction points at this class,
   * i.e. the messages this generated interface must declare.
   */
  public Collection<RDFClass> getDeclaredMessages() {
    Set<RDFClass> set = new TreeSet<RDFClass>();
    for (Resource res : model.filter(null, OWL.ALLVALUESFROM, self)
        .subjects()) {
      if (model.contains(res, OWL.ONPROPERTY, MSG.TARGET)) {
        for (Resource msg : model.filter(null, RDFS.SUBCLASSOF, res)
            .subjects()) {
          if (MSG.MESSAGE.equals(msg))
            continue;
          RDFClass rc = new RDFClass(model, msg);
          if (rc.isMessageClass()) {
            set.add(rc);
          }
        }
      }
    }
    return set;
  }

  /**
   * Properties declared directly on this class: those whose rdfs:domain is
   * this class, plus properties mentioned by local restrictions (when the
   * functional-ness agrees with the property declaration), sorted by URI.
   *
   * NOTE(review): in this class isFunctional simply delegates to
   * isFunctionalProperty, so the equality check is always true here;
   * presumably a subclass overrides one of them — confirm.
   */
  public Collection<RDFProperty> getDeclaredProperties() {
    TreeSet<String> set = new TreeSet<String>();
    for (Resource prop : model.filter(null, RDFS.DOMAIN, self).subjects()) {
      if (prop instanceof URI) {
        set.add(prop.stringValue());
      }
    }
    for (RDFClass res : getRDFClasses(RDFS.SUBCLASSOF)) {
      if (res.isA(OWL.RESTRICTION)) {
        RDFProperty prop = res.getRDFProperty(OWL.ONPROPERTY);
        if (isFunctional(prop) == isFunctionalProperty(prop)) {
          set.add(prop.getURI().stringValue());
        }
      }
    }
    List<RDFProperty> list = new ArrayList<RDFProperty>(set.size());
    for (String uri : set) {
      list.add(new RDFProperty(model, new URIImpl(uri)));
    }
    return list;
  }

  /** All owl:Restrictions reachable through the superclass closure. */
  public Collection<RDFClass> getRestrictions() {
    Collection<RDFClass> restrictions = new LinkedHashSet<RDFClass>();
    for (RDFClass c : getRDFClasses(RDFS.SUBCLASSOF)) {
      if (c.isA(OWL.RESTRICTION)) {
        restrictions.add(c);
      } else if (equals(c)) {
        // Guard against self-referential subclass cycles.
        continue;
      } else {
        restrictions.addAll(c.getRestrictions());
      }
    }
    return restrictions;
  }

  /** Single resource object of {@code pred} as a property, or null. */
  public RDFProperty getRDFProperty(URI pred) {
    Resource subj = model.filter(self, pred, null).objectResource();
    if (subj == null)
      return null;
    return new RDFProperty(model, subj);
  }

  /** True when this class is (transitively) a subclass of msg:Message. */
  public boolean isMessageClass() {
    return isMessage(this, new HashSet<RDFEntity>());
  }

  /**
   * Functional iff declared owl:FunctionalProperty, or one of the
   * single-valued msg built-ins (target, literal, object).
   */
  protected boolean isFunctionalProperty(RDFProperty property) {
    if (property.isA(OWL.FUNCTIONALPROPERTY))
      return true;
    URI uri = property.getURI();
    if (uri.equals(MSG.TARGET)
        || uri.equals(MSG.LITERAL)
        || uri.equals(MSG.OBJECT))
      return true;
    return false;
  }

  /** Emits package, @Iri annotation, interface name and extends clause. */
  private void interfaceHeader(JavaMessageBuilder builder)
      throws ObjectStoreConfigException {
    String pkg = builder.getPackageName(this.getURI());
    String simple = builder.getSimpleName(this.getURI());
    if (pkg == null) {
      builder.imports(simple);
    } else {
      builder.pkg(pkg);
      builder.imports(pkg + '.' + simple);
    }
    builder.comment(this);
    if (this.isA(OWL.DEPRECATEDCLASS)) {
      builder.annotate(Deprecated.class);
    }
    builder.annotationProperties(this);
    if (!builder.isAnonymous(this.getURI())) {
      builder.annotateURI(Iri.class, builder.getType(this.getURI()));
    }
    builder.interfaceName(simple);
    for (RDFClass sups : this.getRDFClasses(RDFS.SUBCLASSOF)) {
      if (sups.getURI() == null || sups.equals(this))
        continue;
      builder.extend(builder.getClassName(sups.getURI()));
    }
  }

  /**
   * Emits static URI constants for an owl:oneOf enumeration, sanitizing
   * local names into Java identifiers, de-duplicating with numeric
   * suffixes, upper-casing when unambiguous, and adding an ONEOF array.
   */
  private void constants(JavaMessageBuilder builder) {
    List<? extends Value> oneOf = this.getList(OWL.ONEOF);
    if (oneOf != null) {
      Map<String, URI> names = new LinkedHashMap<String, URI>();
      for (Value one : oneOf) {
        if (one instanceof URI) {
          URI uri = (URI) one;
          String localPart = uri.getLocalName();
          if (localPart.length() < 1) {
            localPart = uri.stringValue();
          }
          // Replace a leading non-letter and any non-word runs with '_'.
          String name = localPart.replaceAll("^[^a-zA-Z]", "_")
              .replaceAll("\\W+", "_");
          if (names.containsKey(name)) {
            int count = 1;
            while (names.containsKey(name + '_' + count)) {
              count++;
            }
            name = name + '_' + count;
          }
          names.put(name, uri);
        }
      }
      if (!names.isEmpty()) {
        names = toUpperCase(names);
        for (Map.Entry<String, URI> e : names.entrySet()) {
          builder.staticURIField(e.getKey(), e.getValue());
        }
        if (!names.containsKey("ONEOF")) {
          builder.staticURIArrayField("ONEOF", names.keySet());
        }
      }
    }
  }

  /**
   * Upper-cases all constant names unless that would collide; on collision
   * the original (case-sensitive) map is returned unchanged.
   */
  private Map<String, URI> toUpperCase(Map<String, URI> words) {
    Map<String, URI> insensitive = new LinkedHashMap<String, URI>();
    for (String local : words.keySet()) {
      String upper = local.toUpperCase();
      if (insensitive.containsKey(upper))
        return words; // case sensitive
      insensitive.put(upper, words.get(local));
    }
    return insensitive;
  }

  /**
   * Emits the String-backed value-class members for a datatype: a static
   * valueOf factory, a String constructor (delegating to super when this
   * datatype extends another), and — for root datatypes — the value field
   * plus toString/hashCode/equals.
   */
  private void stringConstructor(JavaMessageBuilder builder)
      throws ObjectStoreConfigException {
    String cn = builder.getClassName(this.getURI());
    String simple = builder.getSimpleName(this.getURI());
    JavaMethodBuilder method = builder.staticMethod("valueOf");
    method.returnType(cn);
    method.param(String.class.getName(), "value");
    method.code("return new ").code(simple).code("(value);").end();
    boolean child = false;
    for (RDFClass sups : this.getRDFClasses(RDFS.SUBCLASSOF)) {
      if (sups.getURI() == null || sups.equals(this))
        continue;
      // rdfs:Literal rdfs:subClassOf rdfs:Resource
      if (!sups.isDatatype())
        continue;
      child = true;
    }
    if (child) {
      JavaMethodBuilder code = builder.constructor();
      code.param(String.class.getName(), "value");
      code.code("super(value);");
      code.end();
    } else {
      builder.field(String.class.getName(), "value");
      JavaMethodBuilder code = builder.constructor();
      code.param(String.class.getName(), "value");
      code.code("this.value = value;");
      code.end();
      code = builder.method("toString", false).returnType(
          String.class.getName());
      code.code("return value;").end();
      code = builder.method("hashCode", false).returnType("int");
      code.code("return value.hashCode();").end();
      code = builder.method("equals", false).returnType("boolean");
      code.param(Object.class.getName(), "o");
      String equals = "return getClass().equals(o.getClass()) && toString().equals(o.toString());";
      code.code(equals).end();
    }
  }

  /**
   * Emits the getter/setter pair for one property, carrying over comments,
   * deprecation, annotation properties (including those from matching
   * restrictions) and the @Iri type.
   */
  private void property(JavaMessageBuilder builder, RDFProperty prop)
      throws ObjectStoreConfigException {
    JavaPropertyBuilder prop1 = builder.property(builder.getPropertyName(
        this, prop));
    builder.comment(prop1, prop);
    if (prop.isA(OWL.DEPRECATEDPROPERTY)) {
      prop1.annotate(Deprecated.class);
    }
    builder.annotationProperties(prop1, prop);
    for (RDFClass c : getRestrictions()) {
      RDFProperty property = c.getRDFProperty(OWL.ONPROPERTY);
      if (prop.equals(property)) {
        builder.annotationProperties(prop1, c);
      }
    }
    URI type = builder.getType(prop.getURI());
    prop1.annotateURI(Iri.class, type);
    String className = builder.getPropertyClassName(this, prop);
    // Functional properties map to a single value, others to a Set.
    if (this.isFunctional(prop)) {
      prop1.type(className);
    } else {
      prop1.setOf(className);
    }
    prop1.getter();
    builder.comment(prop1, prop);
    if (prop.isA(OWL.DEPRECATEDPROPERTY)) {
      prop1.annotate(Deprecated.class);
    }
    builder.annotationProperties(prop1, prop);
    prop1.annotateURI(Iri.class, type);
    prop1.openSetter();
    prop1.closeSetter();
    prop1.end();
  }

  /**
   * Accumulates parameter-property URIs from this class and its
   * superclasses; {@code skip} breaks subclass cycles.
   */
  private void addParameters(Set<String> parameters, Set<Value> skip) {
    for (Resource prop : model.filter(null, RDFS.DOMAIN, self).subjects()) {
      if (isParameter(prop)) {
        parameters.add(prop.stringValue());
      }
    }
    for (Value sup : model.filter(self, RDFS.SUBCLASSOF, null).objects()) {
      if (isRDFSOrOWL(sup) || !skip.add(sup))
        continue;
      new RDFClass(model, (Resource) sup).addParameters(parameters, skip);
    }
  }

  /**
   * NOTE(review): despite taking {@code sup}, this inspects the namespace
   * of {@code self} — it stops the superclass walk when THIS class is in
   * an RDF/RDFS/OWL namespace, not when the superclass is. Looks like a
   * bug ({@code sup} unused beyond the instanceof) — confirm intent before
   * changing, as addParameters depends on the current behavior.
   */
  private boolean isRDFSOrOWL(Value sup) {
    if (self instanceof URI && sup instanceof URI) {
      String ns = ((URI) self).getNamespace();
      return ns.equals(RDF.NAMESPACE) || ns.equals(RDFS.NAMESPACE)
          || ns.equals(OWL.NAMESPACE);
    }
    return false;
  }

  /** A parameter is a non-annotation URI property outside the msg namespace. */
  private boolean isParameter(Resource prop) {
    return !model.contains(prop, RDF.TYPE, OWL.ANNOTATIONPROPERTY)
        && prop instanceof URI
        && !prop.stringValue().startsWith(MSG.NAMESPACE);
  }

  /** Cycle-safe walk up rdfs:subClassOf looking for msg:Message. */
  private boolean isMessage(RDFEntity message, Set<RDFEntity> set) {
    if (MSG.MESSAGE.equals(message.getURI()))
      return true;
    set.add(message);
    for (RDFClass sup : message.getRDFClasses(RDFS.SUBCLASSOF)) {
      if (!set.contains(sup) && isMessage(sup, set))
        return true;
    }
    return false;
  }

  /** Single value of {@code pred} parsed as a BigInteger, or null. */
  private BigInteger getBigInteger(URI pred) {
    Value value = model.filter(self, pred, null).objectValue();
    if (value == null)
      return null;
    return new BigInteger(value.stringValue());
  }

  private Collection<RDFProperty> getProperties() {
    return getProperties(new HashSet<Resource>(),
        new ArrayList<RDFProperty>());
  }

  /** Collects declared properties over the superclass closure, cycle-safe. */
  private Collection<RDFProperty> getProperties(Set<Resource> exclude,
      Collection<RDFProperty> list) {
    if (exclude.add(getResource())) {
      list.addAll(getDeclaredProperties());
      for (RDFClass sup : getRDFClasses(RDFS.SUBCLASSOF)) {
        list = sup.getProperties(exclude, list);
      }
    }
    return list;
  }

  /**
   * Emits the class header for generated datatype/behaviour classes:
   * annotations, class or abstract name, single-datatype extends, and —
   * for behaviours — the msg:target interface plus RDFObject/
   * RDFObjectBehaviour implements clauses.
   */
  private void classHeader(String simple, JavaMessageBuilder builder)
      throws ObjectStoreConfigException {
    builder.comment(this);
    if (this.isDatatype()) {
      builder.annotationProperties(this);
      URI type = builder.getType(this.getURI());
      builder.annotateURI(Iri.class, type);
      builder.className(simple);
    } else {
      builder.annotationProperties(this);
      builder.abstractName(simple);
    }
    if (this.isDatatype()) {
      List<URI> supers = new ArrayList<URI>();
      for (RDFClass sups : this.getRDFClasses(RDFS.SUBCLASSOF)) {
        if (sups.getURI() == null || sups.equals(this))
          continue;
        // rdfs:Literal rdfs:subClassOf rdfs:Resource
        if (!sups.isDatatype())
          continue;
        supers.add(sups.getURI());
      }
      // Only extend when the datatype parent is unambiguous.
      if (supers.size() == 1) {
        builder.extend(builder.getClassName(supers.get(0)));
      }
    }
    if (!this.isDatatype()) {
      URI range = this.getRange(MSG.TARGET).getURI();
      if (range != null) {
        builder.implement(builder.getClassName(range));
      }
      builder.implement(RDFObject.class.getName());
      builder.implement(RDFObjectBehaviour.class.getName());
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.jobsubmission;
import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Throwables.getRootCause;
import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Throwables.getStackTraceAsString;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CancellationException;
import java.util.function.Consumer;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.apache.beam.model.jobmanagement.v1.JobApi;
import org.apache.beam.model.jobmanagement.v1.JobApi.JobMessage;
import org.apache.beam.model.jobmanagement.v1.JobApi.JobState;
import org.apache.beam.model.jobmanagement.v1.JobApi.JobState.Enum;
import org.apache.beam.model.jobmanagement.v1.JobApi.JobStateEvent;
import org.apache.beam.model.pipeline.v1.RunnerApi;
import org.apache.beam.model.pipeline.v1.RunnerApi.Pipeline;
import org.apache.beam.runners.fnexecution.provisioning.JobInfo;
import org.apache.beam.sdk.PipelineResult;
import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.util.Timestamps;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.FutureCallback;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.Futures;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.ListenableFuture;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.ListeningExecutorService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Internal representation of a Job which has been invoked (prepared and run) by a client. */
public class JobInvocation {
private static final Logger LOG = LoggerFactory.getLogger(JobInvocation.class);
private final RunnerApi.Pipeline pipeline;
private final PortablePipelineRunner pipelineRunner;
private final JobInfo jobInfo;
private final ListeningExecutorService executorService;
private final List<JobStateEvent> stateHistory;
private final List<JobMessage> messageHistory;
private final List<Consumer<JobStateEvent>> stateObservers;
private final List<Consumer<JobMessage>> messageObservers;
private JobApi.MetricResults metrics;
private PortablePipelineResult resultHandle;
@Nullable private ListenableFuture<PortablePipelineResult> invocationFuture;
  /**
   * Creates a job invocation in the STOPPED state; {@code start()} must be
   * called to actually run the pipeline on {@code executorService}.
   *
   * @param jobInfo job id and provisioning info passed to the runner
   * @param executorService executor used both to run the pipeline and to
   *        deliver the completion callback
   * @param pipeline the portable pipeline proto to execute
   * @param pipelineRunner runner that executes the pipeline
   */
  public JobInvocation(
      JobInfo jobInfo,
      ListeningExecutorService executorService,
      Pipeline pipeline,
      PortablePipelineRunner pipelineRunner) {
    this.jobInfo = jobInfo;
    this.executorService = executorService;
    this.pipeline = pipeline;
    this.pipelineRunner = pipelineRunner;
    this.stateObservers = new ArrayList<>();
    this.messageObservers = new ArrayList<>();
    this.invocationFuture = null;
    this.stateHistory = new ArrayList<>();
    this.messageHistory = new ArrayList<>();
    // Start with empty metrics; populated when a run reaches a terminal state.
    this.metrics = JobApi.MetricResults.newBuilder().build();
    this.setState(JobState.Enum.STOPPED);
  }
private PortablePipelineResult runPipeline() throws Exception {
return pipelineRunner.run(pipeline, jobInfo);
}
/** Start the job. */
public synchronized void start() {
LOG.info("Starting job invocation {}", getId());
if (getState() != JobState.Enum.STOPPED) {
throw new IllegalStateException(String.format("Job %s already running.", getId()));
}
setState(JobState.Enum.STARTING);
invocationFuture = executorService.submit(this::runPipeline);
// TODO: Defer transitioning until the pipeline is up and running.
setState(JobState.Enum.RUNNING);
Futures.addCallback(
invocationFuture,
new FutureCallback<PortablePipelineResult>() {
@Override
public void onSuccess(PortablePipelineResult pipelineResult) {
if (pipelineResult != null) {
PipelineResult.State state = pipelineResult.getState();
if (state.isTerminal()) {
metrics = pipelineResult.portableMetrics();
} else {
resultHandle = pipelineResult;
}
switch (state) {
case DONE:
setState(Enum.DONE);
break;
case RUNNING:
setState(Enum.RUNNING);
break;
case CANCELLED:
setState(Enum.CANCELLED);
break;
case FAILED:
setState(Enum.FAILED);
break;
default:
setState(JobState.Enum.UNSPECIFIED);
}
} else {
setState(JobState.Enum.UNSPECIFIED);
}
}
@Override
public void onFailure(@Nonnull Throwable throwable) {
if (throwable instanceof CancellationException) {
// We have canceled execution, just update the job state
setState(JobState.Enum.CANCELLED);
return;
}
String message = String.format("Error during job invocation %s.", getId());
LOG.error(message, throwable);
sendMessage(
JobMessage.newBuilder()
.setMessageText(getStackTraceAsString(throwable))
.setImportance(JobMessage.MessageImportance.JOB_MESSAGE_DEBUG)
.build());
sendMessage(
JobMessage.newBuilder()
.setMessageText(getRootCause(throwable).toString())
.setImportance(JobMessage.MessageImportance.JOB_MESSAGE_ERROR)
.build());
setState(JobState.Enum.FAILED);
}
},
executorService);
}
/** @return Unique identifier for the job invocation. */
public String getId() {
return jobInfo.jobId();
}
/** Cancel the job. */
public synchronized void cancel() {
LOG.info("Canceling job invocation {}", getId());
if (this.invocationFuture != null) {
this.invocationFuture.cancel(true /* mayInterruptIfRunning */);
Futures.addCallback(
invocationFuture,
new FutureCallback<PortablePipelineResult>() {
@Override
public void onSuccess(PortablePipelineResult pipelineResult) {
// Do not cancel when we are already done.
if (pipelineResult != null
&& pipelineResult.getState() != PipelineResult.State.DONE) {
try {
pipelineResult.cancel();
setState(JobState.Enum.CANCELLED);
} catch (IOException exn) {
throw new RuntimeException(exn);
}
}
}
@Override
public void onFailure(Throwable throwable) {}
},
executorService);
}
}
public JobApi.MetricResults getMetrics() {
if (resultHandle != null) {
metrics = resultHandle.portableMetrics();
}
return metrics;
}
/** Retrieve the job's current state. */
public JobState.Enum getState() {
return getStateEvent().getState();
}
/** Retrieve the job's current state. */
public JobStateEvent getStateEvent() {
return stateHistory.get(stateHistory.size() - 1);
}
/** Retrieve the job's pipeline. */
public RunnerApi.Pipeline getPipeline() {
return this.pipeline;
}
/** Listen for job state changes with a {@link Consumer}. */
public synchronized void addStateListener(Consumer<JobStateEvent> stateStreamObserver) {
for (JobStateEvent event : stateHistory) {
stateStreamObserver.accept(event);
}
stateObservers.add(stateStreamObserver);
}
/** Listen for job messages with a {@link Consumer}. */
public synchronized void addMessageListener(Consumer<JobMessage> messageStreamObserver) {
for (JobMessage msg : messageHistory) {
messageStreamObserver.accept(msg);
}
messageObservers.add(messageStreamObserver);
}
/** Convert to {@link JobApi.JobInfo}. */
public JobApi.JobInfo toProto() {
return JobApi.JobInfo.newBuilder()
.setJobId(jobInfo.jobId())
.setJobName(jobInfo.jobName())
.setPipelineOptions(jobInfo.pipelineOptions())
.setState(getState())
.build();
}
private synchronized void setState(JobState.Enum state) {
JobStateEvent event =
JobStateEvent.newBuilder()
.setState(state)
.setTimestamp(Timestamps.fromMillis(System.currentTimeMillis()))
.build();
this.stateHistory.add(event);
for (Consumer<JobStateEvent> observer : stateObservers) {
observer.accept(event);
}
}
private synchronized void sendMessage(JobMessage message) {
messageHistory.add(message);
for (Consumer<JobMessage> observer : messageObservers) {
observer.accept(message);
}
}
static Boolean isTerminated(Enum state) {
switch (state) {
case DONE:
case FAILED:
case CANCELLED:
case DRAINED:
return true;
default:
return false;
}
}
}
| |
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package build.buildfarm.server;
import static build.buildfarm.instance.Utils.putBlobFuture;
import static com.google.common.util.concurrent.Futures.addCallback;
import static com.google.common.util.concurrent.Futures.allAsList;
import static com.google.common.util.concurrent.Futures.catching;
import static com.google.common.util.concurrent.Futures.transform;
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import static java.lang.String.format;
import static java.util.concurrent.TimeUnit.MICROSECONDS;
import build.bazel.remote.execution.v2.BatchReadBlobsRequest;
import build.bazel.remote.execution.v2.BatchReadBlobsResponse;
import build.bazel.remote.execution.v2.BatchUpdateBlobsRequest;
import build.bazel.remote.execution.v2.BatchUpdateBlobsRequest.Request;
import build.bazel.remote.execution.v2.BatchUpdateBlobsResponse;
import build.bazel.remote.execution.v2.BatchUpdateBlobsResponse.Response;
import build.bazel.remote.execution.v2.ContentAddressableStorageGrpc;
import build.bazel.remote.execution.v2.Digest;
import build.bazel.remote.execution.v2.FindMissingBlobsRequest;
import build.bazel.remote.execution.v2.FindMissingBlobsResponse;
import build.bazel.remote.execution.v2.GetTreeRequest;
import build.bazel.remote.execution.v2.GetTreeResponse;
import build.buildfarm.common.DigestUtil;
import build.buildfarm.common.grpc.TracingMetadataUtils;
import build.buildfarm.instance.Instance;
import build.buildfarm.v1test.Tree;
import com.google.common.base.Stopwatch;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.ListenableFuture;
import io.grpc.Status;
import io.grpc.Status.Code;
import io.grpc.stub.StreamObserver;
import io.prometheus.client.Histogram;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
/**
 * gRPC implementation of the Remote Execution API ContentAddressableStorage service, backed
 * by a buildfarm {@link Instance}.
 *
 * <p>All RPC handlers delegate to the instance and complete their {@link StreamObserver}s
 * from future callbacks on the direct executor.
 */
public class ContentAddressableStorageService
    extends ContentAddressableStorageGrpc.ContentAddressableStorageImplBase {

  private static final Logger logger =
      Logger.getLogger(ContentAddressableStorageService.class.getName());

  private static final Histogram missingBlobs =
      Histogram.build().name("missing_blobs").help("Find missing blobs.").register();

  private final Instance instance;
  // Deadline applied to each individual blob write in batchUpdateBlobs.
  private final long writeDeadlineAfter;
  private final TimeUnit writeDeadlineAfterUnits;

  public ContentAddressableStorageService(
      Instance instance, long writeDeadlineAfter, TimeUnit writeDeadlineAfterUnits) {
    this.instance = instance;
    this.writeDeadlineAfter = writeDeadlineAfter;
    this.writeDeadlineAfterUnits = writeDeadlineAfterUnits;
  }

  /** Formats a digest plus its found/not-found status for log/error messages. */
  String checkMessage(Digest digest, boolean found) {
    return format(" (%s, %sfound)", DigestUtil.toString(digest), found ? "" : "not ");
  }

  @Override
  public void findMissingBlobs(
      FindMissingBlobsRequest request, StreamObserver<FindMissingBlobsResponse> responseObserver) {
    instanceFindMissingBlobs(instance, request, responseObserver);
  }

  /** Asks the instance which of the requested digests are missing and streams the answer. */
  void instanceFindMissingBlobs(
      Instance instance,
      FindMissingBlobsRequest request,
      StreamObserver<FindMissingBlobsResponse> responseObserver) {
    Stopwatch stopwatch = Stopwatch.createStarted();
    FindMissingBlobsResponse.Builder builder = FindMissingBlobsResponse.newBuilder();
    ListenableFuture<FindMissingBlobsResponse.Builder> responseFuture =
        transform(
            instance.findMissingBlobs(
                request.getBlobDigestsList(), TracingMetadataUtils.fromCurrentContext()),
            builder::addAllMissingBlobDigests,
            directExecutor());
    addCallback(
        responseFuture,
        new FutureCallback<FindMissingBlobsResponse.Builder>() {
          @Override
          public void onSuccess(FindMissingBlobsResponse.Builder builder) {
            try {
              FindMissingBlobsResponse response = builder.build();
              responseObserver.onNext(response);
              responseObserver.onCompleted();
              long elapsedMicros = stopwatch.elapsed(MICROSECONDS);
              // NOTE(review): this observes the number of digests *queried*, not the number
              // actually missing — confirm which the "missing_blobs" histogram is meant to track.
              missingBlobs.observe(request.getBlobDigestsList().size());
              // NOTE(review): elapsedMicros / 1000.0 is milliseconds, but the message states
              // no unit — consider appending "ms".
              logger.log(
                  Level.FINE,
                  "FindMissingBlobs("
                      + instance.getName()
                      + ") for "
                      + request.getBlobDigestsList().size()
                      + " blobs in "
                      + elapsedMicros / 1000.0);
            } catch (Throwable t) {
              // Any failure while responding (e.g. observer already closed) is routed
              // through the same error path as an instance failure.
              onFailure(t);
            }
          }

          @SuppressWarnings("NullableProblems")
          @Override
          public void onFailure(Throwable t) {
            Status status = Status.fromThrowable(t);
            // CANCELLED means the client went away; responding would fail anyway.
            if (status.getCode() != Code.CANCELLED) {
              logger.log(
                  Level.SEVERE,
                  format(
                      "findMissingBlobs(%s): %d",
                      request.getInstanceName(), request.getBlobDigestsCount()),
                  t);
              responseObserver.onError(t);
            }
          }
        },
        directExecutor());
  }

  /** Wraps a gRPC status code in a google.rpc.Status proto (no message/details). */
  private static com.google.rpc.Status statusForCode(Code code) {
    return com.google.rpc.Status.newBuilder().setCode(code.value()).build();
  }

  /** Pairs a per-blob status code future with its digest as a batch-update Response. */
  private static ListenableFuture<Response> toResponseFuture(
      ListenableFuture<Code> codeFuture, Digest digest) {
    return transform(
        codeFuture,
        code -> Response.newBuilder().setDigest(digest).setStatus(statusForCode(code)).build(),
        directExecutor());
  }

  /**
   * Kicks off one write per request and returns a Response future per blob. Individual
   * failures are converted to per-blob status codes rather than failing the whole batch.
   */
  private static Iterable<ListenableFuture<Response>> putAllBlobs(
      Instance instance,
      Iterable<Request> requests,
      long writeDeadlineAfter,
      TimeUnit writeDeadlineAfterUnits) {
    ImmutableList.Builder<ListenableFuture<Response>> responses = new ImmutableList.Builder<>();
    for (Request request : requests) {
      Digest digest = request.getDigest();
      ListenableFuture<Digest> future =
          putBlobFuture(
              instance,
              digest,
              request.getData(),
              writeDeadlineAfter,
              writeDeadlineAfterUnits,
              TracingMetadataUtils.fromCurrentContext());
      // Success maps to OK; any throwable is folded into its gRPC status code.
      responses.add(
          toResponseFuture(
              catching(
                  transform(future, (d) -> Code.OK, directExecutor()),
                  Throwable.class,
                  (e) -> Status.fromThrowable(e).getCode(),
                  directExecutor()),
              digest));
    }
    return responses.build();
  }

  @Override
  public void batchUpdateBlobs(
      BatchUpdateBlobsRequest batchRequest,
      StreamObserver<BatchUpdateBlobsResponse> responseObserver) {
    BatchUpdateBlobsResponse.Builder response = BatchUpdateBlobsResponse.newBuilder();
    // Each per-blob future appends its Response to the shared builder; allAsList gates
    // building the final message until every write has settled.
    ListenableFuture<BatchUpdateBlobsResponse> responseFuture =
        transform(
            allAsList(
                StreamSupport.stream(
                        putAllBlobs(
                                instance,
                                batchRequest.getRequestsList(),
                                writeDeadlineAfter,
                                writeDeadlineAfterUnits)
                            .spliterator(),
                        false)
                    .map((future) -> transform(future, response::addResponses, directExecutor()))
                    .collect(Collectors.toList())),
            (result) -> response.build(),
            directExecutor());
    addCallback(
        responseFuture,
        new FutureCallback<BatchUpdateBlobsResponse>() {
          @Override
          public void onSuccess(BatchUpdateBlobsResponse response) {
            responseObserver.onNext(response);
            responseObserver.onCompleted();
          }

          @SuppressWarnings("NullableProblems")
          @Override
          public void onFailure(Throwable t) {
            responseObserver.onError(t);
          }
        },
        directExecutor());
  }

  /**
   * Streams directory pages for the tree rooted at rootDigest. Blocks the calling thread,
   * emitting one GetTreeResponse per page until the instance returns an empty page token.
   */
  private void getInstanceTree(
      Instance instance,
      Digest rootDigest,
      String pageToken,
      int pageSize,
      StreamObserver<GetTreeResponse> responseObserver) {
    do {
      Tree.Builder builder = Tree.newBuilder().setRootDigest(rootDigest);
      String nextPageToken = instance.getTree(rootDigest, pageSize, pageToken, builder);
      Tree tree = builder.build();
      GetTreeResponse.Builder response =
          GetTreeResponse.newBuilder().setNextPageToken(nextPageToken);
      response.addAllDirectories(tree.getDirectoriesMap().values());
      responseObserver.onNext(response.build());
      pageToken = nextPageToken;
    } while (!pageToken.isEmpty());
    responseObserver.onCompleted();
  }

  /** Fetches all requested blobs from the instance and responds in a single message. */
  void batchReadBlobs(
      Instance instance,
      BatchReadBlobsRequest batchRequest,
      StreamObserver<BatchReadBlobsResponse> responseObserver) {
    BatchReadBlobsResponse.Builder response = BatchReadBlobsResponse.newBuilder();
    addCallback(
        transform(
            instance.getAllBlobsFuture(batchRequest.getDigestsList()),
            (responses) -> response.addAllResponses(responses).build(),
            directExecutor()),
        new FutureCallback<BatchReadBlobsResponse>() {
          @Override
          public void onSuccess(BatchReadBlobsResponse response) {
            responseObserver.onNext(response);
            responseObserver.onCompleted();
          }

          @SuppressWarnings("NullableProblems")
          @Override
          public void onFailure(Throwable t) {
            responseObserver.onError(Status.fromThrowable(t).asException());
          }
        },
        directExecutor());
  }

  @Override
  public void batchReadBlobs(
      BatchReadBlobsRequest batchRequest, StreamObserver<BatchReadBlobsResponse> responseObserver) {
    batchReadBlobs(instance, batchRequest, responseObserver);
  }

  @Override
  public void getTree(GetTreeRequest request, StreamObserver<GetTreeResponse> responseObserver) {
    int pageSize = request.getPageSize();
    // Negative page sizes are invalid; zero lets the instance pick its default.
    if (pageSize < 0) {
      responseObserver.onError(Status.INVALID_ARGUMENT.asException());
      return;
    }
    getInstanceTree(
        instance, request.getRootDigest(), request.getPageToken(), pageSize, responseObserver);
  }
}
| |
package mx.infotec.dads.sekc.admin.kernel.dto;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
/**
 * Jackson-bound DTO for a "CheckPoint" language element.
 *
 * <p>Mirrors the JSON document shape: descriptive fields, an owner, and several optional
 * element collections. Null fields are omitted on serialization
 * ({@code JsonInclude.Include.NON_NULL}) and any JSON properties not declared here are
 * round-tripped through {@link #getAdditionalProperties()} via
 * {@code @JsonAnyGetter}/{@code @JsonAnySetter}.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonPropertyOrder({
    "name",
    "shortDescription",
    "description",
    "level",
    "state",
    "competencyLevel",
    "suppressable",
    "owner",
    "tag",
    "resource",
    "properties",
    "viewSelection",
    "featureSelection",
    "extension",
    "referrer",
    "patternAssociation"
})
public class CheckPointDto implements LanguageElementDto {

    // Descriptive fields.
    @JsonProperty("name")
    private String name;
    @JsonProperty("shortDescription")
    private String shortDescription;
    @JsonProperty("description")
    private String description;
    @JsonProperty("level")
    private Level level;
    @JsonProperty("state")
    private State state;
    @JsonProperty("competencyLevel")
    private CompetencyLevel competencyLevel;
    @JsonProperty("suppressable")
    private Boolean suppressable;
    @JsonProperty("owner")
    private Owner owner;

    // Optional element collections; null (rather than empty) when absent from the JSON.
    @JsonProperty("tag")
    private List<Tag> tag = null;
    @JsonProperty("resource")
    private List<Resource> resource = null;
    @JsonProperty("properties")
    private List<Property> properties = null;
    @JsonProperty("viewSelection")
    private List<ViewSelection> viewSelection = null;
    @JsonProperty("featureSelection")
    private List<FeatureSelection> featureSelection = null;
    @JsonProperty("extension")
    private List<Extension> extension = null;
    @JsonProperty("referrer")
    private List<Referrer> referrer = null;
    @JsonProperty("patternAssociation")
    private List<PatternAssociation> patternAssociation = null;

    // Catch-all for JSON properties not mapped to a declared field.
    @JsonIgnore
    private Map<String, Object> additionalProperties = new HashMap<>();

    @JsonProperty("name")
    public String getName() {
        return name;
    }

    @JsonProperty("name")
    public void setName(String name) {
        this.name = name;
    }

    @JsonProperty("shortDescription")
    public String getShortDescription() {
        return shortDescription;
    }

    @JsonProperty("shortDescription")
    public void setShortDescription(String shortDescription) {
        this.shortDescription = shortDescription;
    }

    @JsonProperty("description")
    public String getDescription() {
        return description;
    }

    @JsonProperty("description")
    public void setDescription(String description) {
        this.description = description;
    }

    @JsonProperty("level")
    public Level getLevel() {
        return level;
    }

    @JsonProperty("level")
    public void setLevel(Level level) {
        this.level = level;
    }

    @JsonProperty("state")
    public State getState() {
        return state;
    }

    @JsonProperty("state")
    public void setState(State state) {
        this.state = state;
    }

    @JsonProperty("competencyLevel")
    public CompetencyLevel getCompetencyLevel() {
        return competencyLevel;
    }

    @JsonProperty("competencyLevel")
    public void setCompetencyLevel(CompetencyLevel competencyLevel) {
        this.competencyLevel = competencyLevel;
    }

    @JsonProperty("suppressable")
    public Boolean getSuppressable() {
        return suppressable;
    }

    @JsonProperty("suppressable")
    public void setSuppressable(Boolean suppressable) {
        this.suppressable = suppressable;
    }

    @JsonProperty("owner")
    public Owner getOwner() {
        return owner;
    }

    @JsonProperty("owner")
    public void setOwner(Owner owner) {
        this.owner = owner;
    }

    @JsonProperty("tag")
    public List<Tag> getTag() {
        return tag;
    }

    @JsonProperty("tag")
    public void setTag(List<Tag> tag) {
        this.tag = tag;
    }

    @JsonProperty("resource")
    public List<Resource> getResource() {
        return resource;
    }

    @JsonProperty("resource")
    public void setResource(List<Resource> resource) {
        this.resource = resource;
    }

    @JsonProperty("properties")
    public List<Property> getProperties() {
        return properties;
    }

    @JsonProperty("properties")
    public void setProperties(List<Property> properties) {
        this.properties = properties;
    }

    @JsonProperty("viewSelection")
    public List<ViewSelection> getViewSelection() {
        return viewSelection;
    }

    @JsonProperty("viewSelection")
    public void setViewSelection(List<ViewSelection> viewSelection) {
        this.viewSelection = viewSelection;
    }

    @JsonProperty("featureSelection")
    public List<FeatureSelection> getFeatureSelection() {
        return featureSelection;
    }

    @JsonProperty("featureSelection")
    public void setFeatureSelection(List<FeatureSelection> featureSelection) {
        this.featureSelection = featureSelection;
    }

    @JsonProperty("extension")
    public List<Extension> getExtension() {
        return extension;
    }

    @JsonProperty("extension")
    public void setExtension(List<Extension> extension) {
        this.extension = extension;
    }

    @JsonProperty("referrer")
    public List<Referrer> getReferrer() {
        return referrer;
    }

    @JsonProperty("referrer")
    public void setReferrer(List<Referrer> referrer) {
        this.referrer = referrer;
    }

    @JsonProperty("patternAssociation")
    public List<PatternAssociation> getPatternAssociation() {
        return patternAssociation;
    }

    @JsonProperty("patternAssociation")
    public void setPatternAssociation(List<PatternAssociation> patternAssociation) {
        this.patternAssociation = patternAssociation;
    }

    /** Returns the live map of unmapped JSON properties (serialized back via @JsonAnyGetter). */
    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    /** Stores a JSON property that has no dedicated field. */
    @JsonAnySetter
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }

    @Override
    public String toString() {
        return "CheckPointDto{" + "name=" + name + ", shortDescription=" + shortDescription + ", description=" + description + ", level=" + level + ", state=" + state + ", competencyLevel=" + competencyLevel + ", suppressable=" + suppressable + ", owner=" + owner + ", tag=" + tag + ", resource=" + resource + ", properties=" + properties + ", viewSelection=" + viewSelection + ", featureSelection=" + featureSelection + ", extension=" + extension + ", referrer=" + referrer + ", patternAssociation=" + patternAssociation + ", additionalProperties=" + additionalProperties + '}';
    }
}
| |
/*
* Copyright (C) 2015 University of Oregon
*
* You may distribute under the terms of either the GNU General Public
* License or the Apache License, as specified in the LICENSE file.
*
* For more information, see the LICENSE file.
*/
package vnmr.ui;
import java.awt.*;
import java.io.*;
import java.util.*;
import vnmr.util.*;
/**
 * Per-viewport UI layout settings, persisted to and restored from a layout file.
 *
 * <p>{@link #readLayoutInfo} parses the file located by {@code Util.checkUiLayout} into the
 * public flag fields; {@link #saveLayoutInfo} writes them back in the same key/value format.
 *
 * <p>Fixes over the previous revision: both I/O paths now close their streams in
 * {@code finally} blocks with a null check, so a failure to open the file no longer throws
 * {@code NullPointerException} from the close call, and the reader is closed even if a
 * malformed integer value aborts parsing.
 */
public class VpLayoutInfo
{
    // Visibility flags stored as ints: 0 = off/no, > 0 = on/yes.
    public int xmainMenu = 0;
    public int xtoolBar = 0;
    public int xsysToolBar = 0;
    public int xgraphicsToolBar = 0;
    public int xinfoBar = 0;
    public int xparam = 0;
    public int xlocator = 0;
    public int vpX = 0;
    public int vpY = 0;
    public int xgrMaxH = 0; // graphMaxHeight
    public int xgrMaxW = 0; // graphMaxWidth
    public int xlocatorTop = 0; // locatorOnTop
    public int xparamTop = 0; // panelOnTop
    public int vpId = 0;
    public float xcontrolX = 0;
    public float xcontrolY = 0;
    public String panelName;
    public String verticalTabName;
    // True once readLayoutInfo has located (or attempted to parse) a layout file.
    public boolean bAvailable;
    public boolean bNeedSave;
    // for VTabbedToolPanel
    final String tp_dividerPrefix = "tp_divider";
    public int tp_selectedTab = -1;
    public Hashtable tp_dividers = null;

    /**
     * Creates the layout info for viewport {@code num} and immediately loads its saved
     * layout file, if any.
     */
    public VpLayoutInfo(int num) {
        this.vpId = num;
        this.bAvailable = false;
        this.bNeedSave = false;
        this.panelName = null;
        this.verticalTabName = null;
        readLayoutInfo();
    }

    /**
     * Writes the current layout flags to this viewport's layout file. No-op unless
     * {@link #bNeedSave} is set or no file path is available. I/O errors are silently
     * ignored (best-effort persistence), but the writer is always closed.
     */
    public void saveLayoutInfo() {
        if (!bNeedSave)
            return;
        // Viewport ids are 0-based internally but 1-based in the file name.
        String f = Util.checkUiLayout(true, String.valueOf(vpId+1));
        if (f == null)
            return;
        PrintWriter os = null;
        try {
            os = new PrintWriter(new FileWriter(f));
            if (xmainMenu > 0)
                os.println("mainMenu on");
            else
                os.println("mainMenu off");
            if (xtoolBar > 0)
                os.println("toolBar on");
            else
                os.println("toolBar off");
            if (xinfoBar > 0)
                os.println("infoBar on");
            else
                os.println("infoBar off");
            if (xsysToolBar > 0)
                os.println("sysToolBar on");
            else
                os.println("sysToolBar off");
            if (xgraphicsToolBar > 0)
                os.println("graphicsToolBar on");
            else
                os.println("graphicsToolBar off");
            os.println("paramLocX "+Float.toString(xcontrolX));
            os.println("paramLocY "+Float.toString(xcontrolY));
            if (xparam > 0)
                os.println("param on");
            else
                os.println("param off");
            if (xlocator > 0)
                os.println("locator on");
            else
                os.println("locator off");
            if (xgrMaxH > 0) {
                os.println("graphMaxHeight yes");
                // panelOnTop is only meaningful when the graph is height-maximized.
                if (xparamTop > 0)
                    os.println("panelOnTop yes");
                else
                    os.println("panelOnTop no");
            }
            else
                os.println("graphMaxHeight no");
            if (xgrMaxW > 0) {
                os.println("graphMaxWidth yes");
                // locatorOnTop is only meaningful when the graph is width-maximized.
                if (xlocatorTop > 0)
                    os.println("locatorOnTop yes");
                else
                    os.println("locatorOnTop no");
            }
            else
                os.println("graphMaxWidth no");
            if (panelName != null)
                os.println("panel "+panelName);
            if (verticalTabName != null)
                os.println("verticalTabName "+verticalTabName);
            if (tp_selectedTab >= 0)
                os.println("tp_selectedTab " + tp_selectedTab);
            String key;
            int value;
            // Divider positions are written as "tp_divider:<key> <value>".
            if (tp_dividers != null && tp_dividers.size() > 0) {
                for (Enumeration e = tp_dividers.keys(); e.hasMoreElements();) {
                    key = (String)e.nextElement();
                    value = ((Integer)tp_dividers.get(key)).intValue();
                    os.println(tp_dividerPrefix+":"+key +" "+value);
                }
            }
        }
        catch(IOException er) { }
        finally {
            // Close only if the writer was actually created; the previous version
            // dereferenced a null writer when new FileWriter(f) threw.
            if (os != null)
                os.close();
        }
    }

    /**
     * Loads this viewport's layout file (if one exists) into the public flag fields.
     * Unknown keys are ignored; I/O errors abort parsing silently. The reader is always
     * closed, even if parsing throws (e.g. NumberFormatException on a bad integer).
     */
    public void readLayoutInfo() {
        String data, atr;
        StringTokenizer tok;
        String f = Util.checkUiLayout(false, String.valueOf(vpId+1));
        if (tp_dividers == null)
            tp_dividers = new Hashtable();
        else
            tp_dividers.clear();
        if (f == null)
            return;
        BufferedReader in = null;
        try {
            in = new BufferedReader(new FileReader(f));
            while ((data = in.readLine()) != null)
            {
                if (data.length() < 2)
                    continue;
                // Each line is "<attribute> <value>"; extra tokens are ignored.
                tok = new StringTokenizer(data, " ,\n\t");
                if (!tok.hasMoreTokens())
                    continue;
                atr = tok.nextToken();
                if (!tok.hasMoreTokens())
                    continue;
                data = tok.nextToken();
                if (atr.equals("mainMenu")) {
                    if (data.equals("off"))
                        xmainMenu = 0;
                    else
                        xmainMenu = 1;
                    continue;
                }
                if (atr.equals("paramLocX")) {
                    xcontrolX = Float.parseFloat(data);
                    continue;
                }
                if (atr.equals("paramLocY")) {
                    xcontrolY = Float.parseFloat(data);
                    continue;
                }
                if (atr.equals("toolBar")) {
                    if (data.equals("off"))
                        xtoolBar = 0;
                    else
                        xtoolBar = 1;
                    continue;
                }
                if (atr.equals("infoBar")) {
                    if (data.equals("off"))
                        xinfoBar = 0;
                    else
                        xinfoBar = 1;
                    continue;
                }
                if (atr.equals("param")) {
                    if (data.equals("on"))
                        xparam = 1;
                    else
                        xparam = 0;
                    continue;
                }
                if (atr.equals("locator")) {
                    if (data.equals("on"))
                        xlocator = 1;
                    else
                        xlocator = 0;
                    continue;
                }
                if (atr.equals("graphMaxHeight")) {
                    if (data.equals("yes"))
                        xgrMaxH = 1;
                    else
                        xgrMaxH = 0;
                    continue;
                }
                if (atr.equals("graphMaxWidth")) {
                    if (data.equals("yes"))
                        xgrMaxW = 1;
                    else
                        xgrMaxW = 0;
                    continue;
                }
                if (atr.equals("panelOnTop")) {
                    if (data.equals("yes"))
                        xparamTop = 1;
                    else
                        xparamTop = 0;
                    continue;
                }
                if (atr.equals("locatorOnTop")) {
                    if (data.equals("yes"))
                        xlocatorTop = 1;
                    else
                        xlocatorTop = 0;
                    continue;
                }
                if (atr.equals("panel")) {
                    panelName = data;
                    continue;
                }
                if (atr.equals("verticalTabName")) {
                    verticalTabName = data;
                    continue;
                }
                if (atr.equals("tp_selectedTab")) {
                    // parseInt replaces Integer.valueOf(...).intValue(); same semantics,
                    // no boxing, still throws NumberFormatException on bad input.
                    tp_selectedTab = Integer.parseInt(data);
                    continue;
                }
                if (atr.startsWith(tp_dividerPrefix)) {
                    // Key is the text after "tp_divider:"; Integer.valueOf replaces the
                    // deprecated new Integer(String) constructor.
                    tp_dividers.put(atr.substring(tp_dividerPrefix.length()+1),
                                    Integer.valueOf(data));
                    continue;
                }
                if (atr.equals("sysToolBar")) {
                    if (data.equals("off"))
                        xsysToolBar = 0;
                    else
                        xsysToolBar = 1;
                    continue;
                }
                if (atr.equals("graphicsToolBar")) {
                    if (data.equals("off"))
                        xgraphicsToolBar = 0;
                    else
                        xgraphicsToolBar = 1;
                    continue;
                }
            } // while
        }
        catch(IOException e) { }
        finally {
            // Null-checked close: previously this threw NullPointerException when the
            // FileReader constructor failed, and leaked the reader on parse exceptions.
            if (in != null) {
                try {
                    in.close();
                } catch(IOException ee) { }
            }
        }
        bAvailable = true;
    }

    public String getPanelName() {
        return panelName;
    }

    public void setPanelName(String name) {
        panelName = name;
    }

    public String getVerticalTabName() {
        return verticalTabName;
    }

    public void setVerticalTabName(String name) {
        verticalTabName = name;
    }
}
| |
/*
* Copyright 2008-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.griffon.compile.core;
import static org.codehaus.griffon.compile.core.MethodDescriptor.*;
/**
* @author Andres Almiray
*/
/**
 * Compile-time constants used when generating MVC-aware methods.
 *
 * <p>Declares the fully-qualified type names involved and, in {@link #METHODS}, the
 * descriptors of every {@code createMVCGroup}/{@code buildMVCGroup}/{@code withMVCGroup}/
 * {@code destroyMVCGroup} overload to be injected. All descriptors are built with the
 * static helpers imported from {@code MethodDescriptor}.
 */
public interface MVCAwareConstants extends BaseConstants {
    String MVC_GROUP_MANAGER_PROPERTY = "mvcGroupManager";
    // Fully-qualified names of the Griffon MVC types referenced by the descriptors.
    String MVC_CALLABLE_TYPE = "griffon.core.mvc.MVCCallable";
    String GRIFFON_MODEL_TYPE = "griffon.core.artifact.GriffonModel";
    String GRIFFON_VIEW_TYPE = "griffon.core.artifact.GriffonView";
    String GRIFFON_CONTROLLER_TYPE = "griffon.core.artifact.GriffonController";
    String MVC_GROUP = "griffon.core.mvc.MVCGroup";
    String MVC_HANDLER_TYPE = "griffon.core.mvc.MVCHandler";
    String MVC_GROUP_MANAGER_TYPE = "griffon.core.mvc.MVCGroupManager";
    String GRIFFON_MVC_ARTIFACT_TYPE = "griffon.core.artifact.GriffonMvcArtifact";
    // Names of the methods to generate.
    String METHOD_CREATE_MVC_GROUP = "createMVCGroup";
    String METHOD_BUILD_MVC_GROUP = "buildMVCGroup";
    String METHOD_WITH_MVC_GROUP = "withMVCGroup";
    String METHOD_DESTROY_MVC_GROUP = "destroyMVCGroup";
    // Type-parameter names for the generic withMVCGroup overloads (M = model, V = view,
    // C = controller).
    String M = "M";
    String V = "V";
    String C = "C";

    // Every overload to generate. Argument conventions throughout: a String is an
    // mvcType/mvcId, a Map<String, Object> carries group construction args.
    MethodDescriptor[] METHODS = new MethodDescriptor[]{
        // destroyMVCGroup(mvcId)
        method(
            type(VOID),
            METHOD_DESTROY_MVC_GROUP,
            args(annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING))
        ),
        // buildMVCGroup overloads — return an MVCGroup.
        annotatedMethod(
            annotations(JAVAX_ANNOTATION_NONNULL),
            type(MVC_GROUP),
            METHOD_BUILD_MVC_GROUP,
            args(annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING))
        ),
        annotatedMethod(
            annotations(JAVAX_ANNOTATION_NONNULL),
            type(MVC_GROUP),
            METHOD_BUILD_MVC_GROUP,
            args(annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING), type(JAVA_LANG_STRING))
        ),
        annotatedMethod(
            annotations(JAVAX_ANNOTATION_NONNULL),
            type(MVC_GROUP),
            METHOD_BUILD_MVC_GROUP,
            args(
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING))
        ),
        annotatedMethod(
            annotations(JAVAX_ANNOTATION_NONNULL),
            type(MVC_GROUP),
            METHOD_BUILD_MVC_GROUP,
            args(
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT))
        ),
        annotatedMethod(
            annotations(JAVAX_ANNOTATION_NONNULL),
            type(MVC_GROUP),
            METHOD_BUILD_MVC_GROUP,
            args(
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING))
        ),
        annotatedMethod(
            annotations(JAVAX_ANNOTATION_NONNULL),
            type(MVC_GROUP),
            METHOD_BUILD_MVC_GROUP,
            args(
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT))
        ),
        // createMVCGroup overloads — return a List<? extends GriffonMvcArtifact>.
        annotatedMethod(
            annotations(JAVAX_ANNOTATION_NONNULL),
            typeWithParams(JAVA_UTIL_LIST, wildcard(GRIFFON_MVC_ARTIFACT_TYPE)),
            METHOD_CREATE_MVC_GROUP,
            args(annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING))
        ),
        annotatedMethod(
            annotations(JAVAX_ANNOTATION_NONNULL),
            typeWithParams(JAVA_UTIL_LIST, wildcard(GRIFFON_MVC_ARTIFACT_TYPE)),
            METHOD_CREATE_MVC_GROUP,
            args(
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING))
        ),
        annotatedMethod(
            annotations(JAVAX_ANNOTATION_NONNULL),
            typeWithParams(JAVA_UTIL_LIST, wildcard(GRIFFON_MVC_ARTIFACT_TYPE)),
            METHOD_CREATE_MVC_GROUP,
            args(
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT))
        ),
        annotatedMethod(
            annotations(JAVAX_ANNOTATION_NONNULL),
            typeWithParams(JAVA_UTIL_LIST, wildcard(GRIFFON_MVC_ARTIFACT_TYPE)),
            METHOD_CREATE_MVC_GROUP,
            args(
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING))
        ),
        annotatedMethod(
            annotations(JAVAX_ANNOTATION_NONNULL),
            typeWithParams(JAVA_UTIL_LIST, wildcard(GRIFFON_MVC_ARTIFACT_TYPE)),
            METHOD_CREATE_MVC_GROUP,
            args(
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING))
        ),
        annotatedMethod(
            annotations(JAVAX_ANNOTATION_NONNULL),
            typeWithParams(JAVA_UTIL_LIST, wildcard(GRIFFON_MVC_ARTIFACT_TYPE)),
            METHOD_CREATE_MVC_GROUP,
            args(
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT))
        ),
        // withMVCGroup overloads — generic over <M, V, C>, take an MVCCallable<M, V, C>
        // as the trailing argument, return void.
        method(
            type(VOID),
            typeParams(
                typeParam(M, GRIFFON_MODEL_TYPE),
                typeParam(V, GRIFFON_VIEW_TYPE),
                typeParam(C, GRIFFON_CONTROLLER_TYPE)
            ),
            METHOD_WITH_MVC_GROUP,
            args(
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), MVC_CALLABLE_TYPE, M, V, C))
        ),
        method(
            type(VOID),
            typeParams(
                typeParam(M, GRIFFON_MODEL_TYPE),
                typeParam(V, GRIFFON_VIEW_TYPE),
                typeParam(C, GRIFFON_CONTROLLER_TYPE)
            ),
            METHOD_WITH_MVC_GROUP,
            args(
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), MVC_CALLABLE_TYPE, M, V, C))
        ),
        method(
            type(VOID),
            typeParams(
                typeParam(M, GRIFFON_MODEL_TYPE),
                typeParam(V, GRIFFON_VIEW_TYPE),
                typeParam(C, GRIFFON_CONTROLLER_TYPE)
            ),
            METHOD_WITH_MVC_GROUP,
            args(
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), MVC_CALLABLE_TYPE, M, V, C))
        ),
        method(
            type(VOID),
            typeParams(
                typeParam(M, GRIFFON_MODEL_TYPE),
                typeParam(V, GRIFFON_VIEW_TYPE),
                typeParam(C, GRIFFON_CONTROLLER_TYPE)
            ),
            METHOD_WITH_MVC_GROUP,
            args(
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), MVC_CALLABLE_TYPE, M, V, C))
        ),
        method(
            type(VOID),
            typeParams(
                typeParam(M, GRIFFON_MODEL_TYPE),
                typeParam(V, GRIFFON_VIEW_TYPE),
                typeParam(C, GRIFFON_CONTROLLER_TYPE)
            ),
            METHOD_WITH_MVC_GROUP,
            args(
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), MVC_CALLABLE_TYPE, M, V, C))
        ),
        method(
            type(VOID),
            typeParams(
                typeParam(M, GRIFFON_MODEL_TYPE),
                typeParam(V, GRIFFON_VIEW_TYPE),
                typeParam(C, GRIFFON_CONTROLLER_TYPE)
            ),
            METHOD_WITH_MVC_GROUP,
            args(
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_UTIL_MAP, JAVA_LANG_STRING, JAVA_LANG_OBJECT),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), JAVA_LANG_STRING),
                annotatedType(annotations(JAVAX_ANNOTATION_NONNULL), MVC_CALLABLE_TYPE, M, V, C))
        )
    };
}
| |
/*
* Copyright 2006-2009 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.osgi.blueprint.config;
import java.net.Socket;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import junit.framework.TestCase;
import org.osgi.service.blueprint.container.BlueprintContainer;
import org.osgi.service.blueprint.container.NoSuchComponentException;
import org.springframework.beans.BeanMetadataElement;
import org.springframework.beans.BeansException;
import org.springframework.beans.MutablePropertyValues;
import org.springframework.beans.PropertyValue;
import org.springframework.beans.factory.config.BeanDefinitionHolder;
import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.beans.factory.config.ConstructorArgumentValues.ValueHolder;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.xml.XmlBeanDefinitionReader;
import org.springframework.context.support.GenericApplicationContext;
import org.springframework.core.io.ClassPathResource;
import org.springframework.osgi.blueprint.TestComponent;
import org.springframework.osgi.blueprint.container.SpringBlueprintContainer;
import org.springframework.osgi.blueprint.container.support.BlueprintEditorRegistrar;
import org.springframework.osgi.context.support.BundleContextAwareProcessor;
import org.springframework.osgi.mock.MockBundleContext;
/**
*
* @author Costin Leau
*
*/
public class ComponentSubElementTest extends TestCase {
private static final String CONFIG = "component-subelements.xml";
private GenericApplicationContext context;
private BlueprintContainer BlueprintContainer;
private XmlBeanDefinitionReader reader;
protected MockBundleContext bundleContext;
protected void setUp() throws Exception {
bundleContext = new MockBundleContext();
context = new GenericApplicationContext();
context.setClassLoader(getClass().getClassLoader());
context.getBeanFactory().addBeanPostProcessor(new BundleContextAwareProcessor(bundleContext));
context.addBeanFactoryPostProcessor(new BeanFactoryPostProcessor() {
public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException {
beanFactory.addPropertyEditorRegistrar(new BlueprintEditorRegistrar());
}
});
reader = new XmlBeanDefinitionReader(context);
reader.loadBeanDefinitions(new ClassPathResource(CONFIG, getClass()));
context.refresh();
BlueprintContainer = new SpringBlueprintContainer(context);
}
protected void tearDown() throws Exception {
context.close();
context = null;
}
public void testNumberOfBeans() throws Exception {
assertTrue("not enough beans found", context.getBeanDefinitionCount() > 4);
}
public void testConstructorArg() throws Exception {
AbstractBeanDefinition def = (AbstractBeanDefinition) context.getBeanDefinition("constructor-arg");
assertEquals(Integer.class.getName(), def.getBeanClassName());
assertEquals("description", def.getDescription());
ValueHolder argumentValue = def.getConstructorArgumentValues().getArgumentValue(0, int.class);
assertNotNull(argumentValue);
}
public void testConstructorRef() throws Exception {
AbstractBeanDefinition def = (AbstractBeanDefinition) context.getBeanDefinition("constructor-arg-ref");
assertEquals(String.class.getName(), def.getBeanClassName());
assertEquals("description2", def.getDescription());
assertEquals(1, def.getConstructorArgumentValues().getArgumentCount());
}
public void testPropertyInline() throws Exception {
AbstractBeanDefinition def = (AbstractBeanDefinition) context.getBeanDefinition("propertyValueInline");
assertEquals(Socket.class.getName(), def.getBeanClassName());
MutablePropertyValues propertyValues = def.getPropertyValues();
PropertyValue propertyValue = propertyValues.getPropertyValue("keepAlive");
assertNotNull(propertyValue);
assertTrue(propertyValue.getValue() instanceof BeanMetadataElement);
}
public void testValueRef() throws Exception {
AbstractBeanDefinition def = (AbstractBeanDefinition) context.getBeanDefinition("propertyValueRef");
assertEquals(Socket.class.getName(), def.getBeanClassName());
assertNotNull(def.getPropertyValues().getPropertyValue("sendBufferSize"));
}
public void testpropertyValueNested() throws Exception {
AbstractBeanDefinition def = (AbstractBeanDefinition) context.getBeanDefinition("propertyValueNested");
assertEquals(Socket.class.getName(), def.getBeanClassName());
PropertyValue nested = def.getPropertyValues().getPropertyValue("sendBufferSize");
assertTrue(nested.getValue() instanceof BeanDefinitionHolder);
}
public void testArray() throws Exception {
TestComponent cmpn = (TestComponent) context.getBean("array");
Object prop = cmpn.getPropA();
assertTrue(prop instanceof Object[]);
Object[] array = (Object[]) prop;
assertEquals(Character.class, array[0].getClass());
assertEquals("literal2", array[1]);
assertNull(array[2]);
}
public void testMixedCollection() throws Exception {
TestComponent cmpn = (TestComponent) context.getBean("mixedCollection");
Object prop = cmpn.getPropA();
assertTrue(prop instanceof List);
List<?> list = (List<?>) prop;
assertEquals("literal", list.get(0));
assertEquals(Integer[].class, list.get(1).getClass());
assertEquals(int[].class, list.get(2).getClass());
assertEquals(new Integer(2), ((Integer[]) list.get(1))[0]);
assertEquals(5, ((int[]) list.get(2))[1]);
}
public void testList() throws Exception {
TestComponent cmpn = (TestComponent) context.getBean("list");
Object prop = cmpn.getPropA();
assertTrue(prop instanceof List);
List<?> list = (List<?>) prop;
assertEquals("value", list.get(0));
assertEquals("idref", list.get(1));
assertNull(list.get(2));
assertSame(context.getBean("idref"), list.get(3));
}
public void testSet() throws Exception {
TestComponent cmpn = (TestComponent) context.getBean("set");
Object prop = cmpn.getPropA();
assertTrue(prop instanceof Set);
Set<?> set = (Set<?>) prop;
assertTrue(set.contains("value"));
assertTrue(set.contains("idref"));
assertTrue(set.contains(null));
assertTrue(set.contains(context.getBean("idref")));
}
public void testMap() throws Exception {
TestComponent cmpn = (TestComponent) context.getBean("map");
Object prop = cmpn.getPropA();
assertTrue(prop instanceof Map);
Map<?, ?> map = (Map) prop;
assertEquals("bar", map.get("foo"));
assertEquals(context.getBean("set"), map.get(context.getBean("list")));
assertEquals(context.getBean("list"), map.get(context.getBean("set")));
}
public void testProps() throws Exception {
TestComponent cmpn = (TestComponent) context.getBean("props");
Object prop = cmpn.getPropA();
assertTrue(prop instanceof Properties);
Properties props = (Properties) prop;
assertEquals("two", props.get("one"));
assertEquals("smith", props.get("aero"));
}
public void testAmbigousComponent() throws Exception {
System.out.println(context.getBean("ambigousComponent"));
}
public void testDependsOnTest() throws Exception {
try {
System.out.println(BlueprintContainer.getComponentInstance("dependsOnComponent"));
fail("expected validation exception");
} catch (NoSuchComponentException nsce) {
}
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v9/enums/user_list_membership_status.proto
package com.google.ads.googleads.v9.enums;
/**
* <pre>
* Membership status of this user list. Indicates whether a user list is open
* or active. Only open user lists can accumulate more users and can be used for
* targeting.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v9.enums.UserListMembershipStatusEnum}
*/
// NOTE: protoc-generated code (see the "DO NOT EDIT" file header); only
// comments have been added here - do not hand-modify the logic.
public final class UserListMembershipStatusEnum extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.ads.googleads.v9.enums.UserListMembershipStatusEnum)
    UserListMembershipStatusEnumOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use UserListMembershipStatusEnum.newBuilder() to construct.
  private UserListMembershipStatusEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private UserListMembershipStatusEnum() {
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new UserListMembershipStatusEnum();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }

  // Parsing constructor: this message declares no fields, so the loop only
  // collects unrecognized tags into the unknown-field set until end of input.
  private UserListMembershipStatusEnum(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // Tag 0 signals end of stream.
            done = true;
            break;
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      // Always freeze whatever was parsed, even on failure.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v9.enums.UserListMembershipStatusProto.internal_static_google_ads_googleads_v9_enums_UserListMembershipStatusEnum_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v9.enums.UserListMembershipStatusProto.internal_static_google_ads_googleads_v9_enums_UserListMembershipStatusEnum_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum.class, com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum.Builder.class);
  }

  /**
   * <pre>
   * Enum containing possible user list membership statuses.
   * </pre>
   *
   * Protobuf enum {@code google.ads.googleads.v9.enums.UserListMembershipStatusEnum.UserListMembershipStatus}
   */
  public enum UserListMembershipStatus
      implements com.google.protobuf.ProtocolMessageEnum {
    /**
     * <pre>
     * Not specified.
     * </pre>
     *
     * <code>UNSPECIFIED = 0;</code>
     */
    UNSPECIFIED(0),
    /**
     * <pre>
     * Used for return value only. Represents value unknown in this version.
     * </pre>
     *
     * <code>UNKNOWN = 1;</code>
     */
    UNKNOWN(1),
    /**
     * <pre>
     * Open status - List is accruing members and can be targeted to.
     * </pre>
     *
     * <code>OPEN = 2;</code>
     */
    OPEN(2),
    /**
     * <pre>
     * Closed status - No new members being added. Cannot be used for targeting.
     * </pre>
     *
     * <code>CLOSED = 3;</code>
     */
    CLOSED(3),
    // Sentinel for wire values not known to this generated version.
    UNRECOGNIZED(-1),
    ;
    /**
     * <pre>
     * Not specified.
     * </pre>
     *
     * <code>UNSPECIFIED = 0;</code>
     */
    public static final int UNSPECIFIED_VALUE = 0;
    /**
     * <pre>
     * Used for return value only. Represents value unknown in this version.
     * </pre>
     *
     * <code>UNKNOWN = 1;</code>
     */
    public static final int UNKNOWN_VALUE = 1;
    /**
     * <pre>
     * Open status - List is accruing members and can be targeted to.
     * </pre>
     *
     * <code>OPEN = 2;</code>
     */
    public static final int OPEN_VALUE = 2;
    /**
     * <pre>
     * Closed status - No new members being added. Cannot be used for targeting.
     * </pre>
     *
     * <code>CLOSED = 3;</code>
     */
    public static final int CLOSED_VALUE = 3;

    // Throws for UNRECOGNIZED, which has no stable wire number.
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static UserListMembershipStatus valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static UserListMembershipStatus forNumber(int value) {
      switch (value) {
        case 0: return UNSPECIFIED;
        case 1: return UNKNOWN;
        case 2: return OPEN;
        case 3: return CLOSED;
        default: return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<UserListMembershipStatus>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final com.google.protobuf.Internal.EnumLiteMap<
        UserListMembershipStatus> internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<UserListMembershipStatus>() {
            public UserListMembershipStatus findValueByNumber(int number) {
              return UserListMembershipStatus.forNumber(number);
            }
          };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum.getDescriptor().getEnumTypes().get(0);
    }

    private static final UserListMembershipStatus[] VALUES = values();

    public static UserListMembershipStatus valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
            "EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private UserListMembershipStatus(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:google.ads.googleads.v9.enums.UserListMembershipStatusEnum.UserListMembershipStatus)
  }

  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // This message declares no fields, so equality/hashing depend only on
  // the unknown-field set.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum other = (com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum) obj;

    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Membership status of this user list. Indicates whether a user list is open
   * or active. Only open user lists can accumulate more users and can be used for
   * targeting.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v9.enums.UserListMembershipStatusEnum}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v9.enums.UserListMembershipStatusEnum)
      com.google.ads.googleads.v9.enums.UserListMembershipStatusEnumOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v9.enums.UserListMembershipStatusProto.internal_static_google_ads_googleads_v9_enums_UserListMembershipStatusEnum_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v9.enums.UserListMembershipStatusProto.internal_static_google_ads_googleads_v9_enums_UserListMembershipStatusEnum_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum.class, com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum.Builder.class);
    }

    // Construct using com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v9.enums.UserListMembershipStatusProto.internal_static_google_ads_googleads_v9_enums_UserListMembershipStatusEnum_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum getDefaultInstanceForType() {
      return com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum build() {
      com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum buildPartial() {
      com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum result = new com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum(this);
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum) {
        return mergeFrom((com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum other) {
      if (other == com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum.getDefaultInstance()) return this;
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        // Merge whatever was successfully parsed before any failure.
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v9.enums.UserListMembershipStatusEnum)
  }

  // @@protoc_insertion_point(class_scope:google.ads.googleads.v9.enums.UserListMembershipStatusEnum)
  private static final com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum();
  }

  public static com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<UserListMembershipStatusEnum>
      PARSER = new com.google.protobuf.AbstractParser<UserListMembershipStatusEnum>() {
    @java.lang.Override
    public UserListMembershipStatusEnum parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new UserListMembershipStatusEnum(input, extensionRegistry);
    }
  };

  public static com.google.protobuf.Parser<UserListMembershipStatusEnum> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UserListMembershipStatusEnum> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v9.enums.UserListMembershipStatusEnum getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.stratos.cloud.controller.internal;
import com.hazelcast.core.HazelcastInstance;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.stratos.cloud.controller.context.CloudControllerContext;
import org.apache.stratos.cloud.controller.exception.CloudControllerException;
import org.apache.stratos.cloud.controller.messaging.publisher.TopologyEventSynchronizer;
import org.apache.stratos.cloud.controller.messaging.receiver.application.ApplicationEventReceiver;
import org.apache.stratos.cloud.controller.messaging.receiver.cluster.status.ClusterStatusTopicReceiver;
import org.apache.stratos.cloud.controller.messaging.receiver.instance.status.InstanceStatusTopicReceiver;
import org.apache.stratos.cloud.controller.services.CloudControllerService;
import org.apache.stratos.cloud.controller.services.impl.CloudControllerServiceImpl;
import org.apache.stratos.common.Component;
import org.apache.stratos.common.services.ComponentActivationEventListener;
import org.apache.stratos.common.services.ComponentStartUpSynchronizer;
import org.apache.stratos.common.services.DistributedObjectProvider;
import org.apache.stratos.common.threading.StratosThreadPool;
import org.apache.stratos.messaging.broker.publish.EventPublisherPool;
import org.apache.stratos.messaging.util.MessagingUtil;
import org.osgi.framework.BundleContext;
import org.osgi.service.component.ComponentContext;
import org.wso2.carbon.ntask.core.service.TaskService;
import org.wso2.carbon.registry.core.exceptions.RegistryException;
import org.wso2.carbon.registry.core.service.RegistryService;
import org.wso2.carbon.registry.core.session.UserRegistry;
import org.wso2.carbon.utils.ConfigurationContextService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
/**
* Registering Cloud Controller Service.
*
* @scr.component name="org.apache.stratos.cloud.controller" immediate="true"
* @scr.reference name="hazelcast.instance.service" interface="com.hazelcast.core.HazelcastInstance"
* cardinality="0..1"policy="dynamic" bind="setHazelcastInstance" unbind="unsetHazelcastInstance"
* @scr.reference name="distributedObjectProvider" interface="org.apache.stratos.common.services.DistributedObjectProvider"
* cardinality="1..1" policy="dynamic" bind="setDistributedObjectProvider" unbind="unsetDistributedObjectProvider"
* @scr.reference name="componentStartUpSynchronizer" interface="org.apache.stratos.common.services.ComponentStartUpSynchronizer"
* cardinality="1..1" policy="dynamic" bind="setComponentStartUpSynchronizer" unbind="unsetComponentStartUpSynchronizer"
* @scr.reference name="ntask.component" interface="org.wso2.carbon.ntask.core.service.TaskService"
* cardinality="1..1" policy="dynamic" bind="setTaskService" unbind="unsetTaskService"
* @scr.reference name="registry.service" interface="org.wso2.carbon.registry.core.service.RegistryService"
* cardinality="1..1" policy="dynamic" bind="setRegistryService" unbind="unsetRegistryService"
* @scr.reference name="config.context.service" interface="org.wso2.carbon.utils.ConfigurationContextService"
* cardinality="1..1" policy="dynamic" bind="setConfigurationContextService" unbind="unsetConfigurationContextService"
*/
public class CloudControllerServiceComponent {
private static final Log log = LogFactory.getLog(CloudControllerServiceComponent.class);
private static final String CLOUD_CONTROLLER_COORDINATOR_LOCK = "cloud.controller.coordinator.lock";
private static final String THREAD_POOL_ID = "cloud.controller.thread.pool";
private static final String SCHEDULER_THREAD_POOL_ID = "cloud.controller.scheduler.thread.pool";
private static final int THREAD_POOL_SIZE = 10;
private static final int SCHEDULER_THREAD_POOL_SIZE = 5;
private ClusterStatusTopicReceiver clusterStatusTopicReceiver;
private InstanceStatusTopicReceiver instanceStatusTopicReceiver;
private ApplicationEventReceiver applicationEventReceiver;
private ExecutorService executorService;
private ScheduledExecutorService scheduler;
/**
 * OSGi declarative-services activation callback. Creates the component's
 * thread pools, then bootstraps asynchronously on a dedicated thread:
 * registers {@link CloudControllerService} in the OSGi registry and, when
 * the cloud controller context reports clustered mode, elects a coordinator
 * via a Hazelcast distributed lock before running the coordinator tasks.
 *
 * @param context the OSGi component context, used to obtain the bundle
 *                context for service registration
 */
protected void activate(final ComponentContext context) {
    if (log.isDebugEnabled()) {
        log.debug("Activating CloudControllerServiceComponent...");
    }
    try {
        // Shared pools used by event receivers and background tasks.
        executorService = StratosThreadPool.getExecutorService(THREAD_POOL_ID, THREAD_POOL_SIZE);
        scheduler = StratosThreadPool.getScheduledExecutorService(SCHEDULER_THREAD_POOL_ID,
                SCHEDULER_THREAD_POOL_SIZE);
        Runnable cloudControllerActivator = new Runnable() {
            @Override
            public void run() {
                try {
                    ComponentStartUpSynchronizer componentStartUpSynchronizer =
                            ServiceReferenceHolder.getInstance().getComponentStartUpSynchronizer();
                    // Register cloud controller service
                    BundleContext bundleContext = context.getBundleContext();
                    bundleContext.registerService(CloudControllerService.class.getName(),
                            new CloudControllerServiceImpl(), null);
                    if (CloudControllerContext.getInstance().isClustered()) {
                        Thread coordinatorElectorThread = new Thread() {
                            @Override
                            public void run() {
                                // Blocks until this member acquires the distributed lock.
                                // NOTE(review): the lock is never released here; presumably the
                                // holder stays coordinator until it leaves the cluster - confirm.
                                ServiceReferenceHolder.getInstance().getHazelcastInstance()
                                        .getLock(CLOUD_CONTROLLER_COORDINATOR_LOCK).lock();
                                String localMemberId = ServiceReferenceHolder.getInstance().getHazelcastInstance()
                                        .getCluster().getLocalMember().getUuid();
                                log.info("Elected member [" + localMemberId + "] " +
                                        "as the cloud controller coordinator of the cluster");
                                CloudControllerContext.getInstance().setCoordinator(true);
                                executeCoordinatorTasks();
                            }
                        };
                        coordinatorElectorThread.setName("Cloud controller coordinator elector thread");
                        // NOTE(review): the Thread is submitted as a plain Runnable, so run()
                        // executes on a pool thread and the name set above has no effect on
                        // the executing thread - confirm whether a direct start() was intended.
                        executorService.submit(coordinatorElectorThread);
                    } else {
                        // Non-clustered deployment: this node is implicitly the coordinator.
                        executeCoordinatorTasks();
                    }
                    // Wait for the Axis service to come up before marking the component active.
                    componentStartUpSynchronizer.waitForAxisServiceActivation(Component.CloudController,
                            "CloudControllerService");
                    componentStartUpSynchronizer.setComponentStatus(Component.CloudController, true);
                    log.info("Cloud controller service component activated");
                } catch (Exception e) {
                    log.error("Could not activate cloud controller service component", e);
                }
            }
        };
        Thread cloudControllerActivatorThread = new Thread(cloudControllerActivator);
        cloudControllerActivatorThread.start();
    } catch (Exception e) {
        log.error("Could not activate cloud controller service component", e);
    }
}
private void executeCoordinatorTasks() {
applicationEventReceiver = new ApplicationEventReceiver();
applicationEventReceiver.setExecutorService(executorService);
applicationEventReceiver.execute();
if (log.isInfoEnabled()) {
log.info("Application event receiver thread started");
}
clusterStatusTopicReceiver = new ClusterStatusTopicReceiver();
clusterStatusTopicReceiver.setExecutorService(executorService);
clusterStatusTopicReceiver.execute();
if (log.isInfoEnabled()) {
log.info("Cluster status event receiver thread started");
}
instanceStatusTopicReceiver = new InstanceStatusTopicReceiver();
instanceStatusTopicReceiver.setExecutorService(executorService);
instanceStatusTopicReceiver.execute();
if (log.isInfoEnabled()) {
log.info("Instance status event receiver thread started");
}
if (log.isInfoEnabled()) {
log.info("Scheduling topology synchronizer task");
}
ComponentStartUpSynchronizer componentStartUpSynchronizer =
ServiceReferenceHolder.getInstance().getComponentStartUpSynchronizer();
if (componentStartUpSynchronizer.isEnabled()) {
componentStartUpSynchronizer.addEventListener(new ComponentActivationEventListener() {
@Override
public void activated(Component component) {
if (component == Component.StratosManager) {
scheduleEventSynchronizers();
}
}
});
} else {
scheduleEventSynchronizers();
}
}
private void scheduleEventSynchronizers() {
Runnable topologySynchronizer = new TopologyEventSynchronizer();
scheduler.scheduleAtFixedRate(topologySynchronizer, 0, 1, TimeUnit.MINUTES);
}
protected void setTaskService(TaskService taskService) {
if (log.isDebugEnabled()) {
log.debug("Setting the task service");
}
ServiceReferenceHolder.getInstance().setTaskService(taskService);
}
protected void unsetTaskService(TaskService taskService) {
if (log.isDebugEnabled()) {
log.debug("Un-setting the task service");
}
ServiceReferenceHolder.getInstance().setTaskService(null);
}
protected void setRegistryService(RegistryService registryService) {
if (log.isDebugEnabled()) {
log.debug("Setting the Registry Service");
}
try {
UserRegistry registry = registryService.getGovernanceSystemRegistry();
ServiceReferenceHolder.getInstance().setRegistry(registry);
} catch (RegistryException e) {
String msg = "Failed when retrieving Governance System Registry.";
log.error(msg, e);
throw new CloudControllerException(msg, e);
}
}
protected void unsetRegistryService(RegistryService registryService) {
if (log.isDebugEnabled()) {
log.debug("Un-setting the Registry Service");
}
ServiceReferenceHolder.getInstance().setRegistry(null);
}
protected void setConfigurationContextService(ConfigurationContextService cfgCtxService) {
ServiceReferenceHolder.getInstance().setAxisConfiguration(
cfgCtxService.getServerConfigContext().getAxisConfiguration());
}
protected void unsetConfigurationContextService(ConfigurationContextService cfgCtxService) {
ServiceReferenceHolder.getInstance().setAxisConfiguration(null);
}
public void setHazelcastInstance(HazelcastInstance hazelcastInstance) {
ServiceReferenceHolder.getInstance().setHazelcastInstance(hazelcastInstance);
}
public void unsetHazelcastInstance(HazelcastInstance hazelcastInstance) {
ServiceReferenceHolder.getInstance().setHazelcastInstance(null);
}
protected void setDistributedObjectProvider(DistributedObjectProvider distributedObjectProvider) {
ServiceReferenceHolder.getInstance().setDistributedObjectProvider(distributedObjectProvider);
}
protected void unsetDistributedObjectProvider(DistributedObjectProvider distributedObjectProvider) {
ServiceReferenceHolder.getInstance().setDistributedObjectProvider(null);
}
protected void setComponentStartUpSynchronizer(ComponentStartUpSynchronizer componentStartUpSynchronizer) {
ServiceReferenceHolder.getInstance().setComponentStartUpSynchronizer(componentStartUpSynchronizer);
}
protected void unsetComponentStartUpSynchronizer(ComponentStartUpSynchronizer componentStartUpSynchronizer) {
ServiceReferenceHolder.getInstance().setComponentStartUpSynchronizer(null);
}
protected void deactivate(ComponentContext ctx) {
// Close event publisher connections to message broker
try {
EventPublisherPool.close(MessagingUtil.Topics.TOPOLOGY_TOPIC.getTopicName());
} catch (Exception e) {
log.warn("An error occurred while closing cloud controller topology event publisher", e);
}
// Shutdown executor service
shutdownExecutorService(THREAD_POOL_ID);
// Shutdown scheduler
shutdownScheduledExecutorService(SCHEDULER_THREAD_POOL_ID);
}
private void shutdownExecutorService(String executorServiceId) {
ExecutorService executorService = StratosThreadPool.getExecutorService(executorServiceId, 1);
if (executorService != null) {
shutdownExecutorService(executorService);
}
}
private void shutdownScheduledExecutorService(String executorServiceId) {
ExecutorService executorService = StratosThreadPool.getScheduledExecutorService(executorServiceId, 1);
if (executorService != null) {
shutdownExecutorService(executorService);
}
}
private void shutdownExecutorService(ExecutorService executorService) {
try {
executorService.shutdownNow();
} catch (Exception e) {
log.warn("An error occurred while shutting down executor service", e);
}
}
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.template.postfix.util;
import com.intellij.codeInsight.CodeInsightServicesUtil;
import com.intellij.codeInsight.template.postfix.templates.PostfixTemplateExpressionSelector;
import com.intellij.codeInsight.template.postfix.templates.PostfixTemplateExpressionSelectorBase;
import com.intellij.codeInsight.template.postfix.templates.PostfixTemplatePsiInfo;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Conditions;
import com.intellij.psi.*;
import com.intellij.psi.util.InheritanceUtil;
import com.intellij.psi.util.PsiExpressionTrimRenderer;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.refactoring.introduceVariable.IntroduceVariableBase;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collections;
import java.util.List;
import static com.intellij.openapi.util.Conditions.and;
/**
 * Factory methods for postfix-template expression selectors and shared
 * {@link Condition} filters over Java PSI expressions. Not instantiable;
 * use the static members directly.
 */
public abstract class JavaPostfixTemplatesUtils {
  private JavaPostfixTemplatesUtils() {
  }

  /** Selector that offers only the topmost expression statement at the caret. */
  public static PostfixTemplateExpressionSelector selectorTopmost() {
    return selectorTopmost(Conditions.<PsiElement>alwaysTrue());
  }

  /**
   * Selector that offers only the topmost expression statement at the caret,
   * additionally filtered by {@code additionalFilter} and by the absence of
   * PSI errors inside the expression.
   */
  public static PostfixTemplateExpressionSelector selectorTopmost(Condition<PsiElement> additionalFilter) {
    return new PostfixTemplateExpressionSelectorBase(additionalFilter) {
      @Override
      protected List<PsiElement> getNonFilteredExpressions(@NotNull PsiElement context, @NotNull Document document, int offset) {
        return ContainerUtil.<PsiElement>createMaybeSingletonList(getTopmostExpression(context));
      }

      @Override
      protected Condition<PsiElement> getFilters(int offset) {
        // Reject expressions that contain PSI errors in addition to the base filters.
        return and(super.getFilters(offset), getPsiErrorFilter());
      }

      @NotNull
      @Override
      public Function<PsiElement, String> getRenderer() {
        return JavaPostfixTemplatesUtils.getRenderer();
      }
    };
  }

  /** Selector that offers every expression ending at the caret offset. */
  public static PostfixTemplateExpressionSelector selectorAllExpressionsWithCurrentOffset() {
    return selectorAllExpressionsWithCurrentOffset(Conditions.<PsiElement>alwaysTrue());
  }

  /**
   * Selector that offers every expression ending at the caret offset
   * (as collected by the introduce-variable machinery), filtered by
   * {@code additionalFilter}; falls back to the topmost expression when
   * nothing matches. Returns no expressions during dumb mode.
   */
  public static PostfixTemplateExpressionSelector selectorAllExpressionsWithCurrentOffset(final Condition<PsiElement> additionalFilter) {
    return new PostfixTemplateExpressionSelectorBase(additionalFilter) {
      @Override
      protected List<PsiElement> getNonFilteredExpressions(@NotNull PsiElement context, @NotNull Document document, int offset) {
        return ContainerUtil.<PsiElement>newArrayList(IntroduceVariableBase.collectExpressions(context.getContainingFile(), document,
                                                                                              Math.max(offset - 1, 0), false));
      }

      @NotNull
      @Override
      public List<PsiElement> getExpressions(@NotNull PsiElement context, @NotNull Document document, int offset) {
        // Expression collection requires indices; bail out while indexing.
        if (DumbService.getInstance(context.getProject()).isDumb()) return Collections.emptyList();

        List<PsiElement> expressions = super.getExpressions(context, document, offset);
        if (!expressions.isEmpty()) return expressions;

        // Fallback: offer the topmost expression if the offset-based collection found nothing.
        return ContainerUtil.filter(ContainerUtil.<PsiElement>createMaybeSingletonList(getTopmostExpression(context)), getFilters(offset));
      }

      @NotNull
      @Override
      public Function<PsiElement, String> getRenderer() {
        return JavaPostfixTemplatesUtils.getRenderer();
      }
    };
  }

  /** PSI info that wraps expressions in prefix/suffix text and negates boolean conditions. */
  public static final PostfixTemplatePsiInfo JAVA_PSI_INFO = new PostfixTemplatePsiInfo() {
    @NotNull
    @Override
    public PsiElement createExpression(@NotNull PsiElement context,
                                       @NotNull String prefix,
                                       @NotNull String suffix) {
      PsiElementFactory factory = JavaPsiFacade.getInstance(context.getProject()).getElementFactory();
      return factory.createExpressionFromText(prefix + context.getText() + suffix, context);
    }

    @NotNull
    @Override
    public PsiExpression getNegatedExpression(@NotNull PsiElement element) {
      assert element instanceof PsiExpression;
      return CodeInsightServicesUtil.invertCondition((PsiExpression)element);
    }
  };

  // Shared filters. Declared final so the published constants cannot be reassigned.
  public static final Condition<PsiElement> IS_NUMBER = new Condition<PsiElement>() {
    @Override
    public boolean value(PsiElement element) {
      return element instanceof PsiExpression && isNumber(((PsiExpression)element).getType());
    }
  };

  public static final Condition<PsiElement> IS_BOOLEAN = new Condition<PsiElement>() {
    @Override
    public boolean value(PsiElement element) {
      return element instanceof PsiExpression && isBoolean(((PsiExpression)element).getType());
    }
  };

  public static final Condition<PsiElement> IS_THROWABLE = new Condition<PsiElement>() {
    @Override
    public boolean value(PsiElement element) {
      return element instanceof PsiExpression && isThrowable(((PsiExpression)element).getType());
    }
  };

  public static final Condition<PsiElement> IS_NON_VOID = new Condition<PsiElement>() {
    @Override
    public boolean value(PsiElement element) {
      return element instanceof PsiExpression && isNonVoid(((PsiExpression)element).getType());
    }
  };

  public static final Condition<PsiElement> IS_NOT_PRIMITIVE = new Condition<PsiElement>() {
    @Override
    public boolean value(PsiElement element) {
      return element instanceof PsiExpression && isNotPrimitiveTypeExpression((PsiExpression)element);
    }
  };

  public static final Condition<PsiElement> IS_ITERABLE_OR_ARRAY = new Condition<PsiElement>() {
    @Override
    public boolean value(PsiElement element) {
      if (!(element instanceof PsiExpression)) return false;

      PsiType type = ((PsiExpression)element).getType();
      return isArray(type) || isIterable(type);
    }
  };

  /** Returns true if the expression has a resolvable non-primitive type. */
  @Contract("null -> false")
  public static boolean isNotPrimitiveTypeExpression(@Nullable PsiExpression expression) {
    if (expression == null) {
      return false;
    }
    PsiType type = expression.getType();
    return type != null && !(type instanceof PsiPrimitiveType);
  }

  @Contract("null -> false")
  public static boolean isIterable(@Nullable PsiType type) {
    return type != null && InheritanceUtil.isInheritor(type, CommonClassNames.JAVA_LANG_ITERABLE);
  }

  @Contract("null -> false")
  public static boolean isThrowable(@Nullable PsiType type) {
    return type != null && InheritanceUtil.isInheritor(type, CommonClassNames.JAVA_LANG_THROWABLE);
  }

  @Contract("null -> false")
  public static boolean isArray(@Nullable PsiType type) {
    // instanceof is already false for null, so no explicit null check is needed.
    return type instanceof PsiArrayType;
  }

  /** Returns true for {@code boolean} or its boxed counterpart. */
  @Contract("null -> false")
  public static boolean isBoolean(@Nullable PsiType type) {
    return type != null && (PsiType.BOOLEAN.equals(type) || PsiType.BOOLEAN.equals(PsiPrimitiveType.getUnboxedType(type)));
  }

  @Contract("null -> false")
  public static boolean isNonVoid(@Nullable PsiType type) {
    return type != null && !PsiType.VOID.equals(type);
  }

  /** Returns true for {@code int}/{@code byte}/{@code long} or their boxed counterparts. */
  @Contract("null -> false")
  public static boolean isNumber(@Nullable PsiType type) {
    if (type == null) {
      return false;
    }
    if (PsiType.INT.equals(type) || PsiType.BYTE.equals(type) || PsiType.LONG.equals(type)) {
      return true;
    }
    PsiPrimitiveType unboxedType = PsiPrimitiveType.getUnboxedType(type);
    return PsiType.INT.equals(unboxedType) || PsiType.BYTE.equals(unboxedType) || PsiType.LONG.equals(unboxedType);
  }

  /** Renders an expression as abbreviated text for the template chooser popup. */
  @NotNull
  public static Function<PsiElement, String> getRenderer() {
    return new Function<PsiElement, String>() {
      @Override
      public String fun(PsiElement element) {
        assert element instanceof PsiExpression;
        return new PsiExpressionTrimRenderer.RenderFunction().fun((PsiExpression)element);
      }
    };
  }

  /** Returns the expression of the nearest enclosing expression statement, or null. */
  @Nullable
  public static PsiExpression getTopmostExpression(PsiElement context) {
    PsiExpressionStatement statement = PsiTreeUtil.getNonStrictParentOfType(context, PsiExpressionStatement.class);
    return statement != null ? statement.getExpression() : null;
  }
}
| |
/*
* JBoss, Home of Professional Open Source
* Copyright 2012, Red Hat, Inc., and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.weld.tests.unit.reflection.util;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.lang.reflect.Type;
import java.util.List;
import jakarta.enterprise.util.TypeLiteral;
import org.junit.Assert;
import org.jboss.weld.resolution.AssignabilityRules;
import org.jboss.weld.resolution.EventTypeAssignabilityRules;
import org.jboss.weld.util.reflection.GenericArrayTypeImpl;
import org.junit.Test;
/**
 * Tests {@link EventTypeAssignabilityRules}: which observed event types are
 * assignable from which fired event types, across type variables, wildcards,
 * parameterized types and arrays.
 */
@SuppressWarnings("serial")
public class EventTypeAssignabilityTest {

    protected AssignabilityRules getRules() {
        return EventTypeAssignabilityRules.instance();
    }

    @Test
    public <E> void testTypeVariableMatchesFoo() throws Exception {
        Type fooType = Foo.class;
        Type variableType = new TypeLiteral<E>(){}.getType();
        assertTrue("E should be assignable from Foo", getRules().matches(variableType, fooType));
    }

    @Test
    public <E> void testVariableFooMatchesStringFoo() throws Exception {
        Type stringFooType = new TypeLiteral<Foo<String>>(){}.getType();
        Type variableFooType = new TypeLiteral<Foo<E>>(){}.getType();
        assertTrue("Foo<E> should be assignable from Foo<String>", getRules().matches(variableFooType, stringFooType));
    }

    @Test
    public <E> void testVariableFooArrayMatchesStringFooArray() throws Exception {
        Type stringFooArrayType = new TypeLiteral<Foo<String>[]>(){}.getType();
        Type variableFooArrayType = new TypeLiteral<Foo<E>[]>(){}.getType();
        assertTrue("Foo<E>[] should be assignable from Foo<String>[]", getRules().matches(variableFooArrayType, stringFooArrayType));
    }

    @Test
    public <F extends Number> void testParameterizedBeanWithBoundedVariableTypeParameter() throws Exception {
        assertTrue("Foo<F extends Number> should be assignable to Foo",
                getRules().matches(
                        Foo.class,
                        new TypeLiteral<Foo<F>>() { }.getType()));
    }

    @Test
    public void testFooArrayMatchesItself() throws Exception {
        Type clazz = Foo[].class;
        Type genericArrayType = new TypeLiteral<Foo[]>(){}.getType();
        assertTrue("array should match itself", getRules().matches(clazz, clazz));
        assertTrue("array should match itself", getRules().matches(genericArrayType, genericArrayType));
        assertTrue("array should match itself", getRules().matches(genericArrayType, clazz));
        assertTrue("array should match itself", getRules().matches(clazz, genericArrayType));
    }

    @Test
    public void testWildcardMatchesParameterizedType() {
        Type eventType = new TypeLiteral<Foo<List<String>>>() {}.getType();
        Type observerType1 = new TypeLiteral<Foo<? extends List>>() {}.getType();
        Type observerType2 = new TypeLiteral<Foo<?>>() {}.getType();
        Type observerType3 = new TypeLiteral<Foo<? extends List<String>>>() {}.getType();
        assertTrue("Foo<? extends List> should be assignable from Foo<List<String>>", getRules().matches(observerType1, eventType));
        assertTrue("Foo<?> should be assignable from Foo<List<String>>", getRules().matches(observerType2, eventType));
        // Fixed message: the original was missing the closing '>'.
        assertTrue("Foo<? extends List<String>> should be assignable from Foo<List<String>>", getRules().matches(observerType3, eventType));
    }

    @Test
    public void testParameterizedTypes() {
        Type observerType = new TypeLiteral<Foo<Number>>() {}.getType();
        Type eventType1 = new TypeLiteral<Foo<Number>>() {}.getType();
        Type eventType2 = new TypeLiteral<Foo<Integer>>() {}.getType();
        assertTrue("Foo<Number> should be assignable to Foo<Number>", getRules().matches(observerType, eventType1));
        assertFalse("Foo<Integer> should not be assignable to Foo<Number>", getRules().matches(observerType, eventType2));
    }

    @Test
    public void testArrayCovariance1() {
        Type type1 = new Number[0].getClass();
        Type type2 = new Integer[0].getClass();
        assertTrue(getRules().matches(type1, type2));
    }

    @Test
    public void testArrayCovariance2() {
        Type type1 = new GenericArrayTypeImpl(new TypeLiteral<List<?>>() {
        }.getType());
        Type type2 = new List[0].getClass();
        assertTrue(getRules().matches(type2, type1));
    }

    @Test
    public void testBoxingNotAppliedOnArrays() {
        Type type1 = new int[0].getClass();
        Type type2 = new Integer[0].getClass();
        assertFalse(getRules().matches(type1, type2));
    }

    @Test
    public void testWildcardFooArrayMatchesStringFooArray() throws Exception {
        Type stringFooArrayType = new TypeLiteral<Foo<String>[]>() {
        }.getType();
        Type wildcardFooArrayType = new TypeLiteral<Foo<?>[]>() {
        }.getType();
        // Fixed message: the assertion expects a match, but the original message said "should not".
        assertTrue("Foo<?>[] should be assignable from Foo<String>[]", getRules().matches(wildcardFooArrayType, stringFooArrayType));
    }

    @Test
    public void testIntegerFooMatchesItself() throws Exception {
        Type type = new TypeLiteral<Foo<Integer>>() {
        }.getType();
        assertTrue("type should match itself", getRules().matches(type, type));
    }

    @Test
    public void testIntegerFooDoesNotMatchStringFoo() throws Exception {
        Type type1 = new TypeLiteral<Foo<Integer>>() {
        }.getType();
        Type type2 = new TypeLiteral<Foo<String>>() {
        }.getType();
        assertFalse("Foo<Integer> should not match Foo<String>", getRules().matches(type1, type2));
    }

    @Test
    public void testFooMatchesItself() throws Exception {
        Type type = Foo.class;
        assertTrue("type should match itself", getRules().matches(type, type));
    }

    @Test
    public void testParameterizedArrayDoesNotMatchComponentOfArray() throws Exception {
        Type arrayType = new TypeLiteral<Foo<String>[]>() {
        }.getType();
        Type componentType = new TypeLiteral<Foo<String>>() {
        }.getType();
        assertFalse("array type should not match its component type", getRules().matches(arrayType, componentType));
    }

    @Test
    public void testParameterizedArrayMatches() throws Exception {
        Type type = new TypeLiteral<Foo<Integer>[]>() {
        }.getType();
        assertTrue("type should match itself", getRules().matches(type, type));
    }

    @Test
    public void testArraysDontMatch() throws Exception {
        Type type1 = new TypeLiteral<Foo<Integer>[]>() {
        }.getType();
        Type type2 = new TypeLiteral<Foo<String>[]>() {
        }.getType();
        assertFalse("Foo<Integer>[] should not match Foo<String>[]", getRules().matches(type1, type2));
    }

    @Test
    public void testWildcardFooMatchesStringFoo() throws Exception {
        Type stringFooType = new TypeLiteral<Foo<String>>() {
        }.getType();
        Type wildcardFooType = new TypeLiteral<Foo<?>>() {
        }.getType();
        assertTrue("Foo<?> should be assignable from Foo<String>", getRules().matches(wildcardFooType, stringFooType));
    }

    @Test
    public void testStringFooArrayDoesNotMatchWildcardFooArray() throws Exception {
        Type stringFooArrayType = new TypeLiteral<Foo<String>[]>() {
        }.getType();
        Type wildcardFooArrayType = new TypeLiteral<Foo<?>[]>() {
        }.getType();
        assertFalse("Foo<String>[] should not be assignable from Foo<?>[]", getRules().matches(stringFooArrayType, wildcardFooArrayType));
    }

    @Test
    public void testRawRequiredTypeMatchesParameterizedBeanWithObjectTypeParameter() throws Exception {
        assertTrue("Foo<Object> should be assignable to Foo", getRules().matches(Foo.class, new TypeLiteral<Foo<Object>>() {
        }.getType()));
    }

    @Test
    public <T, S extends Integer> void testArrays() {
        assertTrue("int[][] should be assignable to int[][]", getRules().matches(new int[0][].getClass(), new int[0][].getClass()));
        assertTrue("Integer[][] should be assignable to Integer[][]", getRules().matches(new Integer[0][].getClass(), new Integer[0][].getClass()));
        assertTrue("Integer[][] should be assignable to Number[][]", getRules().matches(new Number[0][].getClass(), new Integer[0][].getClass()));
        assertTrue("Integer[][] should be assignable to T[]", getRules().matches(new TypeLiteral<T[]>() {}.getType(), new Integer[0][].getClass()));
        assertTrue("Integer[][] should be assignable to T[][]", getRules().matches(new TypeLiteral<T[][]>() {}.getType(), new Integer[0][].getClass()));
        assertFalse("Integer[][] should not be assignable to S[] where S extends Integer", getRules().matches(new TypeLiteral<S[]>() {}.getType(), new Integer[0][].getClass()));
        assertTrue("Integer[][] should be assignable to S[][] where S extends Integer", getRules().matches(new TypeLiteral<S[][]>() {}.getType(), new Integer[0][].getClass()));
        assertFalse("Number[][] should not be assignable to S[][] where S extends Integer", getRules().matches(new TypeLiteral<S[][]>() {}.getType(), new Number[0][].getClass()));
    }

    @Test
    public void testArrayBoxing() {
        /*
         * This is not explicitly said in the CDI spec however Java SE does not support array boxing so neither should CDI.
         */
        assertFalse("Integer[] should not be assignable to int[]", getRules().matches(new int[0].getClass(), new Integer[0].getClass()));
        assertFalse("int[] should not be assignable to Integer[]", getRules().matches(new Integer[0].getClass(), new int[0].getClass()));
    }

    @Test
    public <T1 extends Number, T2 extends T1> void testTypeVariableWithTypeVariableBound() {
        assertTrue("Number should be assignable to T2 extends T1 extends Number", getRules().matches(new TypeLiteral<T2>() {}.getType(), Number.class));
        assertFalse("Number should not be assignable to T2 extends T1 extends Runnable", getRules().matches(new TypeLiteral<T2>() {}.getType(), Runnable.class));
    }

    @Test
    public <T1 extends Number, T2 extends T1> void testWildcardWithTypeVariableBound() {
        assertTrue("List<Number> should be assignable to List<? extends T2 extends T1 extends Number>", getRules().matches(new TypeLiteral<List<? extends T2>>() {}.getType(), new TypeLiteral<List<Number>>() {}.getType()));
        assertTrue("List<Integer> should be assignable to List<? extends T2 extends T1 extends Number>", getRules().matches(new TypeLiteral<List<? extends T2>>() {}.getType(), new TypeLiteral<List<Integer>>() {}.getType()));
        assertFalse("List<Object> should not be assignable to List<? extends T2 extends T1 extends Number>", getRules().matches(new TypeLiteral<List<? extends T2>>() {}.getType(), new TypeLiteral<List<Object>>() {}.getType()));
        assertTrue("List<Number> should be assignable to List<? super T2 extends T1 extends Number>", getRules().matches(new TypeLiteral<List<? super T2>>() {}.getType(), new TypeLiteral<List<Number>>() {}.getType()));
        assertFalse("List<Integer> should not be assignable to List<? super T2 extends T1 extends Number>", getRules().matches(new TypeLiteral<List<? super T2>>() {}.getType(), new TypeLiteral<List<Integer>>() {}.getType()));
        assertTrue("List<Object> should be assignable to List<? super T2 extends T1 extends Number>", getRules().matches(new TypeLiteral<List<? super T2>>() {}.getType(), new TypeLiteral<List<Object>>() {}.getType()));
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.protocol.stomp.v11;
import javax.security.cert.X509Certificate;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.activemq.artemis.core.protocol.stomp.ActiveMQStompException;
import org.apache.activemq.artemis.core.protocol.stomp.FrameEventListener;
import org.apache.activemq.artemis.core.protocol.stomp.SimpleBytes;
import org.apache.activemq.artemis.core.protocol.stomp.Stomp;
import org.apache.activemq.artemis.core.protocol.stomp.StompConnection;
import org.apache.activemq.artemis.core.protocol.stomp.StompDecoder;
import org.apache.activemq.artemis.core.protocol.stomp.StompFrame;
import org.apache.activemq.artemis.core.protocol.stomp.VersionedStompFrameHandler;
import org.apache.activemq.artemis.core.remoting.impl.netty.NettyConnection;
import org.apache.activemq.artemis.core.remoting.impl.netty.TransportConstants;
import org.apache.activemq.artemis.core.remoting.server.impl.RemotingServiceImpl;
import org.apache.activemq.artemis.core.server.ActiveMQScheduledComponent;
import org.apache.activemq.artemis.core.server.ActiveMQServerLogger;
import org.apache.activemq.artemis.spi.core.protocol.ConnectionEntry;
import org.apache.activemq.artemis.utils.CertificateUtil;
import org.apache.activemq.artemis.utils.ExecutorFactory;
import static org.apache.activemq.artemis.core.protocol.stomp.ActiveMQStompProtocolMessageBundle.BUNDLE;
public class StompFrameHandlerV11 extends VersionedStompFrameHandler implements FrameEventListener {
protected static final char ESC_CHAR = '\\';
private HeartBeater heartBeater;
   /**
    * Creates a STOMP 1.1 frame handler for the given connection, registers
    * itself as a frame-event listener (so replySent/requestAccepted callbacks
    * fire for heart-beat bookkeeping) and installs the v1.1 decoder.
    */
   public StompFrameHandlerV11(StompConnection connection,
                               ScheduledExecutorService scheduledExecutorService,
                               ExecutorFactory executorFactory) {
      super(connection, scheduledExecutorService, executorFactory);
      connection.addStompEventListener(this);
      decoder = new StompDecoderV11(this);
      decoder.init();
   }
   /**
    * @return the heart-beater component, or {@code null} if no heart-beat
    *         header was processed for this connection yet
    */
   public ActiveMQScheduledComponent getHeartBeater() {
      return heartBeater;
   }
   /**
    * Handles the CONNECT frame: authenticates the client (including TLS client
    * certificates when the transport is Netty) and, on success, builds a
    * CONNECTED response carrying version/session/server and — when the client
    * sent a heart-beat header — the negotiated heart-beat values. On failure
    * an ERROR frame flagged for disconnect is returned instead.
    *
    * @return the CONNECTED or ERROR frame to send back to the client
    */
   @Override
   public StompFrame onConnect(StompFrame frame) {
      StompFrame response = null;
      Map<String, String> headers = frame.getHeadersMap();
      String login = headers.get(Stomp.Headers.Connect.LOGIN);
      String passcode = headers.get(Stomp.Headers.Connect.PASSCODE);
      String clientID = headers.get(Stomp.Headers.Connect.CLIENT_ID);
      String requestID = headers.get(Stomp.Headers.Connect.REQUEST_ID);
      // TLS client certificates (if present) participate in authentication.
      X509Certificate[] certificates = null;
      if (connection.getTransportConnection() instanceof NettyConnection) {
         certificates = CertificateUtil.getCertsFromChannel(((NettyConnection) connection.getTransportConnection()).getChannel());
      }
      try {
         if (connection.validateUser(login, passcode, certificates)) {
            connection.setClientID(clientID);
            connection.setValid(true);
            response = this.createStompFrame(Stomp.Responses.CONNECTED);
            // version
            response.addHeader(Stomp.Headers.Connected.VERSION, connection.getVersion());
            // session
            response.addHeader(Stomp.Headers.Connected.SESSION, connection.getID().toString());
            // server
            response.addHeader(Stomp.Headers.Connected.SERVER, connection.getActiveMQServerName());
            if (requestID != null) {
               response.addHeader(Stomp.Headers.Connected.RESPONSE_ID, requestID);
            }
            // heart-beat. We need to start after connected frame has been sent.
            // otherwise the client may receive heart-beat before it receives
            // connected frame.
            String heartBeat = headers.get(Stomp.Headers.Connect.HEART_BEAT);
            if (heartBeat != null) {
               handleHeartBeat(heartBeat);
               if (heartBeater == null) {
                  // NOTE(review): handleHeartBeat creates the heart-beater whenever
                  // it is null, so this branch looks unreachable — confirm.
                  response.addHeader(Stomp.Headers.Connected.HEART_BEAT, "0,0");
               } else {
                  response.addHeader(Stomp.Headers.Connected.HEART_BEAT, heartBeater.serverPingPeriod + "," + heartBeater.clientPingResponse);
               }
            }
         } else {
            // not valid: report the failure and ask for a disconnect.
            response = createStompFrame(Stomp.Responses.ERROR);
            response.setNeedsDisconnect(true);
            response.addHeader(Stomp.Headers.CONTENT_TYPE, "text/plain");
            String responseText = "Security Error occurred: User name [" + login + "] or password is invalid";
            response.setBody(responseText);
            response.addHeader(Stomp.Headers.Error.MESSAGE, responseText);
         }
      } catch (ActiveMQStompException e) {
         response = e.getFrame();
      }
      return response;
   }
private void handleHeartBeat(String heartBeatHeader) throws ActiveMQStompException {
String[] params = heartBeatHeader.split(",");
if (params.length != 2) {
throw new ActiveMQStompException(connection, "Incorrect heartbeat header " + heartBeatHeader);
}
//client ping
long minPingInterval = Long.valueOf(params[0]);
//client receive ping
long minAcceptInterval = Long.valueOf(params[1]);
if (heartBeater == null) {
heartBeater = new HeartBeater(scheduledExecutorService, executorFactory.getExecutor(), minPingInterval, minAcceptInterval);
}
}
   /**
    * Handles a DISCONNECT frame: stops the heart-beater via {@link #disconnect()}.
    * Returns {@code null} — no response frame is produced here.
    */
   @Override
   public StompFrame onDisconnect(StompFrame frame) {
      disconnect();
      return null;
   }
   /**
    * Tears down handler-local state: shuts the heart-beater down if one was
    * created. Does not itself close the underlying connection.
    */
   @Override
   protected void disconnect() {
      if (this.heartBeater != null) {
         heartBeater.shutdown();
      }
   }
@Override
public StompFrame onUnsubscribe(StompFrame request) {
StompFrame response = null;
//unsubscribe in 1.1 only needs id header
String id = request.getHeader(Stomp.Headers.Unsubscribe.ID);
String durableSubscriptionName = request.getHeader(Stomp.Headers.Unsubscribe.DURABLE_SUBSCRIBER_NAME);
if (durableSubscriptionName == null) {
durableSubscriptionName = request.getHeader(Stomp.Headers.Unsubscribe.DURABLE_SUBSCRIPTION_NAME);
}
String subscriptionID = null;
if (id != null) {
subscriptionID = id;
} else if (durableSubscriptionName == null) {
response = BUNDLE.needSubscriptionID().setHandler(this).getFrame();
return response;
}
try {
connection.unsubscribe(subscriptionID, durableSubscriptionName);
} catch (ActiveMQStompException e) {
response = e.getFrame();
}
return response;
}
@Override
public StompFrame onAck(StompFrame request) {
   final String messageID = request.getHeader(Stomp.Headers.Ack.MESSAGE_ID);
   final String txID = request.getHeader(Stomp.Headers.TRANSACTION);
   final String subscriptionID = request.getHeader(Stomp.Headers.Ack.SUBSCRIPTION);

   // Transactional ACK is not implemented; log a warning and fall through to a
   // plain acknowledge.
   if (txID != null) {
      ActiveMQServerLogger.LOGGER.stompTXAckNorSupported();
   }

   // STOMP 1.1 requires the subscription header on ACK frames.
   if (subscriptionID == null) {
      return BUNDLE.needSubscriptionID().setHandler(this).getFrame();
   }

   try {
      connection.acknowledge(messageID, subscriptionID);
   } catch (ActiveMQStompException e) {
      return e.getFrame();
   }
   return null;
}
@Override
public StompFrame onStomp(StompFrame request) {
   // A STOMP frame on a not-yet-authenticated connection is treated exactly
   // like CONNECT; on a valid connection it is a no-op.
   if (!connection.isValid()) {
      return onConnect(request);
   }
   return null;
}

@Override
public StompFrame onNack(StompFrame request) {
   //this eventually means discard the message (it will never be redelivered again).
   //we can consider supporting redeliver to a different sub.
   return onAck(request);
}
@Override
public void replySent(StompFrame reply) {
   if (reply.getCommand().equals(Stomp.Responses.CONNECTED)) {
      //kick off the pinger once the handshake reply has gone out
      startHeartBeat();
   }

   if (reply.needsDisconnect()) {
      connection.disconnect(false);
   } else {
      //update ping: any outbound frame counts as server activity
      if (heartBeater != null) {
         heartBeater.pinged();
      }
   }
}

// Starts the server-side pinger, but only if server heart-beats were
// negotiated (serverPingPeriod == 0 means the client asked for none).
private void startHeartBeat() {
   if (heartBeater != null && heartBeater.serverPingPeriod != 0) {
      heartBeater.start();
   }
}

// Builds a STOMP frame flagged as a ping; used for server heart-beats.
public StompFrame createPingFrame() {
   StompFrame frame = createStompFrame(Stomp.Commands.STOMP);
   frame.setPing(true);
   return frame;
}
/*
 * HeartBeater functions:
 * (a) server ping: if server hasn't sent any frame within serverPingPeriod interval, send a ping
 * (b) configure connection ttl so that org.apache.activemq.artemis.core.remoting.server.impl.RemotingServiceImpl.FailureCheckAndFlushThread
 * can deal with closing connections which go stale
 */
private class HeartBeater extends ActiveMQScheduledComponent {

   // Lower bound (ms) for the server ping interval, regardless of what the
   // client requested.
   private static final int MIN_SERVER_PING = 500;

   // Interval (ms) at which the server sends pings; 0 means server heart-beats
   // were not negotiated (see startHeartBeat()).
   long serverPingPeriod = 0;
   // The heart-beat value advertised back to the client in the CONNECTED frame.
   long clientPingResponse;
   volatile boolean shutdown = false;
   // Timestamp of the last outbound activity (updated on every frame written).
   AtomicLong lastPingTimestamp = new AtomicLong(0);
   ConnectionEntry connectionEntry;

   private HeartBeater(ScheduledExecutorService scheduledExecutorService,
                       Executor executor,
                       final long clientPing,
                       final long clientAcceptPing) {
      // Scheduled period = the client's requested server-ping interval, clamped
      // up to MIN_SERVER_PING.
      super(scheduledExecutorService, executor, clientAcceptPing > MIN_SERVER_PING ? clientAcceptPing : MIN_SERVER_PING, TimeUnit.MILLISECONDS, false);

      if (clientAcceptPing != 0) {
         serverPingPeriod = super.getPeriod();
      }

      connectionEntry = ((RemotingServiceImpl) connection.getManager().getServer().getRemotingService()).getConnectionEntry(connection.getID());

      if (connectionEntry != null) {
         // Factor translating a heart-beat interval into a connection TTL
         // (defaults to 2x when the acceptor does not configure it).
         String heartBeatToTtlModifierStr = (String) connection.getAcceptorUsed().getConfiguration().get(TransportConstants.HEART_BEAT_TO_CONNECTION_TTL_MODIFIER);
         double heartBeatToTtlModifier = heartBeatToTtlModifierStr == null ? 2 : Double.valueOf(heartBeatToTtlModifierStr);

         // the default response to the client
         clientPingResponse = (long) (connectionEntry.ttl / heartBeatToTtlModifier);

         if (clientPing != 0) {
            clientPingResponse = clientPing;
            String ttlMaxStr = (String) connection.getAcceptorUsed().getConfiguration().get(TransportConstants.CONNECTION_TTL_MAX);
            long ttlMax = ttlMaxStr == null ? Long.MAX_VALUE : Long.valueOf(ttlMaxStr);
            String ttlMinStr = (String) connection.getAcceptorUsed().getConfiguration().get(TransportConstants.CONNECTION_TTL_MIN);
            long ttlMin = ttlMinStr == null ? 1000 : Long.valueOf(ttlMinStr);

            /* The connection's TTL should be one of the following:
             * 1) clientPing * heartBeatToTtlModifier
             * 2) ttlMin
             * 3) ttlMax
             */
            long connectionTtl = (long) (clientPing * heartBeatToTtlModifier);
            if (connectionTtl < ttlMin) {
               connectionTtl = ttlMin;
               // keep the advertised heart-beat consistent with the clamped TTL
               clientPingResponse = (long) (ttlMin / heartBeatToTtlModifier);
            } else if (connectionTtl > ttlMax) {
               connectionTtl = ttlMax;
               clientPingResponse = (long) (ttlMax / heartBeatToTtlModifier);
            }

            if (ActiveMQServerLogger.LOGGER.isDebugEnabled()) {
               ActiveMQServerLogger.LOGGER.debug("Setting STOMP client TTL to: " + connectionTtl);
            }
            connectionEntry.ttl = connectionTtl;
         }
      }
   }

   public void shutdown() {
      this.stop();
   }

   // Records outbound activity; called whenever any frame is written.
   public void pinged() {
      lastPingTimestamp.set(System.currentTimeMillis());
   }

   @Override
   public void run() {
      // Periodic task: send a ping frame and record the send time.
      lastPingTimestamp.set(System.currentTimeMillis());
      connection.ping(createPingFrame());
   }
}
@Override
public void requestAccepted(StompFrame request) {
   // No per-request bookkeeping is needed for STOMP 1.1.
}

@Override
public StompFrame createStompFrame(String command) {
   // Frames produced by this handler use 1.1 frame semantics.
   return new StompFrameV11(command);
}

@Override
public void initDecoder(VersionedStompFrameHandler existingHandler) {
   // Carry over any partially-buffered bytes from the previous (pre-upgrade) decoder.
   decoder.init(existingHandler.getDecoder());
}
/**
 * Frame decoder for STOMP 1.1. Compared with 1.0 it tolerates extra EOLs
 * ("\n" or "\r\n") between frames, decodes the header escape sequences
 * "\\", "\n" and "\c", and validates the EOL terminating the command line.
 */
protected class StompDecoderV11 extends StompDecoder {

   // true while the previously consumed header byte was the escape char '\';
   // the next byte must complete a valid escape sequence
   protected boolean isEscaping = false;

   // accumulates the bytes of the header name/value currently being parsed
   protected SimpleBytes holder = new SimpleBytes(1024);

   public StompDecoderV11(StompFrameHandlerV11 handler) {
      super(handler);
   }

   // Adopts the unconsumed buffer state of a previous decoder
   // (used when a connection upgrades from an earlier STOMP version).
   @Override
   public void init(StompDecoder decoder) {
      this.data = decoder.data;
      this.workingBuffer = decoder.workingBuffer;
      this.pos = decoder.pos;
      this.command = decoder.command;
   }

   // Resets per-frame state between frames.
   @Override
   public void init() {
      super.init();
      isEscaping = false;
      holder.reset();
   }

   /**
    * Identifies the frame command from the buffered bytes.
    * Returns false when more bytes are needed; throws on a malformed command.
    */
   @Override
   protected boolean parseCommand() throws ActiveMQStompException {
      int offset = 0;
      boolean nextChar = false;

      //check for ping
      // Some badly behaved STOMP clients add a \n *after* the terminating NUL char at the end of the
      // STOMP frame this can manifest as an extra \n at the beginning when the
      // next STOMP frame is read - we need to deal with this.
      // Besides, Stomp 1.2 allows for extra EOLs after NULL (i.e.
      // either "[\r]\n"s or "\n"s)
      while (true) {
         if (workingBuffer[offset] == NEW_LINE) {
            //client ping
            nextChar = false;
         } else if (workingBuffer[offset] == CR) {
            // two consecutive CRs without an intervening LF is malformed
            if (nextChar)
               throw BUNDLE.invalidTwoCRs().setHandler(handler);
            nextChar = true;
         } else {
            break;
         }
         offset++;
         if (offset == data)
            return false; //no more bytes
      }

      if (nextChar) {
         // a trailing CR not followed by LF is malformed
         throw BUNDLE.badCRs().setHandler(handler);
      }

      //if some EOLs have been processed, drop those bytes before parsing command
      if (offset > 0) {
         System.arraycopy(workingBuffer, offset, workingBuffer, 0, data - offset);
         data = data - offset;
         offset = 0;
      }

      if (data < 4) {
         // Need at least four bytes to identify the command
         // - up to 3 bytes for the command name + potentially another byte for a leading \n
         return false;
      }

      // Dispatch on the first byte; where commands share a prefix, peek at the
      // first distinguishing byte.
      byte b = workingBuffer[offset];

      switch (b) {
         case A: {
            if (workingBuffer[offset + 1] == StompDecoder.B) {
               if (!tryIncrement(offset + COMMAND_ABORT_LENGTH + eolLen)) {
                  return false;
               }
               // ABORT
               command = COMMAND_ABORT;
            } else {
               if (!tryIncrement(offset + COMMAND_ACK_LENGTH + eolLen)) {
                  return false;
               }
               // ACK
               command = COMMAND_ACK;
            }
            break;
         }
         case B: {
            if (!tryIncrement(offset + COMMAND_BEGIN_LENGTH + eolLen)) {
               return false;
            }
            // BEGIN
            command = COMMAND_BEGIN;
            break;
         }
         case C: {
            if (workingBuffer[offset + 2] == M) {
               if (!tryIncrement(offset + COMMAND_COMMIT_LENGTH + eolLen)) {
                  return false;
               }
               // COMMIT
               command = COMMAND_COMMIT;
            } else if (workingBuffer[offset + 7] == E) {
               if (!tryIncrement(offset + COMMAND_CONNECTED_LENGTH + eolLen)) {
                  return false;
               }
               // CONNECTED
               command = COMMAND_CONNECTED;
            } else {
               if (!tryIncrement(offset + COMMAND_CONNECT_LENGTH + eolLen)) {
                  return false;
               }
               // CONNECT
               command = COMMAND_CONNECT;
            }
            break;
         }
         case D: {
            if (!tryIncrement(offset + COMMAND_DISCONNECT_LENGTH + eolLen)) {
               return false;
            }
            // DISCONNECT
            command = COMMAND_DISCONNECT;
            break;
         }
         case R: {
            if (!tryIncrement(offset + COMMAND_RECEIPT_LENGTH + eolLen)) {
               return false;
            }
            // RECEIPT
            command = COMMAND_RECEIPT;
            break;
         }
         /**** added by meddy, 27 april 2011, handle header parser for reply to websocket protocol ****/
         case E: {
            if (!tryIncrement(offset + COMMAND_ERROR_LENGTH + eolLen)) {
               return false;
            }
            // ERROR
            command = COMMAND_ERROR;
            break;
         }
         case M: {
            if (!tryIncrement(offset + COMMAND_MESSAGE_LENGTH + eolLen)) {
               return false;
            }
            // MESSAGE
            command = COMMAND_MESSAGE;
            break;
         }
         /**** end ****/
         case S: {
            if (workingBuffer[offset + 1] == E) {
               if (!tryIncrement(offset + COMMAND_SEND_LENGTH + eolLen)) {
                  return false;
               }
               // SEND
               command = COMMAND_SEND;
            } else if (workingBuffer[offset + 1] == U) {
               if (!tryIncrement(offset + COMMAND_SUBSCRIBE_LENGTH + eolLen)) {
                  return false;
               }
               // SUBSCRIBE
               command = COMMAND_SUBSCRIBE;
            } else {
               if (!tryIncrement(offset + StompDecoder.COMMAND_STOMP_LENGTH + eolLen)) {
                  return false;
               }
               // STOMP
               command = COMMAND_STOMP;
            }
            break;
         }
         case U: {
            if (!tryIncrement(offset + COMMAND_UNSUBSCRIBE_LENGTH + eolLen)) {
               return false;
            }
            // UNSUBSCRIBE
            command = COMMAND_UNSUBSCRIBE;
            break;
         }
         case N: {
            if (!tryIncrement(offset + COMMAND_NACK_LENGTH + eolLen)) {
               return false;
            }
            //NACK
            command = COMMAND_NACK;
            break;
         }
         default: {
            throwInvalid();
         }
      }

      // the byte after the command name must be an EOL
      checkEol();

      return true;
   }

   // Verifies that the byte just consumed is '\n'.
   protected void checkEol() throws ActiveMQStompException {
      if (workingBuffer[pos - 1] != NEW_LINE) {
         throwInvalid();
      }
   }

   // Reports an escape sequence other than \\, \n or \c as a protocol error.
   protected void throwUndefinedEscape(byte b) throws ActiveMQStompException {
      ActiveMQStompException error = BUNDLE.undefinedEscapeSequence(new String(new char[]{ESC_CHAR, (char) b})).setHandler(handler);
      error.setCode(ActiveMQStompException.UNDEFINED_ESCAPE);
      throw error;
   }

   /**
    * Consumes header bytes up to the blank line separating headers from body,
    * decoding 1.1 escape sequences along the way.
    * Returns false when more bytes are needed.
    */
   @Override
   protected boolean parseHeaders() throws ActiveMQStompException {
      outer:
      while (true) {
         byte b = workingBuffer[pos++];

         switch (b) {
            //escaping
            case ESC_CHAR: {
               if (isEscaping) {
                  //this is a backslash ("\\" decodes to a literal backslash)
                  holder.append(b);
                  isEscaping = false;
               } else {
                  //begin escaping
                  isEscaping = true;
               }
               break;
            }
            case HEADER_SEPARATOR: {
               // ':' ends the header name; later colons belong to the value
               if (inHeaderName) {
                  headerName = holder.getString();
                  holder.reset();
                  inHeaderName = false;
                  headerValueWhitespace = true;
               }
               whiteSpaceOnly = false;
               break;
            }
            case StompDecoder.LN: {
               if (isEscaping) {
                  // "\n" escape decodes to a literal newline
                  holder.append(StompDecoder.NEW_LINE);
                  isEscaping = false;
               } else {
                  holder.append(b);
               }
               break;
            }
            case StompDecoder.c: {
               if (isEscaping) {
                  // "\c" escape decodes to a literal colon
                  holder.append(StompDecoder.HEADER_SEPARATOR);
                  isEscaping = false;
               } else {
                  holder.append(b);
               }
               break;
            }
            case StompDecoder.NEW_LINE: {
               if (whiteSpaceOnly) {
                  // Headers are terminated by a blank line
                  readingHeaders = false;
                  break outer;
               }

               // end of one header: record it and note the well-known ones
               String headerValue = holder.getString();
               holder.reset();
               headers.put(headerName, headerValue);

               if (headerName.equals(Stomp.Headers.CONTENT_LENGTH)) {
                  contentLength = Integer.parseInt(headerValue);
               }

               if (headerName.equals(Stomp.Headers.CONTENT_TYPE)) {
                  contentType = headerValue;
               }

               whiteSpaceOnly = true;
               inHeaderName = true;
               headerValueWhitespace = false;
               break;
            }
            default: {
               whiteSpaceOnly = false;
               headerValueWhitespace = false;
               if (isEscaping) {
                  // any other byte after '\' is an undefined escape -> protocol error
                  throwUndefinedEscape(b);
               }
               holder.append(b);
            }
         }
         if (pos == data) {
            // Run out of data
            return false;
         }
      }
      return true;
   }

   /**
    * Extracts the frame body, either by the declared content-length or by
    * scanning for the terminating NUL; returns null when more bytes are needed.
    */
   @Override
   protected StompFrame parseBody() throws ActiveMQStompException {
      byte[] content = null;

      if (contentLength != -1) {
         // body size is known up front (+1 accounts for the trailing NUL)
         if (pos + contentLength + 1 > data) {
            // Need more bytes
         } else {
            content = new byte[contentLength];
            System.arraycopy(workingBuffer, pos, content, 0, contentLength);
            pos += contentLength;
            //drain all the rest
            if (bodyStart == -1) {
               bodyStart = pos;
            }
            while (pos < data) {
               if (workingBuffer[pos++] == 0) {
                  break;
               }
            }
         }
      } else {
         // Need to scan for terminating NUL
         if (bodyStart == -1) {
            bodyStart = pos;
         }
         while (pos < data) {
            if (workingBuffer[pos++] == 0) {
               content = new byte[pos - bodyStart - 1];
               System.arraycopy(workingBuffer, bodyStart, content, 0, content.length);
               break;
            }
         }
      }

      if (content != null) {
         // shift any bytes of the next frame to the front of the buffer
         if (data > pos) {
            if (workingBuffer[pos] == NEW_LINE)
               pos++;

            if (data > pos)
               // More data still in the buffer from the next packet
               System.arraycopy(workingBuffer, pos, workingBuffer, 0, data - pos);
         }
         data = data - pos;

         // reset
         StompFrame ret = new StompFrameV11(command, headers, content);
         init();
         return ret;
      } else {
         return null;
      }
   }
}
}
| |
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.android;
import com.google.common.base.Stopwatch;
import com.google.common.collect.ImmutableList;
import com.google.common.io.ByteStreams;
import com.google.devtools.build.android.AndroidResourceProcessor.AaptConfigOptions;
import com.google.devtools.build.android.AndroidResourceProcessor.FlagAaptOptions;
import com.google.devtools.build.android.Converters.ExistingPathConverter;
import com.google.devtools.build.android.Converters.PathConverter;
import com.google.devtools.build.android.Converters.PathListConverter;
import com.google.devtools.common.options.Converters.CommaSeparatedOptionListConverter;
import com.google.devtools.common.options.Option;
import com.google.devtools.common.options.OptionsBase;
import com.google.devtools.common.options.OptionsParser;
import com.android.builder.core.VariantConfiguration;
import com.android.ide.common.xml.AndroidManifestParser;
import com.android.ide.common.xml.ManifestData;
import com.android.io.StreamException;
import com.android.utils.StdLogger;
import org.xml.sax.SAXException;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import javax.xml.parsers.ParserConfigurationException;
/**
* An action to perform resource shrinking using the Gradle resource shrinker.
*
* <pre>
* Example Usage:
* java/com/google/build/android/ResourceShrinkerAction
* --aapt path to sdk/aapt
* --annotationJar path to sdk/annotationJar
* --androidJar path to sdk/androidJar
* --shrunkJar path to proguard dead code removal jar
* --resources path to processed resources zip
* --rTxt path to processed resources R.txt
* --primaryManifest path to processed resources AndroidManifest.xml
* --dependencyManifests paths to dependency library manifests
* --shrunkResourceApk path to write shrunk ap_
* --shrunkResources path to write shrunk resources zip
* </pre>
*/
public class ResourceShrinkerAction {
private static final StdLogger stdLogger = new StdLogger(StdLogger.Level.WARNING);
private static final Logger logger = Logger.getLogger(ResourceShrinkerAction.class.getName());
/** Flag specifications for this action. */
public static final class Options extends OptionsBase {
@Option(name = "shrunkJar",
defaultValue = "null",
category = "input",
converter = ExistingPathConverter.class,
help = "Path to the shrunk jar from a Proguard run with shrinking enabled.")
public Path shrunkJar;
@Option(name = "resources",
defaultValue = "null",
category = "input",
converter = ExistingPathConverter.class,
help = "Path to the resources zip to be shrunk.")
public Path resourcesZip;
@Option(name = "rTxt",
defaultValue = "null",
category = "input",
converter = ExistingPathConverter.class,
help = "Path to the R.txt of the complete resource tree.")
public Path rTxt;
@Option(name = "primaryManifest",
defaultValue = "null",
category = "input",
converter = ExistingPathConverter.class,
help = "Path to the primary manifest for the resources to be shrunk.")
public Path primaryManifest;
@Option(name = "dependencyManifests",
defaultValue = "",
category = "input",
converter = PathListConverter.class,
help = "A list of paths to the manifests of the dependencies.")
public List<Path> dependencyManifests;
@Option(name = "resourcePackages",
defaultValue = "",
category = "input",
converter = CommaSeparatedOptionListConverter.class,
help = "A list of packages that resources have been generated for.")
public List<String> resourcePackages;
@Option(name = "shrunkResourceApk",
defaultValue = "null",
category = "output",
converter = PathConverter.class,
help = "Path to where the shrunk resource.ap_ should be written.")
public Path shrunkApk;
@Option(name = "shrunkResources",
defaultValue = "null",
category = "output",
converter = PathConverter.class,
help = "Path to where the shrunk resource.ap_ should be written.")
public Path shrunkResources;
@Option(name = "log",
defaultValue = "null",
category = "output",
converter = PathConverter.class,
help = "Path to where the shrinker log should be written.")
public Path log;
}
private static AaptConfigOptions aaptConfigOptions;
private static Options options;
private static String getManifestPackage(Path manifest)
throws SAXException, IOException, StreamException, ParserConfigurationException {
ManifestData manifestData = AndroidManifestParser.parse(Files.newInputStream(manifest));
return manifestData.getPackage();
}
private static Set<String> getManifestPackages(Path primaryManifest, List<Path> otherManifests)
throws SAXException, IOException, StreamException, ParserConfigurationException {
Set<String> manifestPackages = new HashSet<>();
manifestPackages.add(getManifestPackage(primaryManifest));
for (Path manifest : otherManifests) {
manifestPackages.add(getManifestPackage(manifest));
}
return manifestPackages;
}
public static void main(String[] args) throws Exception {
final Stopwatch timer = Stopwatch.createStarted();
// Parse arguments.
OptionsParser optionsParser = OptionsParser.newOptionsParser(
Options.class, AaptConfigOptions.class);
optionsParser.parseAndExitUponError(args);
aaptConfigOptions = optionsParser.getOptions(AaptConfigOptions.class);
options = optionsParser.getOptions(Options.class);
AndroidResourceProcessor resourceProcessor = new AndroidResourceProcessor(stdLogger);
// Setup temporary working directories.
try (ScopedTemporaryDirectory scopedTmp =
new ScopedTemporaryDirectory("resource_shrinker_tmp")) {
Path working = scopedTmp.getPath();
final Path resourceFiles = working.resolve("resource_files");
final Path shrunkResources = working.resolve("shrunk_resources");
// Gather package list from manifests.
Set<String> resourcePackages = getManifestPackages(
options.primaryManifest, options.dependencyManifests);
resourcePackages.addAll(options.resourcePackages);
// Expand resource files zip into working directory.
try (ZipInputStream zin = new ZipInputStream(
new FileInputStream(options.resourcesZip.toFile()))) {
ZipEntry entry;
while ((entry = zin.getNextEntry()) != null) {
if (!entry.isDirectory()) {
Path output = resourceFiles.resolve(entry.getName());
Files.createDirectories(output.getParent());
try (FileOutputStream fos = new FileOutputStream(output.toFile())) {
ByteStreams.copy(zin, fos);
}
}
}
}
// Shrink resources.
ResourceShrinker resourceShrinker = new ResourceShrinker(
resourcePackages,
options.rTxt,
options.shrunkJar,
options.primaryManifest,
resourceFiles.resolve("res"),
options.log);
resourceShrinker.shrink(shrunkResources);
logger.fine(String.format("Shrinking resources finished at %sms",
timer.elapsed(TimeUnit.MILLISECONDS)));
// Build ap_ with shrunk resources.
resourceProcessor.processResources(
aaptConfigOptions.aapt,
aaptConfigOptions.androidJar,
aaptConfigOptions.buildToolsVersion,
VariantConfiguration.Type.DEFAULT,
aaptConfigOptions.debug,
null /* packageForR */,
new FlagAaptOptions(aaptConfigOptions),
aaptConfigOptions.resourceConfigs,
ImmutableList.<String>of() /* splits */,
new MergedAndroidData(
shrunkResources, resourceFiles.resolve("assets"), options.primaryManifest),
ImmutableList.<DependencyAndroidData>of() /* libraries */,
null /* sourceOutputDir */,
options.shrunkApk,
null /* proguardOutput */,
null /* mainDexProguardOutput */,
null /* publicResourcesOut */);
if (options.shrunkResources != null) {
resourceProcessor.createResourcesZip(shrunkResources, resourceFiles.resolve("assets"),
options.shrunkResources);
}
logger.fine(String.format("Packing resources finished at %sms",
timer.elapsed(TimeUnit.MILLISECONDS)));
} catch (Exception e) {
logger.log(Level.SEVERE, "Error shrinking resources", e);
throw e;
} finally {
resourceProcessor.shutdown();
}
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.engine;
import com.google.common.base.Preconditions;
import org.apache.lucene.index.*;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SearcherManager;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.Base64;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.util.concurrent.ReleasableLock;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.deletionpolicy.SnapshotDeletionPolicy;
import org.elasticsearch.index.deletionpolicy.SnapshotIndexCommit;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.merge.MergeStats;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.translog.Translog;
import java.io.Closeable;
import java.io.IOException;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
/**
*
*/
public abstract class Engine implements Closeable {
// Commit-data key under which a synced-flush id is stored (see syncFlush).
public static final String SYNC_COMMIT_ID = "sync_id";

protected final ShardId shardId;
protected final ESLogger logger;
protected final EngineConfig engineConfig;
protected final Store store;
// flipped once on close; checked by ensureOpen() before operations proceed
protected final AtomicBoolean isClosed = new AtomicBoolean(false);
protected final FailedEngineListener failedEngineListener;
protected final SnapshotDeletionPolicy deletionPolicy;
// NOTE(review): presumably serializes engine-failure handling — confirm in subclasses
protected final ReentrantLock failEngineLock = new ReentrantLock();
protected final ReentrantReadWriteLock rwl = new ReentrantReadWriteLock();
// releasable views over the shared read/write lock pair above
protected final ReleasableLock readLock = new ReleasableLock(rwl.readLock());
protected final ReleasableLock writeLock = new ReleasableLock(rwl.writeLock());
// cause of an engine failure, if any; propagated by ensureOpen()
protected volatile Throwable failedEngine = null;

/**
 * Creates the engine from its configuration; the store and deletion policy
 * are mandatory.
 */
protected Engine(EngineConfig engineConfig) {
   Preconditions.checkNotNull(engineConfig.getStore(), "Store must be provided to the engine");
   Preconditions.checkNotNull(engineConfig.getDeletionPolicy(), "Snapshot deletion policy must be provided to the engine");

   this.engineConfig = engineConfig;
   this.shardId = engineConfig.getShardId();
   this.store = engineConfig.getStore();
   this.logger = Loggers.getLogger(Engine.class, // we use the engine class directly here to make sure all subclasses have the same logger name
         engineConfig.getIndexSettings(), engineConfig.getShardId());
   this.failedEngineListener = engineConfig.getFailedEngineListener();
   this.deletionPolicy = engineConfig.getDeletionPolicy();
}
/** Returns 0 in the case where accountable is null, otherwise returns {@code ramBytesUsed()} */
protected static long guardedRamBytesUsed(Accountable a) {
   return a == null ? 0 : a.ramBytesUsed();
}
/**
 * Tries to extract a segment reader from the given index reader.
 * If no SegmentReader can be extracted an {@link IllegalStateException} is thrown.
 */
protected static SegmentReader segmentReader(LeafReader reader) {
   LeafReader current = reader;
   while (true) {
      if (current instanceof SegmentReader) {
         return (SegmentReader) current;
      }
      if (current instanceof FilterLeafReader) {
         // keep peeling off filter wrappers until we hit the underlying reader
         current = FilterLeafReader.unwrap((FilterLeafReader) current);
         continue;
      }
      // hard fail - we can't get a SegmentReader
      throw new IllegalStateException("Can not extract segment reader from given index reader [" + current + "]");
   }
}
/**
 * Returns whether a leaf reader comes from a merge (versus flush or addIndexes).
 */
protected static boolean isMergedSegment(LeafReader reader) {
   // We expect leaves to be segment readers; the writer records its origin in
   // the segment diagnostics under IndexWriter.SOURCE.
   final String source =
         segmentReader(reader).getSegmentInfo().info.getDiagnostics().get(IndexWriter.SOURCE);
   assert Arrays.asList(IndexWriter.SOURCE_ADDINDEXES_READERS, IndexWriter.SOURCE_FLUSH,
         IndexWriter.SOURCE_MERGE).contains(source) : "Unknown source " + source;
   return IndexWriter.SOURCE_MERGE.equals(source);
}
// Factory hook wrapping an acquired IndexSearcher; subclasses may override.
protected Searcher newSearcher(String source, IndexSearcher searcher, SearcherManager manager) {
   return new EngineSearcher(source, searcher, manager, store, logger);
}

/** Returns the configuration this engine was created with. */
public final EngineConfig config() {
   return engineConfig;
}

/** The segment infos of the last commit made by this engine. */
protected abstract SegmentInfos getLastCommittedSegmentInfos();

// Base implementation reports empty merge stats; subclasses may override.
public MergeStats getMergeStats() {
   return new MergeStats();
}
/** A throttling class that can be activated, causing the
 * {@code acquireThrottle} method to block on a lock when throttling
 * is enabled
 */
protected static final class IndexThrottle {
   private static final ReleasableLock NOOP_LOCK = new ReleasableLock(new NoOpLock());
   private final ReleasableLock realLock = new ReleasableLock(new ReentrantLock());
   // the currently-installed lock: NOOP_LOCK when throttling is off
   private volatile ReleasableLock current = NOOP_LOCK;

   /** Acquires the installed lock — a no-op unless throttling is active. */
   public Releasable acquireThrottle() {
      return current.acquire();
   }

   /** Activate throttling, which switches the lock to be a real lock */
   public void activate() {
      assert current == NOOP_LOCK : "throttling activated while already active";
      current = realLock;
   }

   /** Deactivate throttling, which switches the lock to be an always-acquirable NoOpLock */
   public void deactivate() {
      assert current != NOOP_LOCK : "throttling deactivated but not active";
      current = NOOP_LOCK;
   }
}
/** A Lock implementation that always allows the lock to be acquired */
protected static final class NoOpLock implements Lock {

   @Override
   public void lock() {
      // always "acquired" instantly; nothing to do
   }

   @Override
   public void lockInterruptibly() throws InterruptedException {
      // never blocks, so never interruptible in practice
   }

   @Override
   public boolean tryLock() {
      return true;
   }

   @Override
   public boolean tryLock(long time, TimeUnit unit) throws InterruptedException {
      return true;
   }

   @Override
   public void unlock() {
      // nothing was acquired, nothing to release
   }

   @Override
   public Condition newCondition() {
      // a condition is meaningless without mutual exclusion
      throw new UnsupportedOperationException("NoOpLock can't provide a condition");
   }
}
/** Executes a create (insert) operation. */
public abstract void create(Create create) throws EngineException;

// NOTE(review): the boolean result's meaning is implementation-defined
// (likely created-vs-updated) — confirm in subclasses.
public abstract boolean index(Index index) throws EngineException;

/** Executes a delete operation. */
public abstract void delete(Delete delete) throws EngineException;

/** @deprecated This was removed, but we keep this API so translog can replay any DBQs on upgrade. */
@Deprecated
public abstract void delete(DeleteByQuery delete) throws EngineException;

/**
 * Attempts to do a special commit where the given syncID is put into the commit data. The attempt
 * succeeds if there are no pending writes in lucene and the current commit point is equal to the expected one.
 * @param syncId id of this sync
 * @param expectedCommitId the expected value of the last commit id
 * @return the outcome of the attempt — see {@link SyncedFlushResult}
 */
public abstract SyncedFlushResult syncFlush(String syncId, CommitId expectedCommitId) throws EngineException;

/** Outcome of a {@link #syncFlush} attempt. */
public enum SyncedFlushResult {
   /** the sync commit was written */
   SUCCESS,
   /** the current commit did not match the expected commit id */
   COMMIT_MISMATCH,
   /** pending operations prevented the sync commit */
   PENDING_OPERATIONS
}
/**
 * Resolves a get by uid lookup through a fresh searcher. Returns
 * {@code GetResult.NOT_EXISTS} when the uid is absent, throws on a read
 * version conflict, and otherwise returns a GetResult that retains the
 * searcher (the caller must release it).
 */
final protected GetResult getFromSearcher(Get get) throws EngineException {
   final Searcher searcher = acquireSearcher("get");
   final Versions.DocIdAndVersion docIdAndVersion;
   try {
      docIdAndVersion = Versions.loadDocIdAndVersion(searcher.reader(), get.uid());
   } catch (Throwable e) {
      Releasables.closeWhileHandlingException(searcher);
      //TODO: A better exception goes here
      throw new EngineException(shardId, "Couldn't resolve version", e);
   }

   if (docIdAndVersion == null) {
      Releasables.close(searcher);
      return GetResult.NOT_EXISTS;
   }

   if (get.versionType().isVersionConflictForReads(docIdAndVersion.version, get.version())) {
      Releasables.close(searcher);
      final Uid uid = Uid.createUid(get.uid().text());
      throw new VersionConflictEngineException(shardId, uid.type(), uid.id(), docIdAndVersion.version, get.version());
   }

   // don't release the searcher on this path, it is the
   // responsibility of the caller to call GetResult.release
   return new GetResult(searcher, docIdAndVersion);
}
/** Executes a get operation. */
public abstract GetResult get(Get get) throws EngineException;

/**
 * Returns a new searcher instance. The consumer of this
 * API is responsible for releasing the returned searcher in a
 * safe manner, preferably in a try/finally block.
 *
 * @see Searcher#close()
 */
public final Searcher acquireSearcher(String source) throws EngineException {
   boolean success = false;
   /* Acquire order here is store -> manager since we need
    * to make sure that the store is not closed before
    * the searcher is acquired. */
   store.incRef();
   try {
      final SearcherManager manager = getSearcherManager(); // can never be null
      /* This might throw NPE but that's fine we will run ensureOpen()
       * in the catch block and throw the right exception */
      final IndexSearcher searcher = manager.acquire();
      try {
         final Searcher retVal = newSearcher(source, searcher, manager);
         success = true;
         return retVal;
      } finally {
         // on failure, give the searcher back to the manager before unwinding
         if (!success) {
            manager.release(searcher);
         }
      }
   } catch (EngineClosedException ex) {
      throw ex;
   } catch (Throwable ex) {
      ensureOpen(); // throw EngineCloseException here if we are already closed
      logger.error("failed to acquire searcher, source {}", ex, source);
      throw new EngineException(shardId, "failed to acquire searcher, source " + source, ex);
   } finally {
      if (!success) { // release the ref in the case of an error...
         store.decRef();
      }
   }
}
/** returns the translog for this engine */
public abstract Translog getTranslog();

/**
 * Throws an {@link EngineClosedException} — carrying the failure cause, if the
 * engine failed — when the engine has been closed.
 */
protected void ensureOpen() {
   if (isClosed.get()) {
      throw new EngineClosedException(shardId, failedEngine);
   }
}

/** get commits stats for the last commit */
public CommitStats commitStats() {
   return new CommitStats(getLastCommittedSegmentInfos());
}
/**
 * Read the last segments info from the commit pointed to by the searcher manager
 */
protected static SegmentInfos readLastCommittedSegmentInfos(final SearcherManager sm, final Store store) throws IOException {
   final IndexSearcher searcher = sm.acquire();
   try {
      final DirectoryReader reader = (DirectoryReader) searcher.getIndexReader();
      return Lucene.readSegmentInfos(reader.getIndexCommit());
   } catch (IOException e) {
      // Fall back to reading from the store if reading from the commit fails
      try {
         return store.readLastCommittedSegmentsInfo();
      } catch (IOException e2) {
         // keep the original failure attached for diagnostics
         e2.addSuppressed(e);
         throw e2;
      }
   } finally {
      sm.release(searcher);
   }
}
/**
 * Computes global, engine-wide memory statistics over all searchable segments,
 * then lets the subclass add writer-side stats via {@link #writerSegmentStats}.
 */
public final SegmentsStats segmentsStats() {
    ensureOpen();
    try (final Searcher statsSearcher = acquireSearcher("segments_stats")) {
        final SegmentsStats stats = new SegmentsStats();
        for (LeafReaderContext leaf : statsSearcher.reader().leaves()) {
            final SegmentReader sr = segmentReader(leaf.reader());
            stats.add(1, sr.ramBytesUsed());
            stats.addTermsMemoryInBytes(guardedRamBytesUsed(sr.getPostingsReader()));
            stats.addStoredFieldsMemoryInBytes(guardedRamBytesUsed(sr.getFieldsReader()));
            stats.addTermVectorsMemoryInBytes(guardedRamBytesUsed(sr.getTermVectorsReader()));
            stats.addNormsMemoryInBytes(guardedRamBytesUsed(sr.getNormsReader()));
            stats.addDocValuesMemoryInBytes(guardedRamBytesUsed(sr.getDocValuesReader()));
        }
        writerSegmentStats(stats);
        return stats;
    }
}
/**
 * Hook for subclasses that own an index writer to report writer-side memory usage.
 * The base implementation has no writer, so all writer-related stats are zero.
 */
protected void writerSegmentStats(SegmentsStats stats) {
// by default we don't have a writer here... subclasses can override this
stats.addVersionMapMemoryInBytes(0);
stats.addIndexWriterMemoryInBytes(0);
stats.addIndexWriterMaxMemoryInBytes(0);
}
/**
 * Builds the per-segment view of this engine: segments visible to search (from the
 * currently acquired searcher) merged with segments present in the last commit.
 *
 * @param lastCommittedSegmentInfos segments of the last commit, or {@code null} if unavailable
 * @param verbose whether to attach a detailed memory-accounting tree to each segment
 * @return segments sorted by ascending generation
 */
protected Segment[] getSegmentInfo(SegmentInfos lastCommittedSegmentInfos, boolean verbose) {
    ensureOpen();
    Map<String, Segment> segments = new HashMap<>();
    // first, go over and compute the search ones...
    Searcher searcher = acquireSearcher("segments");
    try {
        for (LeafReaderContext reader : searcher.reader().leaves()) {
            // hoisted: the original called segmentReader(reader.reader()) twice per leaf
            final SegmentReader segmentReader = segmentReader(reader.reader());
            SegmentCommitInfo info = segmentReader.getSegmentInfo();
            assert !segments.containsKey(info.info.name);
            Segment segment = new Segment(info.info.name);
            segment.search = true;
            segment.docCount = reader.reader().numDocs();
            segment.delDocCount = reader.reader().numDeletedDocs();
            segment.version = info.info.getVersion();
            segment.compound = info.info.getUseCompoundFile();
            try {
                segment.sizeInBytes = info.sizeInBytes();
            } catch (IOException e) {
                logger.trace("failed to get size for [{}]", e, info.info.name);
            }
            segment.memoryInBytes = segmentReader.ramBytesUsed();
            if (verbose) {
                segment.ramTree = Accountables.namedAccountable("root", segmentReader);
            }
            // TODO: add more fine grained mem stats values to per segment info here
            segments.put(info.info.name, segment);
        }
    } finally {
        searcher.close();
    }
    // now, correlate or add the committed ones...
    if (lastCommittedSegmentInfos != null) {
        for (SegmentCommitInfo info : lastCommittedSegmentInfos) {
            Segment segment = segments.get(info.info.name);
            if (segment == null) {
                segment = new Segment(info.info.name);
                segment.search = false;
                segment.committed = true;
                segment.docCount = info.info.maxDoc();
                segment.delDocCount = info.getDelCount();
                segment.version = info.info.getVersion();
                segment.compound = info.info.getUseCompoundFile();
                try {
                    segment.sizeInBytes = info.sizeInBytes();
                } catch (IOException e) {
                    logger.trace("failed to get size for [{}]", e, info.info.name);
                }
                segments.put(info.info.name, segment);
            } else {
                segment.committed = true;
            }
        }
    }
    Segment[] segmentsArr = segments.values().toArray(new Segment[segments.size()]);
    Arrays.sort(segmentsArr, new Comparator<Segment>() {
        @Override
        public int compare(Segment o1, Segment o2) {
            // BUG FIX: the original cast a long difference to int, which overflows
            // (and mis-sorts) once generations differ by more than Integer.MAX_VALUE.
            return Long.compare(o1.getGeneration(), o2.getGeneration());
        }
    });
    return segmentsArr;
}
/**
 * The list of segments in the engine.
 * @param verbose whether to include detailed per-segment memory accounting
 */
public abstract List<Segment> segments(boolean verbose);
/**
 * Returns {@code true} if the current searcher is stale and a refresh would make
 * new changes visible; {@code false} when the searcher is current or the store
 * reference could not be acquired (i.e. the store is closing).
 */
public final boolean refreshNeeded() {
    if (store.tryIncRef() == false) {
        return false;
    }
    /*
     we need to inc the store here since searcherManager.isSearcherCurrent()
     acquires a searcher internally and that might keep a file open on the
     store. this violates the assumption that all files are closed when
     the store is closed so we need to make sure we increment it here
     */
    try {
        return getSearcherManager().isSearcherCurrent() == false;
    } catch (IOException e) {
        logger.error("failed to access searcher manager", e);
        failEngine("failed to access searcher manager", e);
        throw new EngineException(shardId, "failed to access searcher manager", e);
    } finally {
        store.decRef();
    }
}
/**
 * Refreshes the engine for new search operations to reflect the latest
 * changes.
 * @param source a short label describing who requested the refresh (for logging/stats)
 */
public abstract void refresh(String source) throws EngineException;
/**
 * Flushes the state of the engine including the transaction log, clearing memory.
 * @param force if <code>true</code> a lucene commit is executed even if no changes need to be committed.
 * @param waitIfOngoing if <code>true</code> this call will block until all currently running flushes have finished.
 * Otherwise this call will return without blocking.
 * @return the commit Id for the resulting commit
 */
public abstract CommitId flush(boolean force, boolean waitIfOngoing) throws EngineException;
/**
 * Flushes the state of the engine including the transaction log, clearing memory and persisting
 * documents in the lucene index to disk including a potentially heavy and durable fsync operation.
 * This operation is not going to block if another flush operation is currently running and won't write
 * a lucene commit if nothing needs to be committed.
 *
 * @return the commit Id for the resulting commit
 */
public abstract CommitId flush() throws EngineException;
/**
 * Optimizes to 1 segment.
 * Convenience overload of {@link #forceMerge(boolean, int, boolean, boolean, boolean)}
 * with maxNumSegments=1 and all other options disabled.
 */
public void forceMerge(boolean flush) {
forceMerge(flush, 1, false, false, false);
}
/**
 * Triggers a forced merge on this engine
 */
public abstract void forceMerge(boolean flush, int maxNumSegments, boolean onlyExpungeDeletes, boolean upgrade, boolean upgradeOnlyAncientSegments) throws EngineException;
/**
 * Snapshots the index and returns a handle to it. If needed will try and "commit" the
 * lucene index to make sure we have a "fresh" copy of the files to snapshot.
 *
 * @param flushFirst indicates whether the engine should flush before returning the snapshot
 */
public abstract SnapshotIndexCommit snapshotIndex(boolean flushFirst) throws EngineException;
/**
 * fail engine due to some error. the engine will also be closed.
 * The underlying store is marked corrupted iff failure is caused by index corruption
 */
public void failEngine(String reason, @Nullable Throwable failure) {
// tryLock ensures only one caller actually fails the engine; concurrent callers fall
// through to the else branch and merely log.
if (failEngineLock.tryLock()) {
store.incRef();
try {
try {
// we just go and close this engine - no way to recover
closeNoLock("engine failed on: [" + reason + "]");
} finally {
// NOTE: this return exits from inside a finally block; it is taken only when the
// engine was already failed, in which case the listener must not fire again.
if (failedEngine != null) {
logger.debug("tried to fail engine but engine is already failed. ignoring. [{}]", reason, failure);
return;
}
logger.warn("failed engine [{}]", failure, reason);
// we must set a failure exception, generate one if not supplied
failedEngine = (failure != null) ? failure : new IllegalStateException(reason);
// we first mark the store as corrupted before we notify any listeners
// this must happen first otherwise we might try to reallocate so quickly
// on the same node that we don't see the corrupted marker file when
// the shard is initializing
if (Lucene.isCorruptionException(failure)) {
try {
store.markStoreCorrupted(new IOException("failed engine (reason: [" + reason + "])", ExceptionsHelper.unwrapCorruption(failure)));
} catch (IOException e) {
logger.warn("Couldn't mark store corrupted", e);
}
}
failedEngineListener.onFailedEngine(shardId, reason, failure);
}
} catch (Throwable t) {
// don't bubble up these exceptions up
logger.warn("failEngine threw exception", t);
} finally {
store.decRef();
}
} else {
logger.debug("tried to fail engine but could not acquire lock - engine should be failed by now [{}]", reason, failure);
}
}
/**
 * Checks whether the given throwable warrants failing the whole engine
 * (index corruption or out-of-memory) and, if so, fails it.
 *
 * @return {@code true} if the engine was failed, {@code false} otherwise
 */
protected boolean maybeFailEngine(String source, Throwable t) {
    if (Lucene.isCorruptionException(t)) {
        failEngine("corrupt file (source: [" + source + "])", t);
        return true;
    }
    if (ExceptionsHelper.isOOM(t)) {
        failEngine("out of memory (source: [" + source + "])", t);
        return true;
    }
    return false;
}
/**
 * If the engine is already closed, wraps {@code t} in an {@link EngineClosedException}
 * (suppressing the original engine failure on it when distinct); otherwise returns
 * {@code t} unchanged.
 */
protected Throwable wrapIfClosed(Throwable t) {
    if (isClosed.get() == false) {
        return t;
    }
    final Throwable engineFailure = failedEngine;
    if (engineFailure != null && engineFailure != t) {
        t.addSuppressed(engineFailure);
    }
    return new EngineClosedException(shardId, t);
}
/** Callback notified when the engine has been failed (see failEngine); fired at most once. */
public interface FailedEngineListener {
void onFailedEngine(ShardId shardId, String reason, @Nullable Throwable t);
}
/**
 * A handle on an acquired {@link IndexSearcher} tagged with the source that acquired it.
 * This base implementation is not reference-counted: {@link #close()} is a no-op;
 * subclasses that manage lifecycles override it.
 */
public static class Searcher implements Releasable {

    private final String source;
    private final IndexSearcher searcher;

    public Searcher(String source, IndexSearcher searcher) {
        this.source = source;
        this.searcher = searcher;
    }

    /**
     * The source that caused this searcher to be acquired.
     */
    public String source() {
        return source;
    }

    /** The wrapped searcher. */
    public IndexSearcher searcher() {
        return searcher;
    }

    /** Shorthand for the wrapped searcher's reader. */
    public IndexReader reader() {
        return searcher().getIndexReader();
    }

    @Override
    public void close() {
        // Nothing to close here
    }
}
/** Base contract for all engine operations (create/index/delete). */
public static interface Operation {
// The kind of mutation this operation performs.
static enum Type {
CREATE,
INDEX,
DELETE
}
// Where the operation originated: the primary shard, a replica, or recovery replay.
static enum Origin {
PRIMARY,
REPLICA,
RECOVERY
}
Type opType();
Origin origin();
}
/**
 * Base class for write operations that carry a parsed document (create/index).
 * Holds the uid term, the parsed document, versioning information, origin, and
 * timing/translog bookkeeping shared by {@link Create} and {@link Index}.
 */
public static abstract class IndexingOperation implements Operation {
private final Term uid;
private final ParsedDocument doc;
// Mutable: updated via updateVersion(long) after version resolution.
private long version;
private final VersionType versionType;
private final Origin origin;
private final boolean canHaveDuplicates;
// Set after the operation is written to the translog.
private Translog.Location location;
private final long startTime;
private long endTime;
public IndexingOperation(Term uid, ParsedDocument doc, long version, VersionType versionType, Origin origin, long startTime, boolean canHaveDuplicates) {
this.uid = uid;
this.doc = doc;
this.version = version;
this.versionType = versionType;
this.origin = origin;
this.startTime = startTime;
this.canHaveDuplicates = canHaveDuplicates;
}
// Convenience constructor: primary-origin, match-any internal versioning, now().
public IndexingOperation(Term uid, ParsedDocument doc) {
this(uid, doc, Versions.MATCH_ANY, VersionType.INTERNAL, Origin.PRIMARY, System.nanoTime(), true);
}
@Override
public Origin origin() {
return this.origin;
}
public ParsedDocument parsedDoc() {
return this.doc;
}
public Term uid() {
return this.uid;
}
public String type() {
return this.doc.type();
}
public String id() {
return this.doc.id();
}
public String routing() {
return this.doc.routing();
}
public long timestamp() {
return this.doc.timestamp();
}
public long ttl() {
return this.doc.ttl();
}
public long version() {
return this.version;
}
// Updates both this operation's version and the version field of the parsed doc.
public void updateVersion(long version) {
this.version = version;
this.doc.version().setLongValue(version);
}
public void setTranslogLocation(Translog.Location location) {
this.location = location;
}
public Translog.Location getTranslogLocation() {
return this.location;
}
public VersionType versionType() {
return this.versionType;
}
public boolean canHaveDuplicates() {
return this.canHaveDuplicates;
}
public String parent() {
return this.doc.parent();
}
public List<Document> docs() {
return this.doc.docs();
}
public BytesReference source() {
return this.doc.source();
}
/**
 * Returns operation start time in nanoseconds.
 */
public long startTime() {
return this.startTime;
}
public void endTime(long endTime) {
this.endTime = endTime;
}
/**
 * Returns operation end time in nanoseconds.
 */
public long endTime() {
return this.endTime;
}
/**
 * Execute this operation against the provided {@link IndexShard} and
 * return whether the document was created.
 */
public abstract boolean execute(IndexShard shard);
}
/** A create operation: indexes a document expected not to exist yet; may carry an auto-generated id. */
public static final class Create extends IndexingOperation {
private final boolean autoGeneratedId;
public Create(Term uid, ParsedDocument doc, long version, VersionType versionType, Origin origin, long startTime, boolean canHaveDuplicates, boolean autoGeneratedId) {
super(uid, doc, version, versionType, origin, startTime, canHaveDuplicates);
this.autoGeneratedId = autoGeneratedId;
}
// Convenience: canHaveDuplicates=true, autoGeneratedId=false.
public Create(Term uid, ParsedDocument doc, long version, VersionType versionType, Origin origin, long startTime) {
this(uid, doc, version, versionType, origin, startTime, true, false);
}
public Create(Term uid, ParsedDocument doc) {
super(uid, doc);
autoGeneratedId = false;
}
@Override
public Type opType() {
return Type.CREATE;
}
public boolean autoGeneratedId() {
return this.autoGeneratedId;
}
@Override
public boolean execute(IndexShard shard) {
shard.create(this);
// create always produces a new document
return true;
}
}
/** An index operation: adds or replaces a document; execute() reports whether it was newly created. */
public static final class Index extends IndexingOperation {
public Index(Term uid, ParsedDocument doc, long version, VersionType versionType, Origin origin, long startTime, boolean canHaveDuplicates) {
super(uid, doc, version, versionType, origin, startTime, canHaveDuplicates);
}
public Index(Term uid, ParsedDocument doc, long version, VersionType versionType, Origin origin, long startTime) {
super(uid, doc, version, versionType, origin, startTime, true);
}
public Index(Term uid, ParsedDocument doc) {
super(uid, doc);
}
@Override
public Type opType() {
return Type.INDEX;
}
@Override
public boolean execute(IndexShard shard) {
return shard.index(this);
}
}
/**
 * A delete operation for a single document identified by type/id/uid, with
 * versioning, origin, and timing/translog bookkeeping.
 */
public static class Delete implements Operation {
private final String type;
private final String id;
private final Term uid;
// Mutable: updated via updateVersion(long, boolean) after execution.
private long version;
private final VersionType versionType;
private final Origin origin;
// Whether a document was actually found and deleted; set alongside version.
private boolean found;
private final long startTime;
private long endTime;
private Translog.Location location;
public Delete(String type, String id, Term uid, long version, VersionType versionType, Origin origin, long startTime, boolean found) {
this.type = type;
this.id = id;
this.uid = uid;
this.version = version;
this.versionType = versionType;
this.origin = origin;
this.startTime = startTime;
this.found = found;
}
// Convenience: primary-origin, match-any internal versioning, now(), not-yet-found.
public Delete(String type, String id, Term uid) {
this(type, id, uid, Versions.MATCH_ANY, VersionType.INTERNAL, Origin.PRIMARY, System.nanoTime(), false);
}
// Copy constructor that swaps in a different version type.
public Delete(Delete template, VersionType versionType) {
this(template.type(), template.id(), template.uid(), template.version(), versionType, template.origin(), template.startTime(), template.found());
}
@Override
public Type opType() {
return Type.DELETE;
}
@Override
public Origin origin() {
return this.origin;
}
public String type() {
return this.type;
}
public String id() {
return this.id;
}
public Term uid() {
return this.uid;
}
public void updateVersion(long version, boolean found) {
this.version = version;
this.found = found;
}
/**
 * before delete execution this is the version to be deleted. After this is the version of the "delete" transaction record.
 */
public long version() {
return this.version;
}
public VersionType versionType() {
return this.versionType;
}
public boolean found() {
return this.found;
}
/**
 * Returns operation start time in nanoseconds.
 */
public long startTime() {
return this.startTime;
}
public void endTime(long endTime) {
this.endTime = endTime;
}
/**
 * Returns operation end time in nanoseconds.
 */
public long endTime() {
return this.endTime;
}
public void setTranslogLocation(Translog.Location location) {
this.location = location;
}
public Translog.Location getTranslogLocation() {
return this.location;
}
}
/**
 * A delete-by-query operation: deletes all documents matching a query, optionally
 * restricted by filtering aliases, types, and a parent (nested) filter.
 */
public static class DeleteByQuery {
private final Query query;
private final BytesReference source;
private final String[] filteringAliases;
private final Query aliasFilter;
private final String[] types;
// Non-null only for nested documents; see nested().
private final BitDocIdSetFilter parentFilter;
private final Operation.Origin origin;
private final long startTime;
private long endTime;
public DeleteByQuery(Query query, BytesReference source, @Nullable String[] filteringAliases, @Nullable Query aliasFilter, BitDocIdSetFilter parentFilter, Operation.Origin origin, long startTime, String... types) {
this.query = query;
this.source = source;
this.types = types;
this.filteringAliases = filteringAliases;
this.aliasFilter = aliasFilter;
this.parentFilter = parentFilter;
this.startTime = startTime;
this.origin = origin;
}
public Query query() {
return this.query;
}
public BytesReference source() {
return this.source;
}
public String[] types() {
return this.types;
}
public String[] filteringAliases() {
return filteringAliases;
}
public Query aliasFilter() {
return aliasFilter;
}
// True when a parent filter is present, i.e. the query targets nested documents.
public boolean nested() {
return parentFilter != null;
}
public BitDocIdSetFilter parentFilter() {
return parentFilter;
}
public Operation.Origin origin() {
return this.origin;
}
/**
 * Returns operation start time in nanoseconds.
 */
public long startTime() {
return this.startTime;
}
// Builder-style setter: returns this for chaining.
public DeleteByQuery endTime(long endTime) {
this.endTime = endTime;
return this;
}
/**
 * Returns operation end time in nanoseconds.
 */
public long endTime() {
return this.endTime;
}
}
/**
 * A get request for a single uid, optionally realtime, with builder-style
 * version / version-type setters for optimistic concurrency checks.
 */
public static class Get {

    private final boolean realtime;
    private final Term uid;
    private long version = Versions.MATCH_ANY;
    private VersionType versionType = VersionType.INTERNAL;

    public Get(boolean realtime, Term uid) {
        this.realtime = realtime;
        this.uid = uid;
    }

    public boolean realtime() {
        return this.realtime;
    }

    public Term uid() {
        return uid;
    }

    public long version() {
        return version;
    }

    /** Sets the expected version; returns {@code this} for chaining. */
    public Get version(long version) {
        this.version = version;
        return this;
    }

    public VersionType versionType() {
        return versionType;
    }

    /** Sets the version type; returns {@code this} for chaining. */
    public Get versionType(VersionType versionType) {
        this.versionType = versionType;
        return this;
    }
}
/**
 * Result of a get: either not-found (NOT_EXISTS), a translog-sourced hit
 * (realtime), or an index-sourced hit carrying a searcher that the caller
 * must release via {@link #release()}.
 */
public static class GetResult {
private final boolean exists;
private final long version;
private final Translog.Source source;
private final Versions.DocIdAndVersion docIdAndVersion;
private final Searcher searcher;
// Shared singleton for misses; holds no searcher, so release() is a no-op on it.
public static final GetResult NOT_EXISTS = new GetResult(false, Versions.NOT_FOUND, null);
// Translog-based result (realtime get); no searcher is attached.
public GetResult(boolean exists, long version, @Nullable Translog.Source source) {
this.source = source;
this.exists = exists;
this.version = version;
this.docIdAndVersion = null;
this.searcher = null;
}
// Index-based result; the caller takes ownership of the searcher.
public GetResult(Searcher searcher, Versions.DocIdAndVersion docIdAndVersion) {
this.exists = true;
this.source = null;
this.version = docIdAndVersion.version;
this.docIdAndVersion = docIdAndVersion;
this.searcher = searcher;
}
public boolean exists() {
return exists;
}
public long version() {
return this.version;
}
@Nullable
public Translog.Source source() {
return source;
}
public Searcher searcher() {
return this.searcher;
}
public Versions.DocIdAndVersion docIdAndVersion() {
return docIdAndVersion;
}
// Releases the attached searcher, if any; safe to call on translog-based results.
public void release() {
if (searcher != null) {
searcher.close();
}
}
}
/** Returns the searcher manager used to acquire searchers for this engine. */
protected abstract SearcherManager getSearcherManager();
/**
 * Method to close the engine while the write lock is held.
 */
protected abstract void closeNoLock(String reason);
/**
 * Flush the engine (committing segments to disk and truncating the
 * translog) and close it.
 * Flush failures caused by concurrent flushes or an already-closed engine are
 * tolerated; the engine is closed regardless via the finally block.
 */
public void flushAndClose() throws IOException {
if (isClosed.get() == false) {
logger.trace("flushAndClose now acquire writeLock");
try (ReleasableLock lock = writeLock.acquire()) {
logger.trace("flushAndClose now acquired writeLock");
try {
logger.debug("flushing shard on close - this might take some time to sync files to disk");
try {
flush(); // TODO we might force a flush in the future since we have the write lock already even though recoveries are running.
} catch (FlushNotAllowedEngineException ex) {
logger.debug("flush not allowed during flushAndClose - skipping");
} catch (EngineClosedException ex) {
logger.debug("engine already closed - skipping flushAndClose");
}
} finally {
close(); // double close is not a problem
}
}
}
}
/** Closes the engine under the write lock without flushing; idempotent. */
@Override
public void close() throws IOException {
if (isClosed.get() == false) { // don't acquire the write lock if we are already closed
logger.debug("close now acquiring writeLock");
try (ReleasableLock lock = writeLock.acquire()) {
logger.debug("close acquired writeLock");
closeNoLock("api");
}
}
}
/**
 * Returns <code>true</code> if the internal writer has any uncommitted changes, otherwise <code>false</code>.
 */
public abstract boolean hasUncommittedChanges();
/**
 * An immutable, serializable identifier of a Lucene commit. Equality and
 * hashing are defined over the raw id bytes.
 */
public static class CommitId implements Writeable {

    private final byte[] id;

    public CommitId(byte[] id) {
        assert id != null;
        // defensive copy so callers cannot mutate our state afterwards
        this.id = Arrays.copyOf(id, id.length);
    }

    public CommitId(StreamInput in) throws IOException {
        assert in != null;
        this.id = in.readByteArray();
    }

    @Override
    public String toString() {
        return Base64.encodeBytes(id);
    }

    @Override
    public CommitId readFrom(StreamInput in) throws IOException {
        return new CommitId(in);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeByteArray(id);
    }

    /** Compares the given raw bytes against this commit id. */
    public boolean idsEqual(byte[] other) {
        return Arrays.equals(other, this.id);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        return Arrays.equals(id, ((CommitId) o).id);
    }

    @Override
    public int hashCode() {
        return Arrays.hashCode(id);
    }
}
/** Hook invoked when dynamic engine settings change; no-op by default. */
public void onSettingsChanged() {}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.broker.artemiswrapper;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ThreadPoolExecutor;
import org.apache.activemq.artemis.api.core.TransportConfiguration;
import org.apache.activemq.artemis.core.config.Configuration;
import org.apache.activemq.artemis.core.config.impl.ConfigurationImpl;
import org.apache.activemq.artemis.core.remoting.impl.invm.InVMAcceptorFactory;
import org.apache.activemq.artemis.core.remoting.impl.netty.NettyAcceptorFactory;
import org.apache.activemq.artemis.core.remoting.impl.netty.NettyConnectorFactory;
import org.apache.activemq.artemis.core.server.ActiveMQServer;
import org.apache.activemq.artemis.core.server.ActiveMQServers;
import org.apache.activemq.artemis.core.server.JournalType;
import org.apache.activemq.artemis.core.settings.impl.AddressFullMessagePolicy;
import org.apache.activemq.artemis.core.settings.impl.AddressSettings;
import org.apache.activemq.artemis.jlibaio.LibaioContext;
import org.apache.activemq.broker.Broker;
import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.broker.Connection;
import org.apache.activemq.broker.ConnectionContext;
import org.apache.activemq.broker.ConsumerBrokerExchange;
import org.apache.activemq.broker.ProducerBrokerExchange;
import org.apache.activemq.broker.region.Destination;
import org.apache.activemq.broker.region.MessageReference;
import org.apache.activemq.broker.region.Subscription;
import org.apache.activemq.command.ActiveMQDestination;
import org.apache.activemq.command.BrokerId;
import org.apache.activemq.command.BrokerInfo;
import org.apache.activemq.command.ConnectionInfo;
import org.apache.activemq.command.ConsumerControl;
import org.apache.activemq.command.ConsumerInfo;
import org.apache.activemq.command.DestinationInfo;
import org.apache.activemq.command.Message;
import org.apache.activemq.command.MessageAck;
import org.apache.activemq.command.MessageDispatch;
import org.apache.activemq.command.MessageDispatchNotification;
import org.apache.activemq.command.MessagePull;
import org.apache.activemq.command.ProducerInfo;
import org.apache.activemq.command.RemoveSubscriptionInfo;
import org.apache.activemq.command.Response;
import org.apache.activemq.command.SessionInfo;
import org.apache.activemq.command.TransactionId;
import org.apache.activemq.store.PListStore;
import org.apache.activemq.thread.Scheduler;
import org.apache.activemq.usage.Usage;
import org.junit.rules.TemporaryFolder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public abstract class ArtemisBrokerBase implements Broker {
private static final Logger LOG = LoggerFactory.getLogger(ArtemisBrokerBase.class);
// Fully-qualified factory class names used when building acceptor/connector transport configs.
public static final String INVM_ACCEPTOR_FACTORY = InVMAcceptorFactory.class.getCanonicalName();
public static final String NETTY_ACCEPTOR_FACTORY = NettyAcceptorFactory.class.getCanonicalName();
public static final String NETTY_CONNECTOR_FACTORY = NettyConnectorFactory.class.getCanonicalName();
protected static final String CLUSTER_PASSWORD = "UnitTestsClusterPassword";
// Lifecycle flag; flipped by brokerServiceStarted()/stop paths.
protected volatile boolean stopped;
protected BrokerId brokerId = new BrokerId("Artemis Broker");
protected BrokerService bservice;
// Per-test scratch directory; created eagerly in the constructor.
protected TemporaryFolder temporaryFolder = new TemporaryFolder();
protected String testDir;
// When true, the embedded server uses real (persistent) journal storage.
protected boolean realStore = false;
protected ActiveMQServer server;
protected boolean enableSecurity = false;
/**
 * Creates the broker wrapper and eagerly creates the temporary data folder.
 * Folder-creation failures are logged instead of propagated, preserving the
 * original best-effort construction behavior.
 */
public ArtemisBrokerBase() {
    try {
        this.temporaryFolder.create();
    } catch (IOException e) {
        // BUG FIX: was silently swallowed, hiding setup failures from test logs.
        LOG.warn("Failed to create temporary folder for Artemis broker", e);
    }
}
// ------------------------------------------------------------------
// Broker operations this Artemis-backed test wrapper does not support.
// Each fails fast so a test that unexpectedly exercises the ActiveMQ
// broker API surfaces the problem immediately.
// ------------------------------------------------------------------
@Override
public Destination addDestination(ConnectionContext context,
ActiveMQDestination destination,
boolean createIfTemporary) throws Exception {
throw new RuntimeException("Don't call me!");
}
@Override
public void removeDestination(ConnectionContext context,
ActiveMQDestination destination,
long timeout) throws Exception {
throw new RuntimeException("Don't call me!");
}
@Override
public Map<ActiveMQDestination, Destination> getDestinationMap() {
throw new RuntimeException("Don't call me!");
}
@Override
public Subscription addConsumer(ConnectionContext context, ConsumerInfo info) throws Exception {
throw new RuntimeException("Don't call me!");
}
@Override
public void removeConsumer(ConnectionContext context, ConsumerInfo info) throws Exception {
throw new RuntimeException("Don't call me!");
}
@Override
public void removeSubscription(ConnectionContext context, RemoveSubscriptionInfo info) throws Exception {
throw new RuntimeException("Don't call me!");
}
@Override
public void send(ProducerBrokerExchange producerExchange, Message message) throws Exception {
throw new RuntimeException("Don't call me!");
}
@Override
public void acknowledge(ConsumerBrokerExchange consumerExchange, MessageAck ack) throws Exception {
throw new RuntimeException("Don't call me!");
}
@Override
public Response messagePull(ConnectionContext context, MessagePull pull) throws Exception {
throw new RuntimeException("Don't call me!");
}
@Override
public void processDispatchNotification(MessageDispatchNotification messageDispatchNotification) throws Exception {
throw new RuntimeException("Don't call me!");
}
@Override
public void gc() {
throw new RuntimeException("Don't call me!");
}
@Override
public Set<Destination> getDestinations(ActiveMQDestination destination) {
throw new RuntimeException("Don't call me!");
}
@Override
public void processConsumerControl(ConsumerBrokerExchange consumerExchange, ConsumerControl control) {
throw new RuntimeException("Don't call me!");
}
@Override
public void reapplyInterceptor() {
throw new RuntimeException("Don't call me!");
}
@Override
public Broker getAdaptor(Class type) {
throw new RuntimeException("Don't call me!");
}
/** Returns the fixed identity of this wrapper broker. */
@Override
public BrokerId getBrokerId() {
return brokerId;
}
/** Returns the fixed broker name; matches the value used to build {@code brokerId}. */
@Override
public String getBrokerName() {
return "Artemis Broker";
}
// More unsupported Broker operations (fail fast), interleaved with the few
// trivial members the tests actually rely on (isStopped, admin-context no-ops).
@Override
public void addBroker(Connection connection, BrokerInfo info) {
throw new RuntimeException("Don't call me!");
}
@Override
public void removeBroker(Connection connection, BrokerInfo info) {
throw new RuntimeException("Don't call me!");
}
@Override
public void addConnection(ConnectionContext context, ConnectionInfo info) throws Exception {
throw new RuntimeException("Don't call me!");
}
@Override
public void removeConnection(ConnectionContext context, ConnectionInfo info, Throwable error) throws Exception {
throw new RuntimeException("Don't call me!");
}
@Override
public void addSession(ConnectionContext context, SessionInfo info) throws Exception {
throw new RuntimeException("Don't call me!");
}
@Override
public void removeSession(ConnectionContext context, SessionInfo info) throws Exception {
throw new RuntimeException("Don't call me!");
}
@Override
public void addProducer(ConnectionContext context, ProducerInfo info) throws Exception {
throw new RuntimeException("Don't call me!");
}
@Override
public void removeProducer(ConnectionContext context, ProducerInfo info) throws Exception {
throw new RuntimeException("Don't call me!");
}
@Override
public Connection[] getClients() throws Exception {
throw new RuntimeException("Don't call me!");
}
@Override
public ActiveMQDestination[] getDestinations() throws Exception {
throw new RuntimeException("Don't call me!");
}
@Override
public Map<ActiveMQDestination, Destination> getDestinationMap(ActiveMQDestination destination) {
throw new RuntimeException("Don't call me!");
}
@Override
public TransactionId[] getPreparedTransactions(ConnectionContext context) throws Exception {
throw new RuntimeException("Don't call me!");
}
@Override
public void beginTransaction(ConnectionContext context, TransactionId xid) throws Exception {
throw new RuntimeException("Don't call me!");
}
@Override
public int prepareTransaction(ConnectionContext context, TransactionId xid) throws Exception {
throw new RuntimeException("Don't call me!");
}
@Override
public void rollbackTransaction(ConnectionContext context, TransactionId xid) throws Exception {
throw new RuntimeException("Don't call me!");
}
@Override
public void commitTransaction(ConnectionContext context, TransactionId xid, boolean onePhase) throws Exception {
throw new RuntimeException("Don't call me!");
}
@Override
public void forgetTransaction(ConnectionContext context, TransactionId transactionId) throws Exception {
throw new RuntimeException("Don't call me!");
}
// No peer brokers in this standalone test wrapper.
@Override
public BrokerInfo[] getPeerBrokerInfos() {
return null;
}
@Override
public void preProcessDispatch(MessageDispatch messageDispatch) {
throw new RuntimeException("Don't call me!");
}
@Override
public void postProcessDispatch(MessageDispatch messageDispatch) {
throw new RuntimeException("Don't call me!");
}
/** Reports the wrapper's lifecycle state (see {@link #brokerServiceStarted()}). */
@Override
public boolean isStopped() {
return stopped;
}
@Override
public Set<ActiveMQDestination> getDurableDestinations() {
throw new RuntimeException("Don't call me!");
}
@Override
public void addDestinationInfo(ConnectionContext context, DestinationInfo info) throws Exception {
throw new RuntimeException("Don't call me!");
}
@Override
public void removeDestinationInfo(ConnectionContext context, DestinationInfo info) throws Exception {
throw new RuntimeException("Don't call me!");
}
@Override
public boolean isFaultTolerantConfiguration() {
return false;
}
// Admin connection context is not tracked by this wrapper.
@Override
public ConnectionContext getAdminConnectionContext() {
return null;
}
@Override
public void setAdminConnectionContext(ConnectionContext adminConnectionContext) {
//
}
@Override
public PListStore getTempDataStore() {
throw new RuntimeException("Don't call me!");
}
@Override
public URI getVmConnectorURI() {
throw new RuntimeException("Don't call me!");
}
/** Marks the wrapper as running; the only place {@code stopped} is cleared here. */
@Override
public void brokerServiceStarted() {
stopped = false;
}
@Override
public BrokerService getBrokerService() {
return this.bservice;
}
/** This wrapper is its own root of the broker filter chain. */
@Override
public Broker getRoot() {
return this;
}
@Override
public boolean isExpired(MessageReference messageReference) {
throw new RuntimeException("Don't call me!");
}
@Override
public void messageExpired(ConnectionContext context, MessageReference messageReference, Subscription subscription) {
throw new RuntimeException("Don't call me!");
}
@Override
public boolean sendToDeadLetterQueue(ConnectionContext context,
MessageReference messageReference,
Subscription subscription,
Throwable poisonCause) {
throw new RuntimeException("Don't call me!");
}
@Override
public long getBrokerSequenceId() {
throw new RuntimeException("Don't call me!");
}
@Override
public void messageConsumed(ConnectionContext context, MessageReference messageReference) {
throw new RuntimeException("Don't call me!");
}
@Override
public void messageDelivered(ConnectionContext context, MessageReference messageReference) {
throw new RuntimeException("Don't call me!");
}
@Override
public void messageDiscarded(ConnectionContext context, Subscription sub, MessageReference messageReference) {
// Stub: not expected in these tests — fail loudly if invoked.
throw new RuntimeException("Don't call me!");
}
@Override
public void slowConsumer(ConnectionContext context, Destination destination, Subscription subs) {
// Stub: not expected in these tests — fail loudly if invoked.
throw new RuntimeException("Don't call me!");
}
@Override
public void fastProducer(ConnectionContext context, ProducerInfo producerInfo, ActiveMQDestination destination) {
// Stub: not expected in these tests — fail loudly if invoked.
throw new RuntimeException("Don't call me!");
}
@Override
public void isFull(ConnectionContext context, Destination destination, Usage usage) {
// Stub: not expected in these tests — fail loudly if invoked.
throw new RuntimeException("Don't call me!");
}
@Override
public void nowMasterBroker() {
// Intentionally a no-op: master election is irrelevant to this stub.
}
@Override
public Scheduler getScheduler() {
// Stub: not expected in these tests — fail loudly if invoked.
throw new RuntimeException("Don't call me!");
}
@Override
public ThreadPoolExecutor getExecutor() {
// Stub: not expected in these tests — fail loudly if invoked.
throw new RuntimeException("Don't call me!");
}
@Override
public void networkBridgeStarted(BrokerInfo brokerInfo, boolean createdByDuplex, String remoteIp) {
// Stub: not expected in these tests — fail loudly if invoked.
throw new RuntimeException("Don't call me!");
}
@Override
public void networkBridgeStopped(BrokerInfo brokerInfo) {
// Stub: not expected in these tests — fail loudly if invoked.
throw new RuntimeException("Don't call me!");
}
// Convenience overload: default config, unlimited paging (-1), empty address settings.
protected final ActiveMQServer createServer(final boolean realFiles, final boolean netty) throws Exception {
return createServer(realFiles, createDefaultConfig(netty), -1, -1, new HashMap<String, AddressSettings>());
}
// Convenience overload: defaults the full-address policy to PAGE.
protected final ActiveMQServer createServer(final boolean realFiles,
final Configuration configuration,
final int pageSize,
final int maxAddressSize,
final Map<String, AddressSettings> settings) {
return createServer(realFiles, configuration, pageSize, maxAddressSize, AddressFullMessagePolicy.PAGE, settings);
}
/**
 * Builds an ActiveMQ Artemis server for a test: registers any per-address
 * settings, then installs a catch-all ("#") default carrying the requested
 * page size, max address size and full-address policy.
 */
protected final ActiveMQServer createServer(final boolean realFiles,
                                            final Configuration configuration,
                                            final int pageSize,
                                            final int maxAddressSize,
                                            final AddressFullMessagePolicy fullPolicy,
                                            final Map<String, AddressSettings> settings) {
   ActiveMQServer newServer = ActiveMQServers.newActiveMQServer(configuration, realFiles);
   if (settings != null) {
      for (Map.Entry<String, AddressSettings> entry : settings.entrySet()) {
         newServer.getAddressSettingsRepository().addMatch(entry.getKey(), entry.getValue());
      }
   }
   AddressSettings catchAll = new AddressSettings();
   catchAll.setPageSizeBytes(pageSize);
   catchAll.setMaxSizeBytes(maxAddressSize);
   catchAll.setAddressFullMessagePolicy(fullPolicy);
   newServer.getAddressSettingsRepository().addMatch("#", catchAll);
   return newServer;
}
/**
 * Builds the default test configuration with a single acceptor, choosing the
 * Netty or in-VM acceptor factory according to {@code netty}.
 */
protected Configuration createDefaultConfig(final boolean netty) throws Exception {
   final String acceptorFactory = netty ? NETTY_ACCEPTOR_FACTORY : INVM_ACCEPTOR_FACTORY;
   return createDefaultConfig(new HashMap<String, Object>(), acceptorFactory);
}
// Builds a basic test configuration (no JMX) and registers one acceptor per
// given factory name, each sharing the same transport parameter map.
protected Configuration createDefaultConfig(final Map<String, Object> params,
final String... acceptors) throws Exception {
ConfigurationImpl configuration = createBasicConfig(-1).setJMXManagementEnabled(false).clearAcceptorConfigurations();
for (String acceptor : acceptors) {
TransportConfiguration transportConfig = new TransportConfiguration(acceptor, params);
configuration.addAcceptorConfiguration(transportConfig);
}
return configuration;
}
// Baseline broker configuration: security off, small journal (2 files of 100 KiB),
// journal type chosen by getDefaultJournalType(), per-server data dirs (serverID == -1
// means "no suffix"), and compaction disabled so tests control file counts.
protected final ConfigurationImpl createBasicConfig(final int serverID) {
ConfigurationImpl configuration = new ConfigurationImpl().setSecurityEnabled(false).setJournalMinFiles(2).setJournalFileSize(100 * 1024).setJournalType(getDefaultJournalType()).setJournalDirectory(getJournalDir(serverID, false)).setBindingsDirectory(getBindingsDir(serverID, false)).setPagingDirectory(getPageDir(serverID, false)).setLargeMessagesDirectory(getLargeMessagesDir(serverID, false)).setJournalCompactMinFiles(0).setJournalCompactPercentage(0).setClusterPassword(CLUSTER_PASSWORD);
return configuration;
}
// Large-message dir for this test's data root, suffixed per server index/role.
protected String getLargeMessagesDir(final int index, final boolean backup) {
return getLargeMessagesDir(testDir, index, backup);
}
// Large-message dir under an explicit root, suffixed per server index/role.
protected static String getLargeMessagesDir(final String testDir, final int index, final boolean backup) {
return getLargeMessagesDir(testDir) + directoryNameSuffix(index, backup);
}
// Paging dir for this test's data root, suffixed per server index/role.
protected String getPageDir(final int index, final boolean backup) {
return getPageDir(testDir, index, backup);
}
// Paging dir under an explicit root, suffixed per server index/role.
protected static String getPageDir(final String testDir, final int index, final boolean backup) {
return getPageDir(testDir) + directoryNameSuffix(index, backup);
}
// Bindings dir for this test's data root, suffixed per server index/role.
protected String getBindingsDir(final int index, final boolean backup) {
return getBindingsDir(testDir, index, backup);
}
// Bindings dir under an explicit root, suffixed per server index/role.
protected static String getBindingsDir(final String testDir, final int index, final boolean backup) {
return getBindingsDir(testDir) + directoryNameSuffix(index, backup);
}
// Journal dir for this test's data root, suffixed per server index/role.
protected String getJournalDir(final int index, final boolean backup) {
return getJournalDir(testDir, index, backup);
}
// Journal dir under an explicit root, suffixed per server index/role.
protected static String getJournalDir(final String testDir, final int index, final boolean backup) {
return getJournalDir(testDir) + directoryNameSuffix(index, backup);
}
/**
 * Per-server directory suffix: empty for index -1 (single-server tests),
 * otherwise "<index>-B" for a backup or "<index>-L" for a live server.
 */
private static String directoryNameSuffix(int index, boolean backup) {
   return index == -1 ? "" : index + "-" + (backup ? "B" : "L");
}
/**
 * Selects the journal implementation: native AIO when libaio is loadable on
 * this platform, otherwise the portable NIO journal.
 */
protected static JournalType getDefaultJournalType() {
   return LibaioContext.isLoaded() ? JournalType.ASYNCIO : JournalType.NIO;
}
// Wipes and recreates all server data directories under this test's root.
protected final void clearDataRecreateServerDirs() {
clearDataRecreateServerDirs(testDir);
}
// Deletes the whole data root, then recreates every directory the broker and
// clients write to, so each test starts from a clean filesystem state.
protected void clearDataRecreateServerDirs(final String testDir1) {
// Need to delete the root
File file = new File(testDir1);
deleteDirectory(file);
file.mkdirs();
recreateDirectory(getJournalDir(testDir1));
recreateDirectory(getBindingsDir(testDir1));
recreateDirectory(getPageDir(testDir1));
recreateDirectory(getLargeMessagesDir(testDir1));
recreateDirectory(getClientLargeMessagesDir(testDir1));
recreateDirectory(getTemporaryDir(testDir1));
}
// Scratch directory under the given test root.
protected String getTemporaryDir(final String testDir1) {
return testDir1 + "/temp";
}
// Client-side large-message staging directory under the given test root.
protected String getClientLargeMessagesDir(final String testDir1) {
return testDir1 + "/client-large-msg";
}
// Server large-message directory under the given test root.
protected static String getLargeMessagesDir(final String testDir1) {
return testDir1 + "/large-msg";
}
// Paging directory under the given test root.
protected static String getPageDir(final String testDir1) {
return testDir1 + "/page";
}
// Bindings journal directory under the given test root.
protected static String getBindingsDir(final String testDir1) {
return testDir1 + "/bindings";
}
// Message journal directory under the given test root.
protected static String getJournalDir(final String testDir1) {
return testDir1 + "/journal";
}
// Removes the directory (recursively) and recreates it empty.
protected static final void recreateDirectory(final String directory) {
File file = new File(directory);
deleteDirectory(file);
file.mkdirs();
}
/**
 * Recursively deletes {@code directory} and its contents.
 *
 * <p>{@link File#list()} can transiently return {@code null} (I/O error, or a
 * concurrent process still touching the directory), so listing is retried a
 * few times before descending.
 *
 * @return {@code true} if the final delete of {@code directory} succeeded
 */
protected static final boolean deleteDirectory(final File directory) {
   if (directory.isDirectory()) {
      String[] files = directory.list();
      int num = 5;
      int attempts = 0;
      while (files == null && (attempts < num)) {
         try {
            Thread.sleep(100);
         }
         catch (InterruptedException e) {
            // Restore the interrupt status instead of silently swallowing it.
            Thread.currentThread().interrupt();
         }
         files = directory.list();
         attempts++;
      }
      // FIX: the original iterated over 'files' unconditionally and threw a
      // NullPointerException when listing still failed after all retries.
      if (files != null) {
         for (String file : files) {
            File f = new File(directory, file);
            if (!deleteDirectory(f)) {
               LOG.warn("Failed to clean up file: " + f.getAbsolutePath());
            }
         }
      }
   }
   return directory.delete();
}
// Accessor for the server instance created by this test base.
public ActiveMQServer getServer()
{
return server;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.core;
import static org.apache.solr.core.SolrConfig.PluginOpts.*;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.schema.IndexSchemaFactory;
import org.apache.solr.util.DOMUtil;
import org.apache.solr.util.FileUtils;
import org.apache.solr.util.RegexFileFilter;
import org.apache.solr.handler.component.SearchComponent;
import org.apache.solr.request.SolrRequestHandler;
import org.apache.solr.response.QueryResponseWriter;
import org.apache.solr.response.transform.TransformerFactory;
import org.apache.solr.search.CacheConfig;
import org.apache.solr.search.FastLRUCache;
import org.apache.solr.search.QParserPlugin;
import org.apache.solr.search.ValueSourceParser;
import org.apache.solr.servlet.SolrRequestParsers;
import org.apache.solr.update.SolrIndexConfig;
import org.apache.solr.update.UpdateLog;
import org.apache.solr.update.processor.UpdateRequestProcessorChain;
import org.apache.solr.spelling.QueryConverter;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.index.IndexDeletionPolicy;
import org.apache.lucene.util.Version;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPathConstants;
import java.io.File;
import java.util.*;
import java.util.regex.Pattern;
import java.util.regex.Matcher;
import java.io.FileFilter;
import java.io.IOException;
/**
 * Provides a static reference to a Config object modeling the main
 * configuration data for a Solr instance -- typically found in
 * "solrconfig.xml".
 */
public class SolrConfig extends Config {
// Shared logger for configuration parsing and plugin loading.
public static final Logger log = LoggerFactory.getLogger(SolrConfig.class);
// Default configuration file name resolved through the resource loader.
public static final String DEFAULT_CONF_FILE = "solrconfig.xml";
// Flags controlling how loadPluginInfo validates a plugin section:
// MULTI_OK allows multiple sections for the same tag, REQUIRE_NAME /
// REQUIRE_CLASS make the corresponding attribute mandatory.
static enum PluginOpts {
MULTI_OK,
REQUIRE_NAME,
REQUIRE_CLASS,
// EnumSet.of and/or EnumSet.copyOf(Collection) are annoying
// because of type determination; NOOP exists only as a harmless first
// argument for the varargs EnumSet.of(first, rest...) call.
NOOP
}
// Request-dispatcher settings parsed from <requestDispatcher> in solrconfig.xml.
private int multipartUploadLimitKB;
private int formUploadLimitKB;
private boolean enableRemoteStreams;
private boolean handleSelect;
private boolean addHttpRequestToContext;
// Parser built from this config; exposed via getRequestParsers().
private final SolrRequestParsers solrRequestParsers;
/** Creates a default instance from the solrconfig.xml (resolved by a default resource loader). */
public SolrConfig()
throws ParserConfigurationException, IOException, SAXException {
this( (SolrResourceLoader) null, DEFAULT_CONF_FILE, null );
}
/** Creates a configuration instance from a configuration name.
 * A default resource loader will be created (@see SolrResourceLoader)
 *@param name the configuration name used by the loader
 */
public SolrConfig(String name)
throws ParserConfigurationException, IOException, SAXException {
this( (SolrResourceLoader) null, name, null);
}
/** Creates a configuration instance from a configuration name and stream.
 * A default resource loader will be created (@see SolrResourceLoader).
 * If the stream is null, the resource loader will open the configuration stream.
 * If the stream is not null, no attempt to load the resource will occur (the name is not used).
 *@param name the configuration name
 *@param is the configuration stream
 */
public SolrConfig(String name, InputSource is)
throws ParserConfigurationException, IOException, SAXException {
this( (SolrResourceLoader) null, name, is );
}
/** Creates a configuration instance from an instance directory, configuration name and stream.
 *@param instanceDir the directory used to create the resource loader
 *@param name the configuration name used by the loader if the stream is null
 *@param is the configuration stream
 */
public SolrConfig(String instanceDir, String name, InputSource is)
throws ParserConfigurationException, IOException, SAXException {
this(new SolrResourceLoader(instanceDir), name, is);
}
/** Creates a configuration instance from a resource loader, a configuration name and a stream.
 * If the stream is null, the resource loader will open the configuration stream.
 * If the stream is not null, no attempt to load the resource will occur (the name is not used).
 * This is the primary constructor; all other constructors delegate here.
 *@param loader the resource loader
 *@param name the configuration name
 *@param is the configuration stream
 */
public SolrConfig(SolrResourceLoader loader, String name, InputSource is)
throws ParserConfigurationException, IOException, SAXException {
super(loader, name, is, "/config/");
// Register <lib> dirs with the class loader before any plugin classes are resolved.
initLibs();
luceneMatchVersion = getLuceneVersion("luceneMatchVersion");
String indexConfigPrefix;
// Old indexDefaults and mainIndex sections are deprecated and fails fast for luceneMatchVersion=>LUCENE_40.
// For older solrconfig.xml's we allow the old sections, but never mixed with the new <indexConfig>
boolean hasDeprecatedIndexConfig = (getNode("indexDefaults", false) != null) || (getNode("mainIndex", false) != null);
boolean hasNewIndexConfig = getNode("indexConfig", false) != null;
if(hasDeprecatedIndexConfig){
if(luceneMatchVersion.onOrAfter(Version.LUCENE_40)) {
throw new SolrException(ErrorCode.FORBIDDEN, "<indexDefaults> and <mainIndex> configuration sections are discontinued. Use <indexConfig> instead.");
} else {
// Still allow the old sections for older LuceneMatchVersion's
if(hasNewIndexConfig) {
throw new SolrException(ErrorCode.FORBIDDEN, "Cannot specify both <indexDefaults>, <mainIndex> and <indexConfig> at the same time. Please use <indexConfig> only.");
}
log.warn("<indexDefaults> and <mainIndex> configuration sections are deprecated and will fail for luceneMatchVersion=LUCENE_40 and later. Please use <indexConfig> instead.");
defaultIndexConfig = new SolrIndexConfig(this, "indexDefaults", null);
mainIndexConfig = new SolrIndexConfig(this, "mainIndex", defaultIndexConfig);
indexConfigPrefix = "mainIndex";
}
} else {
defaultIndexConfig = mainIndexConfig = null;
indexConfigPrefix = "indexConfig";
}
nrtMode = getBool(indexConfigPrefix+"/nrtMode", true);
// Parse indexConfig section, using mainIndex as backup in case old config is used
indexConfig = new SolrIndexConfig(this, "indexConfig", mainIndexConfig);
booleanQueryMaxClauseCount = getInt("query/maxBooleanClauses", BooleanQuery.getMaxClauseCount());
log.info("Using Lucene MatchVersion: " + luceneMatchVersion);
// Warn about deprecated / discontinued parameters
// boolToFilterOptimizer has had no effect since 3.1
if(get("query/boolTofilterOptimizer", null) != null)
log.warn("solrconfig.xml: <boolTofilterOptimizer> is currently not implemented and has no effect.");
if(get("query/HashDocSet", null) != null)
log.warn("solrconfig.xml: <HashDocSet> is deprecated and no longer recommended used.");
// TODO: Old code - in case somebody wants to re-enable. Also see SolrIndexSearcher#search()
// filtOptEnabled = getBool("query/boolTofilterOptimizer/@enabled", false);
// filtOptCacheSize = getInt("query/boolTofilterOptimizer/@cacheSize",32);
// filtOptThreshold = getFloat("query/boolTofilterOptimizer/@threshold",.05f);
// ---- <query> section: searcher behaviour and cache configurations ----
useFilterForSortedQuery = getBool("query/useFilterForSortedQuery", false);
queryResultWindowSize = Math.max(1, getInt("query/queryResultWindowSize", 1));
queryResultMaxDocsCached = getInt("query/queryResultMaxDocsCached", Integer.MAX_VALUE);
enableLazyFieldLoading = getBool("query/enableLazyFieldLoading", false);
filterCacheConfig = CacheConfig.getConfig(this, "query/filterCache");
queryResultCacheConfig = CacheConfig.getConfig(this, "query/queryResultCache");
documentCacheConfig = CacheConfig.getConfig(this, "query/documentCache");
CacheConfig conf = CacheConfig.getConfig(this, "query/fieldValueCache");
if (conf == null) {
// No explicit fieldValueCache configured: fall back to a FastLRUCache default.
Map<String,String> args = new HashMap<String,String>();
args.put("name","fieldValueCache");
args.put("size","10000");
args.put("initialSize","10");
args.put("showItems","-1");
conf = new CacheConfig(FastLRUCache.class, args, null);
}
fieldValueCacheConfig = conf;
unlockOnStartup = getBool(indexConfigPrefix+"/unlockOnStartup", false);
useColdSearcher = getBool("query/useColdSearcher",false);
dataDir = get("dataDir", null);
if (dataDir != null && dataDir.length()==0) dataDir=null;
userCacheConfigs = CacheConfig.getMultipleConfigs(this, "query/cache");
org.apache.solr.search.SolrIndexSearcher.initRegenerators(this);
hashSetInverseLoadFactor = 1.0f / getFloat("//HashDocSet/@loadFactor",0.75f);
hashDocSetMaxSize= getInt("//HashDocSet/@maxSize",3000);
httpCachingConfig = new HttpCachingConfig(this);
// ---- optional <jmx> section ----
Node jmx = getNode("jmx", false);
if (jmx != null) {
jmxConfig = new JmxConfiguration(true,
get("jmx/@agentId", null),
get("jmx/@serviceUrl", null),
get("jmx/@rootName", null));
} else {
jmxConfig = new JmxConfiguration(false, null, null, null);
}
maxWarmingSearchers = getInt("query/maxWarmingSearchers",Integer.MAX_VALUE);
// ---- plugin sections: each call records PluginInfo entries in pluginStore ----
loadPluginInfo(SolrRequestHandler.class,"requestHandler",
REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK);
loadPluginInfo(QParserPlugin.class,"queryParser",
REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK);
loadPluginInfo(QueryResponseWriter.class,"queryResponseWriter",
REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK);
loadPluginInfo(ValueSourceParser.class,"valueSourceParser",
REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK);
loadPluginInfo(TransformerFactory.class,"transformer",
REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK);
loadPluginInfo(SearchComponent.class,"searchComponent",
REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK);
// TODO: WTF is up with queryConverter???
// it apparently *only* works as a singleton? - SOLR-4304
// and even then -- only if there is a single SpellCheckComponent
// because of queryConverter.setAnalyzer
loadPluginInfo(QueryConverter.class,"queryConverter",
REQUIRE_NAME, REQUIRE_CLASS);
// this is hackish, since it picks up all SolrEventListeners,
// regardless of when/how/why they are used (or even if they are
// declared outside of the appropriate context) but there's no nice
// way around that in the PluginInfo framework
loadPluginInfo(SolrEventListener.class, "//listener",
REQUIRE_CLASS, MULTI_OK);
loadPluginInfo(DirectoryFactory.class,"directoryFactory",
REQUIRE_CLASS);
loadPluginInfo(IndexDeletionPolicy.class,indexConfigPrefix+"/deletionPolicy",
REQUIRE_CLASS);
loadPluginInfo(CodecFactory.class,"codecFactory",
REQUIRE_CLASS);
loadPluginInfo(IndexReaderFactory.class,"indexReaderFactory",
REQUIRE_CLASS);
loadPluginInfo(UpdateRequestProcessorChain.class,"updateRequestProcessorChain",
MULTI_OK);
loadPluginInfo(UpdateLog.class,"updateHandler/updateLog");
loadPluginInfo(IndexSchemaFactory.class,"schemaFactory",
REQUIRE_CLASS);
updateHandlerInfo = loadUpdatehandlerInfo();
// ---- <requestDispatcher> section ----
multipartUploadLimitKB = getInt(
"requestDispatcher/requestParsers/@multipartUploadLimitInKB", 2048 );
formUploadLimitKB = getInt(
"requestDispatcher/requestParsers/@formdataUploadLimitInKB", 2048 );
enableRemoteStreams = getBool(
"requestDispatcher/requestParsers/@enableRemoteStreaming", false );
// Let this filter take care of /select?xxx format
handleSelect = getBool(
"requestDispatcher/@handleSelect", true );
addHttpRequestToContext = getBool(
"requestDispatcher/requestParsers/@addHttpRequestToContext", false );
solrRequestParsers = new SolrRequestParsers(this);
Config.log.info("Loaded SolrConfig: " + name);
}
// Reads the <updateHandler> autocommit settings; -1 defaults mean "disabled".
protected UpdateHandlerInfo loadUpdatehandlerInfo() {
return new UpdateHandlerInfo(get("updateHandler/@class",null),
getInt("updateHandler/autoCommit/maxDocs",-1),
getInt("updateHandler/autoCommit/maxTime",-1),
getBool("updateHandler/autoCommit/openSearcher",true),
getInt("updateHandler/commitIntervalLowerBound",-1),
getInt("updateHandler/autoSoftCommit/maxDocs",-1),
getInt("updateHandler/autoSoftCommit/maxTime",-1),
getBool("updateHandler/commitWithin/softCommit",true));
}
/**
 * Reads all plugin sections matching {@code tag}, validates them against the
 * given options, and stores the result in {@link #pluginStore} keyed by the
 * plugin interface's class name.
 *
 * @param clazz the plugin interface whose FQN keys the plugin store
 * @param tag   xpath expression selecting the configuration sections
 * @param opts  validation flags; see {@link PluginOpts}
 * @throws SolrException if multiple sections match and MULTI_OK was not given
 */
private void loadPluginInfo(Class<?> clazz, String tag, PluginOpts... opts) {
  // NOOP is a harmless first element required by the EnumSet.of(first, rest...) varargs form.
  EnumSet<PluginOpts> options = EnumSet.of(NOOP, opts);
  boolean requireName = options.contains(REQUIRE_NAME);
  boolean requireClass = options.contains(REQUIRE_CLASS);
  List<PluginInfo> result = readPluginInfos(tag, requireName, requireClass);
  if (1 < result.size() && ! options.contains(MULTI_OK)) {
    throw new SolrException
      (SolrException.ErrorCode.SERVER_ERROR,
       "Found " + result.size() + " configuration sections when at most "
       + "1 is allowed matching expression: " + tag);
  }
  if(!result.isEmpty()) pluginStore.put(clazz.getName(),result);
}
// Evaluates the xpath tag and builds a PluginInfo per matching node,
// keeping only sections whose enabled flag is not false.
public List<PluginInfo> readPluginInfos(String tag, boolean requireName, boolean requireClass) {
ArrayList<PluginInfo> result = new ArrayList<PluginInfo>();
NodeList nodes = (NodeList) evaluate(tag, XPathConstants.NODESET);
for (int i=0; i<nodes.getLength(); i++) {
PluginInfo pluginInfo = new PluginInfo(nodes.item(i), "[solrconfig.xml] " + tag, requireName, requireClass);
if(pluginInfo.isEnabled()) result.add(pluginInfo);
}
return result;
}
// Accessor for the request parsers built from this configuration.
public SolrRequestParsers getRequestParsers() {
return solrRequestParsers;
}
/* The set of materialized parameters: */
public final int booleanQueryMaxClauseCount;
// SolrIndexSearcher - nutch optimizer -- Disabled since 3.1
// public final boolean filtOptEnabled;
// public final int filtOptCacheSize;
// public final float filtOptThreshold;
// SolrIndexSearcher - caches configurations
public final CacheConfig filterCacheConfig ;
public final CacheConfig queryResultCacheConfig;
public final CacheConfig documentCacheConfig;
public final CacheConfig fieldValueCacheConfig;
public final CacheConfig[] userCacheConfigs;
// SolrIndexSearcher - more...
public final boolean useFilterForSortedQuery;
public final int queryResultWindowSize;
public final int queryResultMaxDocsCached;
public final boolean enableLazyFieldLoading;
public final boolean nrtMode;
// DocSet
public final float hashSetInverseLoadFactor;
public final int hashDocSetMaxSize;
// default & main index configurations, deprecated as of 3.6
@Deprecated
public final SolrIndexConfig defaultIndexConfig;
@Deprecated
public final SolrIndexConfig mainIndexConfig;
// IndexConfig settings
public final SolrIndexConfig indexConfig;
protected UpdateHandlerInfo updateHandlerInfo ;
// Plugins keyed by the plugin interface's FQN; populated by loadPluginInfo().
private Map<String, List<PluginInfo>> pluginStore = new LinkedHashMap<String, List<PluginInfo>>();
public final int maxWarmingSearchers;
public final boolean unlockOnStartup;
public final boolean useColdSearcher;
public final Version luceneMatchVersion;
// May be null when <dataDir> is absent or empty in solrconfig.xml.
protected String dataDir;
//JMX configuration
public final JmxConfiguration jmxConfig;
private final HttpCachingConfig httpCachingConfig;
// Accessor for the parsed <requestDispatcher><httpCaching> settings.
public HttpCachingConfig getHttpCachingConfig() {
return httpCachingConfig;
}
/**
 * Parsed representation of the optional {@code <jmx>} element.
 * {@code agentId} and {@code serviceUrl} are mutually exclusive.
 */
public static class JmxConfiguration {
  public boolean enabled = false;
  public String agentId;
  public String serviceUrl;
  public String rootName;

  public JmxConfiguration(boolean enabled,
                          String agentId,
                          String serviceUrl,
                          String rootName) {
    // Fail fast on an inconsistent configuration before initializing any state.
    if (agentId != null && serviceUrl != null) {
      throw new SolrException
        (SolrException.ErrorCode.SERVER_ERROR,
         "Incorrect JMX Configuration in solrconfig.xml, "+
         "both agentId and serviceUrl cannot be specified at the same time");
    }
    this.enabled = enabled;
    this.agentId = agentId;
    this.serviceUrl = serviceUrl;
    this.rootName = rootName;
  }
}
public static class HttpCachingConfig {
/** config xpath prefix for getting HTTP Caching options */
private final static String CACHE_PRE
= "requestDispatcher/httpCaching/";
/** For extracting Expires "ttl" from the cacheControl config value */
private final static Pattern MAX_AGE
= Pattern.compile("\\bmax-age=(\\d+)");
// Source of the Last-Modified header: searcher open time, index dir mtime,
// or BOGUS when the configured value is unrecognized.
public static enum LastModFrom {
OPENTIME, DIRLASTMOD, BOGUS;
/** Input must not be null */
public static LastModFrom parse(final String s) {
try {
return valueOf(s.toUpperCase(Locale.ROOT));
} catch (Exception e) {
// Unknown value: warn and degrade to BOGUS rather than failing startup.
log.warn( "Unrecognized value for lastModFrom: " + s, e);
return BOGUS;
}
}
}
private final boolean never304;
private final String etagSeed;
private final String cacheControlHeader;
// Parsed max-age (seconds) from the Cache-Control header; null when absent.
private final Long maxAge;
private final LastModFrom lastModFrom;
private HttpCachingConfig(SolrConfig conf) {
never304 = conf.getBool(CACHE_PRE+"@never304", false);
etagSeed = conf.get(CACHE_PRE+"@etagSeed", "Solr");
lastModFrom = LastModFrom.parse(conf.get(CACHE_PRE+"@lastModFrom",
"openTime"));
cacheControlHeader = conf.get(CACHE_PRE+"cacheControl",null);
Long tmp = null; // maxAge
if (null != cacheControlHeader) {
try {
final Matcher ttlMatcher = MAX_AGE.matcher(cacheControlHeader);
final String ttlStr = ttlMatcher.find() ? ttlMatcher.group(1) : null;
tmp = (null != ttlStr && !"".equals(ttlStr))
? Long.valueOf(ttlStr)
: null;
} catch (Exception e) {
// Malformed max-age is tolerated: log and leave maxAge null.
log.warn( "Ignoring exception while attempting to " +
"extract max-age from cacheControl config: " +
cacheControlHeader, e);
}
}
maxAge = tmp;
}
public boolean isNever304() { return never304; }
public String getEtagSeed() { return etagSeed; }
/** null if no Cache-Control header */
public String getCacheControlHeader() { return cacheControlHeader; }
/** null if no max age limitation */
public Long getMaxAge() { return maxAge; }
public LastModFrom getLastModFrom() { return lastModFrom; }
}
// Immutable holder for the <updateHandler> autocommit configuration.
public static class UpdateHandlerInfo{
public final String className;
public final int autoCommmitMaxDocs,autoCommmitMaxTime,commitIntervalLowerBound,
autoSoftCommmitMaxDocs,autoSoftCommmitMaxTime;
public final boolean openSearcher; // is opening a new searcher part of hard autocommit?
public final boolean commitWithinSoftCommit;
/**
 * @param className update handler implementation class, or null for the default
 * @param autoCommmitMaxDocs set -1 as default
 * @param autoCommmitMaxTime set -1 as default
 * @param openSearcher whether a hard autocommit opens a new searcher
 * @param commitIntervalLowerBound set -1 as default
 * @param autoSoftCommmitMaxDocs set -1 as default
 * @param autoSoftCommmitMaxTime set -1 as default
 * @param commitWithinSoftCommit whether commitWithin issues a soft commit
 */
public UpdateHandlerInfo(String className, int autoCommmitMaxDocs, int autoCommmitMaxTime, boolean openSearcher, int commitIntervalLowerBound,
int autoSoftCommmitMaxDocs, int autoSoftCommmitMaxTime, boolean commitWithinSoftCommit) {
this.className = className;
this.autoCommmitMaxDocs = autoCommmitMaxDocs;
this.autoCommmitMaxTime = autoCommmitMaxTime;
this.openSearcher = openSearcher;
this.commitIntervalLowerBound = commitIntervalLowerBound;
this.autoSoftCommmitMaxDocs = autoSoftCommmitMaxDocs;
this.autoSoftCommmitMaxTime = autoSoftCommmitMaxTime;
this.commitWithinSoftCommit = commitWithinSoftCommit;
}
}
// public Map<String, List<PluginInfo>> getUpdateProcessorChainInfo() { return updateProcessorChainInfo; }
// Accessor for the parsed <updateHandler> settings.
public UpdateHandlerInfo getUpdateHandlerInfo() { return updateHandlerInfo; }
// Configured <dataDir>, or null when absent/empty.
public String getDataDir() { return dataDir; }
/**SolrConfig keeps a repository of plugins by the type. The known interfaces are the types.
 * @param type The key is FQN of the plugin class there are a few known types : SolrFormatter, SolrFragmenter
 *             SolrRequestHandler,QParserPlugin, QueryResponseWriter,ValueSourceParser,
 *             SearchComponent, QueryConverter, SolrEventListener, DirectoryFactory,
 *             IndexDeletionPolicy, IndexReaderFactory, {@link TransformerFactory}
 * @return the registered plugins for that type, or an empty list when none exist
 */
public List<PluginInfo> getPluginInfos(String type){
  List<PluginInfo> infos = pluginStore.get(type);
  if (infos == null) {
    return Collections.<PluginInfo>emptyList();
  }
  return infos;
}
/**
 * Returns the single plugin registered for the given type, or null when none
 * is configured. Throws when the type was (incorrectly) configured more than once.
 */
public PluginInfo getPluginInfo(String type){
  List<PluginInfo> matches = pluginStore.get(type);
  if (matches == null || matches.isEmpty()) {
    return null;
  }
  if (matches.size() > 1) {
    throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
        "Multiple plugins configured for type: " + type);
  }
  return matches.get(0);
}
// Processes <lib> elements: each adds either a whole directory (dir, with an
// optional regex filter) or a single file (path) to the resource loader's
// class loader, then reloads Lucene SPI so new codecs/analyzers are visible.
private void initLibs() {
NodeList nodes = (NodeList) evaluate("lib", XPathConstants.NODESET);
if (nodes == null || nodes.getLength() == 0) return;
log.info("Adding specified lib dirs to ClassLoader");
SolrResourceLoader loader = getResourceLoader();
try {
for (int i = 0; i < nodes.getLength(); i++) {
Node node = nodes.item(i);
String baseDir = DOMUtil.getAttr(node, "dir");
String path = DOMUtil.getAttr(node, "path");
if (null != baseDir) {
// :TODO: add support for a simpler 'glob' mutually exclusive of regex
String regex = DOMUtil.getAttr(node, "regex");
FileFilter filter = (null == regex) ? null : new RegexFileFilter(regex);
loader.addToClassLoader(baseDir, filter, false);
} else if (null != path) {
// A single jar: add its parent dir with a filter matching exactly that file.
final File file = FileUtils.resolvePath(new File(loader.getInstanceDir()), path);
loader.addToClassLoader(file.getParent(), new FileFilter() {
@Override
public boolean accept(File pathname) {
return pathname.equals(file);
}
}, false);
} else {
throw new RuntimeException(
"lib: missing mandatory attributes: 'dir' or 'path'");
}
}
} finally {
// Always refresh SPI, even if a <lib> entry failed part-way through.
loader.reloadLuceneSPI();
}
}
// Upload limit (KB) for multipart request bodies.
public int getMultipartUploadLimitKB() {
return multipartUploadLimitKB;
}
// Upload limit (KB) for form-encoded request bodies.
public int getFormUploadLimitKB() {
return formUploadLimitKB;
}
// Whether the dispatch filter handles /select?qt=... style requests.
public boolean isHandleSelect() {
return handleSelect;
}
// Whether the raw HttpServletRequest is placed into the request context.
public boolean isAddHttpRequestToContext() {
return addHttpRequestToContext;
}
// Whether remote streaming (stream.url / stream.file) is permitted.
public boolean isEnableRemoteStreams() {
return enableRemoteStreams;
}
}
| |
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* BPS Bildungsportal Sachsen GmbH, http://www.bps-system.de
* <p>
*/
package de.bps.course.nodes;
import java.util.ArrayList;
import java.util.List;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.stack.StackedController;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.gui.control.generic.tabbable.TabbableController;
import org.olat.core.helpers.Settings;
import org.olat.core.util.Util;
import org.olat.course.ICourse;
import org.olat.course.condition.ConditionEditController;
import org.olat.course.editor.CourseEditorEnv;
import org.olat.course.editor.NodeEditController;
import org.olat.course.editor.StatusDescription;
import org.olat.course.nodes.AbstractAccessableCourseNode;
import org.olat.course.nodes.CourseNode;
import org.olat.course.nodes.StatusDescriptionHelper;
import org.olat.course.nodes.TitledWrapperHelper;
import org.olat.course.run.navigation.NodeRunConstructionResult;
import org.olat.course.run.userview.NodeEvaluation;
import org.olat.course.run.userview.UserCourseEnvironment;
import org.olat.modules.ModuleConfiguration;
import org.olat.repository.RepositoryEntry;
import de.bps.course.nodes.ll.LLEditController;
import de.bps.course.nodes.ll.LLModel;
import de.bps.course.nodes.ll.LLRunController;
/**
* Description:<br>
* Link list course node.
*
* <P>
* Initial Date: 05.11.2008 <br>
*
* @author Marcel Karras (toka@freebits.de)
*/
public class LLCourseNode extends AbstractAccessableCourseNode {
// Course node type identifier for link-list nodes.
private static final String TYPE = "ll";
// Module-configuration keys used by the edit/run controllers.
public static final String CONF_COURSE_ID = "ll_course_id";
public static final String CONF_COURSE_NODE_ID = "ll_course_node_id";
public static final String CONF_LINKLIST = "ll_link_list";
/**
 * Create default link list course node.
 */
public LLCourseNode() {
super(TYPE);
initDefaultConfig();
}
// Seeds the module configuration with a single empty link entry when no
// link list has been stored yet, so the edit controller has a row to show.
private void initDefaultConfig() {
ModuleConfiguration config = getModuleConfiguration();
// add an empty link entry as default if none existent
if (config.get(CONF_LINKLIST) == null) {
List<LLModel> initialList = new ArrayList<LLModel>(1);
initialList.add(new LLModel());
config.set(CONF_LINKLIST, initialList);
}
}
@Override
public void updateModuleConfigDefaults(boolean isNewNode) {
  // v1 -> v2 migration: normalize link targets to absolute URLs and decide
  // whether each link opens in the same window (_self, for server-local URLs)
  // or a new one (_blank).
  ModuleConfiguration config = getModuleConfiguration();
  if(config.getConfigurationVersion() < 2) {
    @SuppressWarnings("unchecked")
    List<LLModel> links = (List<LLModel>)config.get(CONF_LINKLIST);
    // FIX: guard against a missing link list (and null targets) — the original
    // threw a NullPointerException when CONF_LINKLIST was absent.
    if (links != null) {
      for(LLModel link:links) {
        String linkValue = link.getTarget();
        if (linkValue == null) {
          continue;
        }
        if(!linkValue.contains("://")) {
          linkValue = "http://".concat(linkValue.trim());
        }
        if(linkValue.startsWith(Settings.getServerContextPathURI())) {
          link.setHtmlTarget("_self");
        } else {
          link.setHtmlTarget("_blank");
        }
      }
    }
    config.setConfigurationVersion(2);
  }
}
/**
 * {@inheritDoc}
 */
@Override
public TabbableController createEditController(UserRequest ureq, WindowControl wControl, StackedController stackPanel, ICourse course,
UserCourseEnvironment userCourseEnv) {
updateModuleConfigDefaults(false);
LLEditController childTabCntrllr = new LLEditController(getModuleConfiguration(), ureq, wControl, this, course, userCourseEnv);
CourseNode chosenNode = course.getEditorTreeModel().getCourseNode(userCourseEnv.getCourseEditorEnv().getCurrentCourseNodeId());
// needed for DENEditController.isConfigValid()
// NOTE(review): the comment above references DENEditController — presumably
// copied from the DEN node; verify whether LLEditController is meant here.
getModuleConfiguration().set(CONF_COURSE_ID, course.getResourceableId());
getModuleConfiguration().set(CONF_COURSE_NODE_ID, chosenNode.getIdent());
return new NodeEditController(ureq, wControl, course.getEditorTreeModel(), course, chosenNode, course.getCourseEnvironment()
.getCourseGroupManager(), userCourseEnv, childTabCntrllr);
}
/**
* {@inheritDoc}
*/
@Override
public NodeRunConstructionResult createNodeRunConstructionResult(UserRequest ureq, WindowControl wControl,
UserCourseEnvironment userCourseEnv, NodeEvaluation ne, String nodecmd) {
updateModuleConfigDefaults(false);
Controller controller = new LLRunController(ureq, wControl, getModuleConfiguration(), this, userCourseEnv, true);
controller = TitledWrapperHelper.getWrapper(ureq, wControl, controller, this, "o_ll_icon");
return new NodeRunConstructionResult(controller);
}
/**
* @see org.olat.course.nodes.GenericCourseNode#createPeekViewRunController(org.olat.core.gui.UserRequest,
* org.olat.core.gui.control.WindowControl,
* org.olat.course.run.userview.UserCourseEnvironment,
* org.olat.course.run.userview.NodeEvaluation)
*/
@Override
public Controller createPeekViewRunController(UserRequest ureq, WindowControl wControl, UserCourseEnvironment userCourseEnv,
NodeEvaluation ne) {
updateModuleConfigDefaults(false);
// Use normal view as peekview
Controller controller = new LLRunController(ureq, wControl, getModuleConfiguration(), this, userCourseEnv, false);
return controller;
}
/**
* @see org.olat.course.nodes.GenericCourseNode#createPreviewController(org.olat.core.gui.UserRequest,
* org.olat.core.gui.control.WindowControl,
* org.olat.course.run.userview.UserCourseEnvironment,
* org.olat.course.run.userview.NodeEvaluation)
*/
@Override
public Controller createPreviewController(UserRequest ureq, WindowControl wControl, UserCourseEnvironment userCourseEnv, NodeEvaluation ne) {
Controller controller = new LLRunController(ureq, wControl, getModuleConfiguration(), this, userCourseEnv, true);
controller = TitledWrapperHelper.getWrapper(ureq, wControl, controller, this, "o_ll_icon");
return controller;
}
/**
* {@inheritDoc}
*/
@Override
public StatusDescription[] isConfigValid(CourseEditorEnv cev) {
String translatorStr = Util.getPackageName(ConditionEditController.class);
List<StatusDescription> statusDescs = isConfigValidWithTranslator(cev, translatorStr, getConditionExpressions());
return StatusDescriptionHelper.sort(statusDescs);
}
/**
* {@inheritDoc}
*/
public RepositoryEntry getReferencedRepositoryEntry() {
return null;
}
/**
* {@inheritDoc}
*/
public StatusDescription isConfigValid() {
if (oneClickStatusCache != null) { return oneClickStatusCache[0]; }
StatusDescription sd = StatusDescription.NOERROR;
if (!LLEditController.isConfigValid(getModuleConfiguration())) {
String transPackage = Util.getPackageName(LLEditController.class);
sd = new StatusDescription(StatusDescription.WARNING, "config.nolinks.short", "config.nolinks.long", null, transPackage);
sd.setDescriptionForUnit(getIdent());
sd.setActivateableViewIdentifier(LLEditController.PANE_TAB_LLCONFIG);
}
return sd;
}
/**
* {@inheritDoc}
*/
public boolean needsReferenceToARepositoryEntry() {
return false;
}
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.daemon.quickFix;
import com.intellij.codeInsight.daemon.impl.quickfix.RenameFileFix;
import com.intellij.codeInspection.LocalQuickFix;
import com.intellij.ide.fileTemplates.FileTemplate;
import com.intellij.ide.fileTemplates.FileTemplateManager;
import com.intellij.ide.fileTemplates.actions.CreateFromTemplateActionBase;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.FileTypeManager;
import com.intellij.openapi.fileTypes.UnknownFileType;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.impl.source.resolve.reference.impl.providers.FileReference;
import com.intellij.psi.impl.source.resolve.reference.impl.providers.FileReferenceSet;
import com.intellij.psi.impl.source.resolve.reference.impl.providers.FileTargetContext;
import com.intellij.util.ArrayUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.SmartList;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
/**
* @author Maxim.Mossienko
*/
public class FileReferenceQuickFixProvider {
// Utility class: no instances.
private FileReferenceQuickFixProvider() {}
/**
 * Builds the quick fixes offered for an unresolved file reference: rename fixes when
 * an element already resolves case-insensitively, otherwise a "create file" fix for
 * the last path component or a "create directory" fix for an intermediate one.
 * Returns an empty list when no fix is applicable.
 */
@NotNull
public static List<? extends LocalQuickFix> registerQuickFix(@NotNull FileReference reference) {
final FileReferenceSet fileReferenceSet = reference.getFileReferenceSet();
int index = reference.getIndex();
if (index < 0) return emptyList();
final String newFileName = reference.getFileNameToCreate();
// check if we could create file: reject empty names and names containing
// characters that are illegal in file names (':' only matters on Windows)
if (newFileName.isEmpty() ||
newFileName.indexOf('\\') != -1 ||
newFileName.indexOf('*') != -1 ||
newFileName.indexOf('?') != -1 ||
SystemInfo.isWindows && newFileName.indexOf(':') != -1) {
return emptyList();
}
PsiElement element = reference.getElement();
PsiFile containingFile = element.getContainingFile();
if (fileReferenceSet.isCaseSensitive()) {
// presumably a case-insensitive resolve: if something matching only by case exists,
// offer to rename either the reference or the existing file — TODO confirm the
// semantics of innerSingleResolve(false, ...)
PsiElement psiElement = containingFile == null ? null : reference.innerSingleResolve(false, containingFile);
if (psiElement != null) {
// NOTE(review): unchecked cast — resolve results here are assumed to be named elements
String existingElementName = ((PsiNamedElement)psiElement).getName();
RenameFileReferenceIntentionAction renameRefAction = new RenameFileReferenceIntentionAction(existingElementName, reference);
RenameFileFix renameFileFix = new RenameFileFix(newFileName);
return Arrays.asList(renameRefAction, renameFileFix);
}
}
if (reference.isLast()) {
// last path component: offer to create the file itself (possibly from a template)
NewFileLocation location = getNewFileLocation(reference, newFileName, containingFile, false);
if (location == null) return emptyList();
return singletonList(new MyCreateFileFix(element, location, reference.getNewFileTemplateName()));
}
else {
// intermediate component: offer to create the missing directory
NewFileLocation location = getNewFileLocation(reference, newFileName, containingFile, true);
if (location == null) return emptyList();
return singletonList(new CreateDirectoryPathFix(element, location));
}
}
/**
 * Convenience overload that uses the reference's own containing file as the context.
 */
@Nullable
public static NewFileLocation getNewFileLocation(@NotNull FileReference reference,
String newFileName,
boolean isDirectory) {
return getNewFileLocation(reference, newFileName, reference.getElement().getContainingFile(), isDirectory);
}
/**
 * Computes where the new file/directory could be created, or null when no writable
 * target directory exists.
 */
@Nullable
private static NewFileLocation getNewFileLocation(@NotNull FileReference reference,
String newFileName,
PsiFile containingFile,
boolean isDirectory) {
@Nullable
Module module = ModuleUtilCore.findModuleForPsiElement(containingFile);
List<TargetDirectory> targetDirectories = getTargets(reference, module, newFileName, isDirectory);
if (targetDirectories.isEmpty()) {
return null;
}
return new NewFileLocation(targetDirectories, getPathToReferencePart(reference), newFileName);
}
/**
 * Filters the reference set's target contexts down to valid, writable directories
 * where the new file/directory may be created.
 */
@NotNull
private static List<TargetDirectory> getTargets(@NotNull FileReference reference,
@Nullable Module module,
String newFileName,
boolean isDirectory) {
List<FileTargetContext> contexts = getSuitableContexts(reference, module);
List<TargetDirectory> targetDirectories = new SmartList<>();
for (FileTargetContext targetContext : contexts) {
PsiFileSystemItem context = targetContext.getFileSystemItem();
VirtualFile virtualFile = context.getVirtualFile();
if (virtualFile == null || !virtualFile.isValid()) continue;
if (!isDirectory) {
// when creating a file, skip contexts where the name maps to no known file type
FileType ft = FileTypeManager.getInstance().getFileTypeByFileName(newFileName);
if (ft instanceof UnknownFileType) continue;
}
PsiDirectory directory = context.getManager().findDirectory(virtualFile);
if (directory == null) continue;
// skip directories where the file/path could not actually be created
if (!checkFileWriteAccess(reference, directory, targetContext.getPathToCreate(), newFileName, isDirectory)) {
continue;
}
if (module != null) {
// module context known: keep the intermediate path segments still to create
targetDirectories.add(new TargetDirectory(directory, targetContext.getPathToCreate()));
}
else {
targetDirectories.add(new TargetDirectory(directory));
}
}
return targetDirectories;
}
/**
 * Checks that the missing parts of the target path (and finally the new file or
 * directory itself) could be created under targetRoot without write-access errors.
 */
private static boolean checkFileWriteAccess(FileReference reference,
PsiDirectory targetRoot,
String[] pathToCreate,
String newFileName,
boolean isDirectory) {
PsiDirectory currentDirectory = targetRoot;
// descend through the pre-computed path; at the first missing directory it is
// enough to know that it could be created there
for (String part : pathToCreate) {
PsiDirectory subDirectory = currentDirectory.findSubdirectory(part);
if (subDirectory == null) {
return checkCreateSubdirectory(currentDirectory, part);
}
currentDirectory = subDirectory;
}
if (reference.getIndex() > 0) {
FileReference[] references = reference.getFileReferenceSet().getAllReferences();
// check that we can create first unresolved directory
for (int i = 0; i < references.length - 1; i++) {
String part = references[i].getFileNameToCreate();
PsiDirectory subDirectory = currentDirectory.findSubdirectory(part);
if (subDirectory == null) {
return checkCreateSubdirectory(currentDirectory, part);
}
currentDirectory = subDirectory;
}
}
if (isDirectory) {
return checkCreateSubdirectory(currentDirectory, newFileName);
} else {
// if all directories exist check if we can create file in the last
return checkCreateFile(currentDirectory, newFileName);
}
}
// true when a file with the given name may be created in the directory
private static boolean checkCreateFile(PsiDirectory directory, String newFileName) {
try {
directory.checkCreateFile(newFileName);
}
catch (IncorrectOperationException ex) {
return false;
}
return true;
}
// true when a subdirectory with the given name may be created in the directory
private static boolean checkCreateSubdirectory(PsiDirectory directory, String part) {
try {
directory.checkCreateSubdirectory(part);
}
catch (IncorrectOperationException ex) {
return false;
}
// we assume that we will be able create the rest of file tree in a new directory
return true;
}
/**
 * Collects the names of all path components preceding this reference
 * (empty for the first component).
 */
private static String @NotNull [] getPathToReferencePart(FileReference reference) {
if (reference.getIndex() == 0) {
return ArrayUtil.EMPTY_STRING_ARRAY;
}
FileReference[] references = reference.getFileReferenceSet().getAllReferences();
String[] path = new String[reference.getIndex()];
for (int i = 0; i < reference.getIndex(); i++) {
path[i] = references[i].getFileNameToCreate();
}
return path;
}
/**
 * Keeps only target contexts belonging to the same module as the source element
 * (when a module is known); in unit-test mode falls back to the first available
 * context so tests always have a target.
 */
@NotNull
private static List<FileTargetContext> getSuitableContexts(@NotNull FileReference reference, @Nullable Module module) {
FileReferenceSet fileReferenceSet = reference.getFileReferenceSet();
Collection<FileTargetContext> targetContexts = fileReferenceSet.getTargetContexts();
if (targetContexts.isEmpty()) {
return emptyList();
}
SmartList<FileTargetContext> contexts = new SmartList<>();
for (FileTargetContext targetContext : targetContexts) {
PsiFileSystemItem fsContext = targetContext.getFileSystemItem();
if (module != null) {
if (module == getModuleForContext(fsContext)) {
contexts.add(targetContext);
}
}
else {
contexts.add(targetContext);
}
}
if (contexts.isEmpty() && ApplicationManager.getApplication().isUnitTestMode()) {
return singletonList(targetContexts.iterator().next());
}
return contexts;
}
// Module owning the context's virtual file, or null when it has none.
@Nullable
private static Module getModuleForContext(@NotNull PsiFileSystemItem context) {
VirtualFile file = context.getVirtualFile();
return file != null ? ModuleUtilCore.findModuleForFile(file, context.getProject()) : null;
}
/**
 * Create-file fix that seeds the new file's content from a file template
 * when a template name is supplied.
 */
private static class MyCreateFileFix extends CreateFilePathFix {
private final String myNewFileTemplateName;
private MyCreateFileFix(@NotNull PsiElement psiElement,
@NotNull NewFileLocation newFileLocation,
@Nullable String newFileTemplateName) {
super(psiElement, newFileLocation);
myNewFileTemplateName = newFileTemplateName;
}
@Override
protected String getFileText() {
// prefer the named template's expanded text; otherwise fall back to the default
if (myNewFileTemplateName != null) {
Project project = getStartElement().getProject();
FileTemplateManager fileTemplateManager = FileTemplateManager.getInstance(project);
FileTemplate template = findTemplate(fileTemplateManager);
if (template != null) {
try {
return template.getText(fileTemplateManager.getDefaultProperties());
}
catch (IOException ex) {
throw new RuntimeException(ex);
}
}
}
return super.getFileText();
}
// Looks the template up by name, then among internal templates, then among
// J2EE templates matched by "name.extension".
private FileTemplate findTemplate(FileTemplateManager fileTemplateManager) {
FileTemplate template = fileTemplateManager.getTemplate(myNewFileTemplateName);
if (template == null) template = fileTemplateManager.findInternalTemplate(myNewFileTemplateName);
if (template == null) {
for (FileTemplate fileTemplate : fileTemplateManager.getAllJ2eeTemplates()) {
final String fileTemplateWithExtension = fileTemplate.getName() + '.' + fileTemplate.getExtension();
if (fileTemplateWithExtension.equals(myNewFileTemplateName)) {
return fileTemplate;
}
}
}
return template;
}
@Override
protected void openFile(@NotNull Project project, PsiDirectory directory, PsiFile newFile, String text) {
super.openFile(project, directory, newFile, text);
// if the template is a "live template", start interactive expansion in the editor
if (myNewFileTemplateName != null) {
FileTemplateManager fileTemplateManager = FileTemplateManager.getInstance(project);
FileTemplate template = findTemplate(fileTemplateManager);
if (template != null && template.isLiveTemplateEnabled()) {
CreateFromTemplateActionBase.startLiveTemplate(newFile);
}
}
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hive.hcatalog.listener;
import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.metastore.TransactionalMetaStoreEventListener;
import org.apache.hadoop.hive.metastore.api.AddPackageRequest;
import org.apache.hadoop.hive.metastore.api.DropPackageRequest;
import org.apache.hadoop.hive.metastore.api.GetPackageRequest;
import org.apache.hadoop.hive.metastore.api.GetPartitionsFilterSpec;
import org.apache.hadoop.hive.metastore.api.GetProjectionsSpec;
import org.apache.hadoop.hive.metastore.api.ISchemaName;
import org.apache.hadoop.hive.metastore.api.ListPackageRequest;
import org.apache.hadoop.hive.metastore.api.ListStoredProcedureRequest;
import org.apache.hadoop.hive.metastore.api.Package;
import org.apache.hadoop.hive.metastore.api.SQLAllTableConstraints;
import org.apache.hadoop.hive.metastore.api.SchemaVersionDescriptor;
import org.apache.hadoop.hive.metastore.api.Catalog;
import org.apache.hadoop.hive.metastore.api.StoredProcedure;
import org.apache.hadoop.hive.metastore.api.WMFullResourcePlan;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.FileMetadataHandler;
import org.apache.hadoop.hive.metastore.ObjectStore;
import org.apache.hadoop.hive.metastore.RawStore;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.AggrStats;
import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
import org.apache.hadoop.hive.metastore.api.CreationMetadata;
import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
import org.apache.hadoop.hive.metastore.api.DataConnector;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
import org.apache.hadoop.hive.metastore.api.Function;
import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
import org.apache.hadoop.hive.metastore.api.ISchema;
import org.apache.hadoop.hive.metastore.api.InvalidInputException;
import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
import org.apache.hadoop.hive.metastore.api.InvalidPartitionException;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.NotificationEvent;
import org.apache.hadoop.hive.metastore.api.NotificationEventRequest;
import org.apache.hadoop.hive.metastore.api.NotificationEventResponse;
import org.apache.hadoop.hive.metastore.api.NotificationEventsCountRequest;
import org.apache.hadoop.hive.metastore.api.NotificationEventsCountResponse;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.PartitionEventType;
import org.apache.hadoop.hive.metastore.api.PartitionValuesResponse;
import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
import org.apache.hadoop.hive.metastore.api.SchemaVersion;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
import org.apache.hadoop.hive.metastore.api.WMNullableResourcePlan;
import org.apache.hadoop.hive.metastore.api.WMTrigger;
import org.apache.hadoop.hive.metastore.api.WMValidateResourcePlanResponse;
import org.apache.hadoop.hive.metastore.api.Role;
import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
import org.apache.hadoop.hive.metastore.api.RuntimeStat;
import org.apache.hadoop.hive.metastore.api.SQLCheckConstraint;
import org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint;
import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
import org.apache.hadoop.hive.metastore.api.ScheduledQuery;
import org.apache.hadoop.hive.metastore.api.ScheduledQueryKey;
import org.apache.hadoop.hive.metastore.api.ScheduledQueryMaintenanceRequest;
import org.apache.hadoop.hive.metastore.api.ScheduledQueryPollRequest;
import org.apache.hadoop.hive.metastore.api.ScheduledQueryPollResponse;
import org.apache.hadoop.hive.metastore.api.ScheduledQueryProgressInfo;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.api.TableMeta;
import org.apache.hadoop.hive.metastore.api.Type;
import org.apache.hadoop.hive.metastore.api.UnknownDBException;
import org.apache.hadoop.hive.metastore.api.UnknownPartitionException;
import org.apache.hadoop.hive.metastore.api.UnknownTableException;
import org.apache.hadoop.hive.metastore.api.WMMapping;
import org.apache.hadoop.hive.metastore.api.WMPool;
import org.apache.hadoop.hive.metastore.api.WMNullablePool;
import org.apache.hadoop.hive.metastore.api.WriteEventInfo;
import org.apache.hadoop.hive.metastore.api.ReplicationMetricList;
import org.apache.hadoop.hive.metastore.api.GetReplicationMetricsRequest;
import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy;
import org.apache.hadoop.hive.metastore.utils.MetaStoreServerUtils.ColStatsObjWithSourceInfo;
import org.apache.thrift.TException;
/**
* An implementation {@link org.apache.hadoop.hive.metastore.RawStore}
* with the ability to fail metastore events for the purpose of testing.
* Events are expected to succeed by default and simply delegate to an
* embedded ObjectStore object. The behavior can be changed based on
* a flag by calling setEventSucceed().
*
* Ideally, we should have just extended ObjectStore instead of using
* delegation. However, since HiveMetaStore uses a Proxy, this class must
* not inherit from any other class.
*/
public class DummyRawStoreFailEvent implements RawStore, Configurable {
// Real store all calls are delegated to.
private final ObjectStore objectStore;
public DummyRawStoreFailEvent() {
objectStore = new ObjectStore();
}
// Global switch shared by all instances: when false, the event-related mutation
// methods below throw RuntimeException instead of delegating.
private static boolean shouldEventSucceed = true;
public static void setEventSucceed(boolean flag) {
shouldEventSucceed = flag;
}
// Transaction and configuration plumbing: straight delegation to the embedded store.
@Override
public boolean commitTransaction() {
return objectStore.commitTransaction();
}
// NOTE(review): unlike the other transaction methods this does not delegate — it
// always reports "no active transaction". Confirm this is intentional for the tests.
@Override
public boolean isActiveTransaction() {
return false;
}
@Override
public Configuration getConf() {
return objectStore.getConf();
}
@Override
public void setConf(Configuration conf) {
objectStore.setConf(conf);
}
@Override
public void shutdown() {
objectStore.shutdown();
}
@Override
public boolean openTransaction() {
return objectStore.openTransaction();
}
@Override
public void rollbackTransaction() {
objectStore.rollbackTransaction();
}
// Catalog operations. create/drop honor the simulated-failure flag; alter/get delegate.
@Override
public void createCatalog(Catalog cat) throws MetaException {
if (shouldEventSucceed) {
objectStore.createCatalog(cat);
} else {
throw new RuntimeException("Failed event");
}
}
@Override
public void alterCatalog(String catName, Catalog cat) throws MetaException,
InvalidOperationException {
objectStore.alterCatalog(catName, cat);
}
@Override
public Catalog getCatalog(String catalogName) throws NoSuchObjectException, MetaException {
return objectStore.getCatalog(catalogName);
}
// NOTE(review): swallows the delegate's exception (prints the stack trace) and
// returns null rather than an empty list — callers must be null-tolerant.
@Override
public List<String> getCatalogs() {
try {
return objectStore.getCatalogs();
} catch (Exception e) {
e.printStackTrace();
}
return null;
}
@Override
public void dropCatalog(String catalogName) throws NoSuchObjectException, MetaException {
if (shouldEventSucceed) {
objectStore.dropCatalog(catalogName);
} else {
throw new RuntimeException("Event failed.");
}
}
// Database operations. create/drop honor the simulated-failure flag; the rest delegate.
@Override
public void createDatabase(Database db) throws InvalidObjectException, MetaException {
if (shouldEventSucceed) {
objectStore.createDatabase(db);
} else {
throw new RuntimeException("Failed event");
}
}
@Override
public Database getDatabase(String catName, String dbName) throws NoSuchObjectException {
return objectStore.getDatabase(catName, dbName);
}
@Override
public boolean dropDatabase(String catName, String dbName)
throws NoSuchObjectException, MetaException {
if (shouldEventSucceed) {
return objectStore.dropDatabase(catName, dbName);
} else {
throw new RuntimeException("Event failed.");
}
}
@Override
public boolean alterDatabase(String catName, String dbName, Database db)
throws NoSuchObjectException, MetaException {
return objectStore.alterDatabase(catName, dbName, db);
}
@Override
public List<String> getDatabases(String catName, String pattern) throws MetaException {
return objectStore.getDatabases(catName, pattern);
}
@Override
public List<String> getAllDatabases(String catName) throws MetaException {
return objectStore.getAllDatabases(catName);
}
// Data connector operations: plain delegation, no failure simulation here.
@Override
public List<String> getAllDataConnectorNames() throws MetaException {
return objectStore.getAllDataConnectorNames();
}
@Override
public DataConnector getDataConnector(String connectorName) throws NoSuchObjectException {
return objectStore.getDataConnector(connectorName);
}
@Override
public boolean alterDataConnector(String connectorName, DataConnector connector)
throws MetaException, NoSuchObjectException {
return objectStore.alterDataConnector(connectorName, connector);
}
@Override
public boolean dropDataConnector(String connector) throws MetaException, NoSuchObjectException {
return objectStore.dropDataConnector(connector);
}
@Override
public void createDataConnector(DataConnector connector) throws MetaException, InvalidObjectException {
objectStore.createDataConnector(connector);
}
// Type operations: plain delegation, no failure simulation.
@Override
public boolean createType(Type type) {
return objectStore.createType(type);
}
@Override
public Type getType(String typeName) {
return objectStore.getType(typeName);
}
@Override
public boolean dropType(String typeName) {
return objectStore.dropType(typeName);
}
// Table operations. create/drop honor the simulated-failure flag; reads delegate.
@Override
public void createTable(Table tbl) throws InvalidObjectException, MetaException {
if (shouldEventSucceed) {
objectStore.createTable(tbl);
} else {
throw new RuntimeException("Event failed.");
}
}
@Override
public boolean dropTable(String catName, String dbName, String tableName)
throws MetaException, NoSuchObjectException,
InvalidObjectException, InvalidInputException {
if (shouldEventSucceed) {
return objectStore.dropTable(catName, dbName, tableName);
} else {
throw new RuntimeException("Event failed.");
}
}
@Override
public List<String> isPartOfMaterializedView(String catName, String dbName, String tblName) {
return objectStore.isPartOfMaterializedView(catName, dbName, tblName);
}
@Override
public Table getTable(String catName, String dbName, String tableName) throws MetaException {
return objectStore.getTable(catName, dbName, tableName);
}
@Override
public Table getTable(String catName, String dbName, String tableName,
String writeIdList) throws MetaException {
return objectStore.getTable(catName, dbName, tableName, writeIdList);
}
@Override
public Table getTable(String catalogName, String dbName, String tableName, String writeIdList, long tableId)
throws MetaException {
return objectStore.getTable(catalogName, dbName, tableName, writeIdList, tableId);
}
// Partition operations. dropPartition honors the simulated-failure flag; others delegate.
@Override
public boolean addPartition(Partition part)
throws InvalidObjectException, MetaException {
return objectStore.addPartition(part);
}
@Override
public Partition getPartition(String catName, String dbName, String tableName, List<String> partVals)
throws MetaException, NoSuchObjectException {
return objectStore.getPartition(catName, dbName, tableName, partVals);
}
@Override
public Partition getPartition(String catName, String dbName, String tableName,
List<String> partVals, String writeIdList)
throws MetaException, NoSuchObjectException {
return objectStore.getPartition(catName, dbName, tableName, partVals, writeIdList);
}
@Override
public boolean dropPartition(String catName, String dbName, String tableName, List<String> partVals)
throws MetaException, NoSuchObjectException,
InvalidObjectException, InvalidInputException {
if (shouldEventSucceed) {
return objectStore.dropPartition(catName, dbName, tableName, partVals);
} else {
throw new RuntimeException("Event failed.");
}
}
@Override
public List<Partition> getPartitions(String catName, String dbName, String tableName, int max)
throws MetaException, NoSuchObjectException {
return objectStore.getPartitions(catName, dbName, tableName, max);
}
@Override
public Map<String, String> getPartitionLocations(String catName, String dbName, String tblName,
String baseLocationToNotShow, int max) {
return objectStore.getPartitionLocations(catName, dbName, tblName, baseLocationToNotShow, max);
}
@Override
public void updateCreationMetadata(String catName, String dbname, String tablename, CreationMetadata cm)
throws MetaException {
objectStore.updateCreationMetadata(catName, dbname, tablename, cm);
}
// alterTable honors the simulated-failure flag; the listing methods below delegate.
@Override
public Table alterTable(String catName, String dbName, String name, Table newTable, String queryValidWriteIds)
throws InvalidObjectException, MetaException {
if (shouldEventSucceed) {
return objectStore.alterTable(catName, dbName, name, newTable, queryValidWriteIds);
} else {
throw new RuntimeException("Event failed.");
}
}
@Override
public List<String> getTables(String catName, String dbName, String pattern) throws MetaException {
return objectStore.getTables(catName, dbName, pattern);
}
@Override
public List<String> getTables(String catName, String dbName, String pattern, TableType tableType, int limit) throws MetaException {
return objectStore.getTables(catName, dbName, pattern, tableType, limit);
}
@Override
public List<Table> getAllMaterializedViewObjectsForRewriting(String catName)
throws MetaException {
return objectStore.getAllMaterializedViewObjectsForRewriting(catName);
}
@Override
public List<String> getMaterializedViewsForRewriting(String catName, String dbName)
throws MetaException, NoSuchObjectException {
return objectStore.getMaterializedViewsForRewriting(catName, dbName);
}
@Override
public List<TableMeta> getTableMeta(String catName, String dbNames, String tableNames, List<String> tableTypes)
throws MetaException {
return objectStore.getTableMeta(catName, dbNames, tableNames, tableTypes);
}
// Table/partition listing: plain delegation.
@Override
public List<Table> getTableObjectsByName(String catName, String dbName, List<String> tableNames)
throws MetaException, UnknownDBException {
return objectStore.getTableObjectsByName(catName, dbName, tableNames);
}
@Override
public List<Table> getTableObjectsByName(String catName, String dbName, List<String> tableNames,
GetProjectionsSpec projectionSpec, String tablePattern) throws MetaException, UnknownDBException {
return objectStore.getTableObjectsByName(catName, dbName, tableNames, projectionSpec, tablePattern);
}
@Override
public List<String> getAllTables(String catName, String dbName) throws MetaException {
return objectStore.getAllTables(catName, dbName);
}
@Override
public List<String> listTableNamesByFilter(String catName, String dbName, String filter,
short maxTables) throws MetaException, UnknownDBException {
return objectStore.listTableNamesByFilter(catName, dbName, filter, maxTables);
}
@Override
public List<String> listPartitionNames(String catName, String dbName, String tblName, short maxParts)
throws MetaException {
return objectStore.listPartitionNames(catName, dbName, tblName, maxParts);
}
@Override
public List<String> listPartitionNames(String catName, String dbName, String tblName,
String defaultPartName, byte[] exprBytes, String order,
short maxParts) throws MetaException, NoSuchObjectException {
return objectStore.listPartitionNames(catName, dbName, tblName,
defaultPartName, exprBytes, order, maxParts);
}
// NOTE(review): not delegated — always returns null regardless of arguments.
// Confirm the tests never exercise this path.
@Override
public PartitionValuesResponse listPartitionValues(String catName, String db_name,
String tbl_name, List<FieldSchema> cols,
boolean applyDistinct, String filter,
boolean ascending, List<FieldSchema> order,
long maxParts) throws MetaException {
return null;
}
@Override
public Partition alterPartition(String catName, String dbName, String tblName, List<String> partVals,
Partition newPart, String queryValidWriteIds) throws InvalidObjectException, MetaException {
if (shouldEventSucceed) {
return objectStore.alterPartition(catName, dbName, tblName, partVals, newPart, queryValidWriteIds);
} else {
throw new RuntimeException("Event failed.");
}
}
@Override
public List<Partition> alterPartitions(String catName, String dbName, String tblName,
List<List<String>> partValsList, List<Partition> newParts,
long writeId, String queryValidWriteIds)
throws InvalidObjectException, MetaException {
if (shouldEventSucceed) {
return objectStore.alterPartitions(catName, dbName, tblName, partValsList, newParts, writeId, queryValidWriteIds);
} else {
throw new RuntimeException("Event failed.");
}
}
@Override
public List<Partition> getPartitionsByFilter(String catName, String dbName, String tblName,
String filter, short maxParts) throws MetaException, NoSuchObjectException {
return objectStore.getPartitionsByFilter(catName, dbName, tblName, filter, maxParts);
}
@Override
public List<Partition> getPartitionSpecsByFilterAndProjection(Table table,
GetProjectionsSpec projectionSpec, GetPartitionsFilterSpec filterSpec)
throws MetaException, NoSuchObjectException {
return objectStore.getPartitionSpecsByFilterAndProjection(table, projectionSpec, filterSpec);
}
  /** Forwards the filtered partition count to the wrapped ObjectStore. */
  @Override
  public int getNumPartitionsByFilter(String catName, String dbName, String tblName,
                                      String filter) throws MetaException, NoSuchObjectException {
    return objectStore.getNumPartitionsByFilter(catName, dbName, tblName, filter);
  }
  /** Forwards the expression-based partition count to the wrapped ObjectStore. */
  @Override
  public int getNumPartitionsByExpr(String catName, String dbName, String tblName,
                                    byte[] expr) throws MetaException, NoSuchObjectException {
    return objectStore.getNumPartitionsByExpr(catName, dbName, tblName, expr);
  }
  /** Forwards the by-name partition lookup to the wrapped ObjectStore. */
  @Override
  public List<Partition> getPartitionsByNames(String catName, String dbName, String tblName,
                                              List<String> partNames)
      throws MetaException, NoSuchObjectException {
    return objectStore.getPartitionsByNames(
        catName, dbName, tblName, partNames);
  }
  /** Forwards the expression-based partition lookup (results appended to {@code result}) to the wrapped ObjectStore. */
  @Override
  public boolean getPartitionsByExpr(String catName, String dbName, String tblName, byte[] expr,
      String defaultPartitionName, short maxParts, List<Partition> result) throws TException {
    return objectStore.getPartitionsByExpr(catName,
        dbName, tblName, expr, defaultPartitionName, maxParts, result);
  }
  /** Forwards the partition event marking to the wrapped ObjectStore. */
  @Override
  public Table markPartitionForEvent(String catName, String dbName, String tblName,
                                     Map<String, String> partVals, PartitionEventType evtType)
      throws MetaException, UnknownTableException, InvalidPartitionException,
      UnknownPartitionException {
    return objectStore.markPartitionForEvent(catName, dbName, tblName, partVals, evtType);
  }
  /** Forwards the partition event-mark check to the wrapped ObjectStore. */
  @Override
  public boolean isPartitionMarkedForEvent(String catName, String dbName, String tblName,
                                           Map<String, String> partName, PartitionEventType evtType)
      throws MetaException, UnknownTableException, InvalidPartitionException,
      UnknownPartitionException {
    return objectStore.isPartitionMarkedForEvent(catName, dbName, tblName, partName, evtType);
  }
@Override
public boolean addRole(String rowName, String ownerName) throws InvalidObjectException,
MetaException, NoSuchObjectException {
return objectStore.addRole(rowName, ownerName);
}
  /** Forwards role removal to the wrapped ObjectStore. */
  @Override
  public boolean removeRole(String roleName)
      throws MetaException, NoSuchObjectException {
    return objectStore.removeRole(roleName);
  }
  /** Forwards the role grant to the wrapped ObjectStore. */
  @Override
  public boolean grantRole(Role role, String userName, PrincipalType principalType,
                           String grantor, PrincipalType grantorType, boolean grantOption)
      throws MetaException, NoSuchObjectException, InvalidObjectException {
    return objectStore.grantRole(role, userName, principalType, grantor, grantorType,
        grantOption);
  }
  /** Forwards the role revocation to the wrapped ObjectStore. */
  @Override
  public boolean revokeRole(Role role, String userName, PrincipalType principalType, boolean grantOption)
      throws MetaException, NoSuchObjectException {
    return objectStore.revokeRole(role, userName, principalType, grantOption);
  }
  /** Forwards the user privilege-set lookup to the wrapped ObjectStore. */
  @Override
  public PrincipalPrivilegeSet getUserPrivilegeSet(String userName,
                                                   List<String> groupNames) throws InvalidObjectException, MetaException {
    return objectStore.getUserPrivilegeSet(userName, groupNames);
  }
  /** Forwards the database privilege-set lookup to the wrapped ObjectStore. */
  @Override
  public PrincipalPrivilegeSet getDBPrivilegeSet(String catName, String dbName, String userName,
                                                 List<String> groupNames) throws InvalidObjectException, MetaException {
    return objectStore.getDBPrivilegeSet(catName, dbName, userName, groupNames);
  }
  /** Forwards the table privilege-set lookup to the wrapped ObjectStore. */
  @Override
  public PrincipalPrivilegeSet getTablePrivilegeSet(String catName, String dbName, String tableName,
                                                    String userName, List<String> groupNames)
      throws InvalidObjectException, MetaException {
    return objectStore.getTablePrivilegeSet(catName, dbName, tableName, userName, groupNames);
  }
  /** Forwards the partition privilege-set lookup to the wrapped ObjectStore. */
  @Override
  public PrincipalPrivilegeSet getPartitionPrivilegeSet(String catName, String dbName, String tableName,
                                                        String partition, String userName, List<String> groupNames)
      throws InvalidObjectException, MetaException {
    return objectStore.getPartitionPrivilegeSet(catName, dbName, tableName, partition,
        userName, groupNames);
  }
  /** Forwards the column privilege-set lookup to the wrapped ObjectStore. */
  @Override
  public PrincipalPrivilegeSet getColumnPrivilegeSet(String catName, String dbName, String tableName,
                                                     String partitionName, String columnName, String userName,
                                                     List<String> groupNames)
      throws InvalidObjectException, MetaException {
    return objectStore.getColumnPrivilegeSet(catName, dbName, tableName, partitionName,
        columnName, userName, groupNames);
  }
  /** Forwards the global grants listing to the wrapped ObjectStore. */
  @Override
  public List<HiveObjectPrivilege> listPrincipalGlobalGrants(String principalName,
                                                             PrincipalType principalType) {
    return objectStore.listPrincipalGlobalGrants(principalName, principalType);
  }
  /** Forwards the database grants listing to the wrapped ObjectStore. */
  @Override
  public List<HiveObjectPrivilege> listPrincipalDBGrants(String principalName,
                                                         PrincipalType principalType, String catName, String dbName) {
    return objectStore.listPrincipalDBGrants(principalName, principalType, catName, dbName);
  }
  /** Forwards the table grants listing to the wrapped ObjectStore. */
  @Override
  public List<HiveObjectPrivilege> listAllTableGrants(String principalName,
                                                      PrincipalType principalType, String catName, String dbName, String tableName) {
    return objectStore.listAllTableGrants(principalName, principalType,
        catName, dbName, tableName);
  }
  /** Forwards the partition grants listing to the wrapped ObjectStore. */
  @Override
  public List<HiveObjectPrivilege> listPrincipalPartitionGrants(String principalName,
                                                                PrincipalType principalType, String catName, String dbName, String tableName,
                                                                List<String> partValues,
                                                                String partName) {
    return objectStore.listPrincipalPartitionGrants(principalName, principalType,
        catName, dbName, tableName, partValues, partName);
  }
  /** Forwards the table-column grants listing to the wrapped ObjectStore. */
  @Override
  public List<HiveObjectPrivilege> listPrincipalTableColumnGrants(String principalName,
                                                                  PrincipalType principalType, String catName, String dbName,
                                                                  String tableName, String columnName) {
    return objectStore.listPrincipalTableColumnGrants(principalName, principalType,
        catName, dbName, tableName, columnName);
  }
  /** Forwards the partition-column grants listing to the wrapped ObjectStore. */
  @Override
  public List<HiveObjectPrivilege> listPrincipalPartitionColumnGrants(
      String principalName, PrincipalType principalType, String catName, String dbName, String tableName,
      List<String> partVals, String partName, String columnName) {
    return objectStore.listPrincipalPartitionColumnGrants(principalName, principalType,
        catName, dbName, tableName, partVals, partName, columnName);
  }
  /** Forwards the privilege grant to the wrapped ObjectStore. */
  @Override
  public boolean grantPrivileges(PrivilegeBag privileges) throws InvalidObjectException,
      MetaException, NoSuchObjectException {
    return objectStore.grantPrivileges(privileges);
  }
  /** Forwards the privilege revocation to the wrapped ObjectStore. */
  @Override
  public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption)
      throws InvalidObjectException, MetaException, NoSuchObjectException {
    return objectStore.revokePrivileges(privileges, grantOption);
  }
  /** Forwards the privilege refresh to the wrapped ObjectStore. */
  @Override
  public boolean refreshPrivileges(HiveObjectRef objToRefresh, String authorizer, PrivilegeBag grantPrivileges)
      throws InvalidObjectException, MetaException, NoSuchObjectException {
    return objectStore.refreshPrivileges(objToRefresh, authorizer, grantPrivileges);
  }
  /** Forwards the role lookup to the wrapped ObjectStore. */
  @Override
  public Role getRole(String roleName) throws NoSuchObjectException {
    return objectStore.getRole(roleName);
  }
  /** Forwards the role-name listing to the wrapped ObjectStore. */
  @Override
  public List<String> listRoleNames() {
    return objectStore.listRoleNames();
  }
  /** Forwards the role listing to the wrapped ObjectStore. */
  @Override
  public List<Role> listRoles(String principalName, PrincipalType principalType) {
    return objectStore.listRoles(principalName, principalType);
  }
  /** Forwards the role-with-grants listing to the wrapped ObjectStore. */
  @Override
  public List<RolePrincipalGrant> listRolesWithGrants(String principalName,
                                                      PrincipalType principalType) {
    return objectStore.listRolesWithGrants(principalName, principalType);
  }
  /** Forwards the role-member listing to the wrapped ObjectStore. */
  @Override
  public List<RolePrincipalGrant> listRoleMembers(String roleName) {
    return objectStore.listRoleMembers(roleName);
  }
  /** Forwards the authorized partition lookup to the wrapped ObjectStore. */
  @Override
  public Partition getPartitionWithAuth(String catName, String dbName, String tblName,
                                        List<String> partVals, String userName, List<String> groupNames)
      throws MetaException, NoSuchObjectException, InvalidObjectException {
    return objectStore.getPartitionWithAuth(catName, dbName, tblName, partVals, userName,
        groupNames);
  }
  /** Forwards the authorized partitions lookup to the wrapped ObjectStore. */
  @Override
  public List<Partition> getPartitionsWithAuth(String catName, String dbName, String tblName,
                                               short maxParts, String userName, List<String> groupNames)
      throws MetaException, NoSuchObjectException, InvalidObjectException {
    return objectStore.getPartitionsWithAuth(catName, dbName, tblName, maxParts, userName,
        groupNames);
  }
  /** Forwards the partial-spec partition-name listing to the wrapped ObjectStore. */
  @Override
  public List<String> listPartitionNamesPs(String catName, String dbName, String tblName,
                                           List<String> partVals, short maxParts)
      throws MetaException, NoSuchObjectException {
    return objectStore.listPartitionNamesPs(catName, dbName, tblName, partVals, maxParts);
  }
  /** Forwards the authorized partial-spec partition listing to the wrapped ObjectStore. */
  @Override
  public List<Partition> listPartitionsPsWithAuth(String catName, String dbName, String tblName,
                                                  List<String> partVals, short maxParts, String userName,
                                                  List<String> groupNames)
      throws MetaException, InvalidObjectException, NoSuchObjectException {
    return objectStore.listPartitionsPsWithAuth(catName, dbName, tblName, partVals, maxParts,
        userName, groupNames);
  }
  /** Forwards event cleanup to the wrapped ObjectStore. */
  @Override
  public long cleanupEvents() {
    return objectStore.cleanupEvents();
  }
  /** Forwards the all-DB grants listing to the wrapped ObjectStore. */
  @Override
  public List<HiveObjectPrivilege> listPrincipalDBGrantsAll(
      String principalName, PrincipalType principalType) {
    return objectStore.listPrincipalDBGrantsAll(principalName, principalType);
  }
  /** Forwards the all-table grants listing to the wrapped ObjectStore. */
  @Override
  public List<HiveObjectPrivilege> listPrincipalTableGrantsAll(
      String principalName, PrincipalType principalType) {
    return objectStore.listPrincipalTableGrantsAll(principalName, principalType);
  }
  /** Forwards the all-partition grants listing to the wrapped ObjectStore. */
  @Override
  public List<HiveObjectPrivilege> listPrincipalPartitionGrantsAll(
      String principalName, PrincipalType principalType) {
    return objectStore.listPrincipalPartitionGrantsAll(principalName, principalType);
  }
  /** Forwards the all-table-column grants listing to the wrapped ObjectStore. */
  @Override
  public List<HiveObjectPrivilege> listPrincipalTableColumnGrantsAll(
      String principalName, PrincipalType principalType) {
    return objectStore.listPrincipalTableColumnGrantsAll(principalName, principalType);
  }
  /** Forwards the all-partition-column grants listing to the wrapped ObjectStore. */
  @Override
  public List<HiveObjectPrivilege> listPrincipalPartitionColumnGrantsAll(
      String principalName, PrincipalType principalType) {
    return objectStore.listPrincipalPartitionColumnGrantsAll(principalName, principalType);
  }
  /** Forwards the global grants-all listing to the wrapped ObjectStore. */
  @Override
  public List<HiveObjectPrivilege> listGlobalGrantsAll() {
    return objectStore.listGlobalGrantsAll();
  }
  /** Forwards the DB grants-all listing to the wrapped ObjectStore. */
  @Override
  public List<HiveObjectPrivilege> listDBGrantsAll(String catName, String dbName) {
    return objectStore.listDBGrantsAll(catName, dbName);
  }
  /** Forwards the partition-column grants-all listing to the wrapped ObjectStore. */
  @Override
  public List<HiveObjectPrivilege> listPartitionColumnGrantsAll(String catName, String dbName, String tableName,
                                                                String partitionName, String columnName) {
    return objectStore.listPartitionColumnGrantsAll(catName, dbName, tableName, partitionName, columnName);
  }
  /** Forwards the table grants-all listing to the wrapped ObjectStore. */
  @Override
  public List<HiveObjectPrivilege> listTableGrantsAll(String catName, String dbName, String tableName) {
    return objectStore.listTableGrantsAll(catName, dbName, tableName);
  }
  /** Forwards the partition grants-all listing to the wrapped ObjectStore. */
  @Override
  public List<HiveObjectPrivilege> listPartitionGrantsAll(String catName, String dbName, String tableName,
                                                          String partitionName) {
    return objectStore.listPartitionGrantsAll(catName, dbName, tableName, partitionName);
  }
  /** Forwards the table-column grants-all listing to the wrapped ObjectStore. */
  @Override
  public List<HiveObjectPrivilege> listTableColumnGrantsAll(String catName, String dbName, String tableName,
                                                            String columnName) {
    return objectStore.listTableColumnGrantsAll(catName, dbName, tableName, columnName);
  }
  /** Forwards the per-engine table column statistics lookup to the wrapped ObjectStore. */
  @Override
  public List<ColumnStatistics> getTableColumnStatistics(String catName, String dbName, String tableName,
                                                         List<String> colNames) throws MetaException, NoSuchObjectException {
    return objectStore.getTableColumnStatistics(catName, dbName, tableName, colNames);
  }
  /** Forwards the engine-scoped table column statistics lookup to the wrapped ObjectStore. */
  @Override
  public ColumnStatistics getTableColumnStatistics(String catName, String dbName, String tableName,
                                                   List<String> colNames, String engine) throws MetaException, NoSuchObjectException {
    return objectStore.getTableColumnStatistics(catName, dbName, tableName, colNames, engine);
  }
  /** Forwards the write-id-scoped table column statistics lookup to the wrapped ObjectStore. */
  @Override
  public ColumnStatistics getTableColumnStatistics(String catName, String dbName, String tableName,
                                                   List<String> colNames, String engine,
                                                   String writeIdList)
      throws MetaException, NoSuchObjectException {
    return objectStore.getTableColumnStatistics(catName, dbName, tableName, colNames, engine, writeIdList);
  }
  /** Forwards the table column statistics deletion to the wrapped ObjectStore. */
  @Override
  public boolean deleteTableColumnStatistics(String catName, String dbName, String tableName,
                                             String colName, String engine)
      throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException {
    return objectStore.deleteTableColumnStatistics(catName, dbName, tableName, colName, engine);
  }
  /** Forwards the partition column statistics deletion to the wrapped ObjectStore. */
  @Override
  public boolean deletePartitionColumnStatistics(String catName, String dbName, String tableName,
                                                 String partName, List<String> partVals, String colName, String engine)
      throws NoSuchObjectException, MetaException, InvalidObjectException,
      InvalidInputException {
    return objectStore.deletePartitionColumnStatistics(catName, dbName, tableName, partName,
        partVals, colName, engine);
  }
  /** Forwards the table column statistics update to the wrapped ObjectStore. */
  @Override
  public Map<String, String> updateTableColumnStatistics(ColumnStatistics statsObj, String validWriteIds, long writeId)
      throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException {
    return objectStore.updateTableColumnStatistics(statsObj, validWriteIds, writeId);
  }
  /** Forwards the partition column statistics update to the wrapped ObjectStore. */
  @Override
  public Map<String, String> updatePartitionColumnStatistics(ColumnStatistics statsObj,
                                                             List<String> partVals, String validWriteIds, long writeId)
      throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException {
    return objectStore.updatePartitionColumnStatistics(statsObj, partVals, validWriteIds, writeId);
  }
  /** Test-double stub: token storage is unsupported; always reports failure. */
  @Override
  public boolean addToken(String tokenIdentifier, String delegationToken) {
    return false;
  }
  /** Test-double stub: token removal is unsupported; always reports failure. */
  @Override
  public boolean removeToken(String tokenIdentifier) {
    return false;
  }
  /** Test-double stub: always returns an empty token string. */
  @Override
  public String getToken(String tokenIdentifier) {
    return "";
  }
  /** Test-double stub: always returns an empty, mutable list of token identifiers. */
  @Override
  public List<String> getAllTokenIdentifiers() {
    return new ArrayList<>();
  }
  /** Test-double stub: master keys are unsupported; -1 signals no key was stored. */
  @Override
  public int addMasterKey(String key) throws MetaException {
    return -1;
  }
  /** Test-double stub: master key updates are silently ignored. */
  @Override
  public void updateMasterKey(Integer seqNo, String key)
      throws NoSuchObjectException, MetaException {}
  /** Test-double stub: master key removal is unsupported; always reports failure. */
  @Override
  public boolean removeMasterKey(Integer keySeq) {
    return false;
  }
  /** Test-double stub: always returns an empty master-key array. */
  @Override
  public String[] getMasterKeys() {
    return new String[0];
  }
  /** Test-double stub: schema verification is a no-op. */
  @Override
  public void verifySchema() throws MetaException {
  }
  /** Forwards the schema-version read to the wrapped ObjectStore. */
  @Override
  public String getMetaStoreSchemaVersion() throws MetaException {
    return objectStore.getMetaStoreSchemaVersion();
  }
  /** Forwards the schema-version write to the wrapped ObjectStore. */
  @Override
  public void setMetaStoreSchemaVersion(String schemaVersion, String comment) throws MetaException {
    objectStore.setMetaStoreSchemaVersion(schemaVersion, comment);
  }
  /** Forwards the partition column statistics lookup to the wrapped ObjectStore. */
  @Override
  public List<List<ColumnStatistics>> getPartitionColumnStatistics(String catName, String dbName,
                                                                   String tblName, List<String> colNames,
                                                                   List<String> partNames)
      throws MetaException, NoSuchObjectException {
    return objectStore.getPartitionColumnStatistics(catName, dbName, tblName , colNames, partNames);
  }
  /** Forwards the engine-scoped partition column statistics lookup to the wrapped ObjectStore. */
  @Override
  public List<ColumnStatistics> getPartitionColumnStatistics(String catName, String dbName,
                                                             String tblName, List<String> colNames,
                                                             List<String> partNames, String engine)
      throws MetaException, NoSuchObjectException {
    return objectStore.getPartitionColumnStatistics(catName, dbName, tblName , colNames, partNames, engine);
  }
  /** Forwards the write-id-scoped partition column statistics lookup to the wrapped ObjectStore. */
  @Override
  public List<ColumnStatistics> getPartitionColumnStatistics(String catName, String dbName,
                                                             String tblName, List<String> colNames,
                                                             List<String> partNames, String engine,
                                                             String writeIdList)
      throws MetaException, NoSuchObjectException {
    return objectStore.getPartitionColumnStatistics(
        catName, dbName, tblName , colNames, partNames, engine, writeIdList);
  }
  /** Forwards the partition-existence check to the wrapped ObjectStore. */
  @Override
  public boolean doesPartitionExist(String catName, String dbName, String tableName,
                                    List<FieldSchema> partKeys, List<String> partVals)
      throws MetaException, NoSuchObjectException {
    return objectStore.doesPartitionExist(catName, dbName, tableName, partKeys, partVals);
  }
  /** Forwards the bulk partition add to the wrapped ObjectStore. */
  @Override
  public boolean addPartitions(String catName, String dbName, String tblName, List<Partition> parts)
      throws InvalidObjectException, MetaException {
    return objectStore.addPartitions(catName, dbName, tblName, parts);
  }
  /** Test-double stub: spec-based partition add is unsupported; always reports failure. */
  @Override
  public boolean addPartitions(String catName, String dbName, String tblName, PartitionSpecProxy partitionSpec,
                               boolean ifNotExists) throws InvalidObjectException, MetaException {
    return false;
  }
  /** Forwards the bulk partition drop to the wrapped ObjectStore. */
  @Override
  public void dropPartitions(String catName, String dbName, String tblName, List<String> partNames)
      throws MetaException, NoSuchObjectException {
    objectStore.dropPartitions(catName, dbName, tblName, partNames);
  }
@Override
public void createFunction(Function func) throws InvalidObjectException,
MetaException {
if (shouldEventSucceed) {
objectStore.createFunction(func);
} else {
throw new RuntimeException("Event failed.");
}
}
  /** Forwards the function alteration to the wrapped ObjectStore. */
  @Override
  public void alterFunction(String catName, String dbName, String funcName, Function newFunction)
      throws InvalidObjectException, MetaException {
    objectStore.alterFunction(catName, dbName, funcName, newFunction);
  }
@Override
public void dropFunction(String catName, String dbName, String funcName)
throws MetaException, NoSuchObjectException, InvalidObjectException,
InvalidInputException {
if (shouldEventSucceed) {
objectStore.dropFunction(catName, dbName, funcName);
} else {
throw new RuntimeException("Event failed.");
}
}
  /** Forwards the function lookup to the wrapped ObjectStore. */
  @Override
  public Function getFunction(String catName, String dbName, String funcName)
      throws MetaException {
    return objectStore.getFunction(catName, dbName, funcName);
  }
  /** Test-double stub: ignores the backing store and always returns an empty list. */
  @Override
  public List<Function> getAllFunctions(String catName)
      throws MetaException {
    return Collections.emptyList();
  }
  /** Forwards the function-name listing to the wrapped ObjectStore. */
  @Override
  public List<String> getFunctions(String catName, String dbName, String pattern)
      throws MetaException {
    return objectStore.getFunctions(catName, dbName, pattern);
  }
  /** Test-double stub: aggregate statistics are not implemented; always returns null. */
  @Override
  public AggrStats get_aggr_stats_for(String catName, String dbName,
                                      String tblName, List<String> partNames, List<String> colNames,
                                      String engine)
      throws MetaException {
    return null;
  }
  /** Test-double stub: write-id-scoped aggregate statistics are not implemented; always returns null. */
  @Override
  public AggrStats get_aggr_stats_for(String catName, String dbName,
                                      String tblName, List<String> partNames, List<String> colNames,
                                      String engine, String writeIdList)
      throws MetaException {
    return null;
  }
  /** Forwards the notification fetch to the wrapped ObjectStore. */
  @Override
  public NotificationEventResponse getNextNotification(NotificationEventRequest rqst) {
    return objectStore.getNextNotification(rqst);
  }
  /** Forwards the notification-event add to the wrapped ObjectStore. */
  @Override
  public void addNotificationEvent(NotificationEvent event) throws MetaException {
    objectStore.addNotificationEvent(event);
  }
@Override
public void cleanNotificationEvents(int olderThan) {
if (!shouldEventSucceed) {
//throw exception to simulate an issue with cleaner thread
throw new RuntimeException("Dummy exception while cleaning notifications");
}
objectStore.cleanNotificationEvents(olderThan);
}
@Override
public void cleanWriteNotificationEvents(int olderThan) {
if (!shouldEventSucceed) {
//throw exception to simulate an issue with cleaner thread
throw new RuntimeException("Dummy exception while cleaning write notifications");
}
objectStore.cleanWriteNotificationEvents(olderThan);
}
  /** Forwards the write-event info lookup to the wrapped ObjectStore. */
  @Override
  public List<WriteEventInfo> getAllWriteEventInfo(long txnId, String dbName, String tableName) throws MetaException {
    return objectStore.getAllWriteEventInfo(txnId, dbName, tableName);
  }
  /** Forwards the current-notification-id read to the wrapped ObjectStore. */
  @Override
  public CurrentNotificationEventId getCurrentNotificationEventId() {
    return objectStore.getCurrentNotificationEventId();
  }
  /** Forwards the notification-events count to the wrapped ObjectStore. */
  @Override
  public NotificationEventsCountResponse getNotificationEventsCount(NotificationEventsCountRequest rqst) {
    return objectStore.getNotificationEventsCount(rqst);
  }
  /** Forwards the cache flush to the wrapped ObjectStore. */
  @Override
  public void flushCache() {
    objectStore.flushCache();
  }
  /** Test-double stub: file metadata is not supported; always returns null. */
  @Override
  public ByteBuffer[] getFileMetadata(List<Long> fileIds) {
    return null;
  }
  /** Test-double stub: file metadata writes are silently ignored. */
  @Override
  public void putFileMetadata(
      List<Long> fileIds, List<ByteBuffer> metadata, FileMetadataExprType type) {
  }
  /** Test-double stub: reports that file metadata is unsupported. */
  @Override
  public boolean isFileMetadataSupported() {
    return false;
  }
  /** Test-double stub: expression-based file metadata lookup is a no-op (output arrays untouched). */
  @Override
  public void getFileMetadataByExpr(List<Long> fileIds, FileMetadataExprType type, byte[] expr,
                                    ByteBuffer[] metadatas, ByteBuffer[] stripeBitsets, boolean[] eliminated) {
  }
  /** Forwards the table count to the wrapped ObjectStore. */
  @Override
  public int getTableCount() throws MetaException {
    return objectStore.getTableCount();
  }
  /** Forwards the partition count to the wrapped ObjectStore. */
  @Override
  public int getPartitionCount() throws MetaException {
    return objectStore.getPartitionCount();
  }
  /** Forwards the database count to the wrapped ObjectStore. */
  @Override
  public int getDatabaseCount() throws MetaException {
    return objectStore.getDatabaseCount();
  }
  /** Test-double stub: no file metadata handler is provided; always returns null. */
  @Override
  public FileMetadataHandler getFileMetadataHandler(FileMetadataExprType type) {
    return null;
  }
  /** Test-double stub: constraint metadata is not implemented; always returns null. */
  @Override
  public List<SQLPrimaryKey> getPrimaryKeys(String catName, String db_name, String tbl_name)
      throws MetaException {
    return null;
  }
  /** Test-double stub: constraint metadata is not implemented; always returns null. */
  @Override
  public List<SQLForeignKey> getForeignKeys(String catName, String parent_db_name,
                                            String parent_tbl_name, String foreign_db_name, String foreign_tbl_name)
      throws MetaException {
    return null;
  }
  /** Test-double stub: constraint metadata is not implemented; always returns null. */
  @Override
  public List<SQLUniqueConstraint> getUniqueConstraints(String catName, String db_name, String tbl_name)
      throws MetaException {
    return null;
  }
  /** Test-double stub: constraint metadata is not implemented; always returns null. */
  @Override
  public List<SQLNotNullConstraint> getNotNullConstraints(String catName, String db_name, String tbl_name)
      throws MetaException {
    return null;
  }
  /** Test-double stub: constraint metadata is not implemented; always returns null. */
  @Override
  public List<SQLCheckConstraint> getCheckConstraints(String catName, String db_name, String tbl_name)
      throws MetaException {
    return null;
  }
  /** Test-double stub: constraint metadata is not implemented; always returns null. */
  @Override
  public List<SQLDefaultConstraint> getDefaultConstraints(String catName, String db_name, String tbl_name)
      throws MetaException {
    return null;
  }
  /** Test-double stub: constraint metadata is not implemented; always returns null. */
  @Override
  public SQLAllTableConstraints getAllTableConstraints(String catName, String dbName, String tblName)
      throws MetaException, NoSuchObjectException {
    return null;
  }
  /** Test-double stub: table creation with constraints is not implemented; always returns null. */
  @Override
  public SQLAllTableConstraints createTableWithConstraints(Table tbl,
                                                           SQLAllTableConstraints constraints)
      throws InvalidObjectException, MetaException {
    return null;
  }
  /** Test-double stub: constraint drops are silently ignored. */
  @Override
  public void dropConstraint(String catName, String dbName, String tableName,
                             String constraintName, boolean missingOk)
      throws NoSuchObjectException {
  }
  /** Test-double stub: constraint adds are not implemented; always returns null. */
  @Override
  public List<SQLPrimaryKey> addPrimaryKeys(List<SQLPrimaryKey> pks)
      throws InvalidObjectException, MetaException {
    return null;
  }
  /** Test-double stub: constraint adds are not implemented; always returns null. */
  @Override
  public List<SQLForeignKey> addForeignKeys(List<SQLForeignKey> fks)
      throws InvalidObjectException, MetaException {
    return null;
  }
  /** Test-double stub: constraint adds are not implemented; always returns null. */
  @Override
  public List<SQLUniqueConstraint> addUniqueConstraints(List<SQLUniqueConstraint> uks)
      throws InvalidObjectException, MetaException {
    return null;
  }
  /** Test-double stub: constraint adds are not implemented; always returns null. */
  @Override
  public List<SQLNotNullConstraint> addNotNullConstraints(List<SQLNotNullConstraint> nns)
      throws InvalidObjectException, MetaException {
    return null;
  }
  /** Test-double stub: constraint adds are not implemented; always returns null. */
  @Override
  public List<SQLDefaultConstraint> addDefaultConstraints(List<SQLDefaultConstraint> nns)
      throws InvalidObjectException, MetaException {
    return null;
  }
  /** Test-double stub: constraint adds are not implemented; always returns null. */
  @Override
  public List<SQLCheckConstraint> addCheckConstraints(List<SQLCheckConstraint> nns)
      throws InvalidObjectException, MetaException {
    return null;
  }
  /** Test-double stub: always fails — the metastore DB UUID is not supported here. */
  @Override
  public String getMetastoreDbUuid() throws MetaException {
    throw new MetaException("getMetastoreDbUuid is not implemented");
  }
  /** Forwards resource-plan creation to the wrapped ObjectStore. */
  @Override
  public void createResourcePlan(WMResourcePlan resourcePlan, String copyFrom, int defaultPoolSize)
      throws AlreadyExistsException, MetaException, InvalidObjectException, NoSuchObjectException {
    objectStore.createResourcePlan(resourcePlan, copyFrom, defaultPoolSize);
  }
  /** Forwards the resource-plan lookup to the wrapped ObjectStore. */
  @Override
  public WMFullResourcePlan getResourcePlan(String name, String ns) throws NoSuchObjectException, MetaException {
    return objectStore.getResourcePlan(name, ns);
  }
  /** Forwards the resource-plan listing to the wrapped ObjectStore. */
  @Override
  public List<WMResourcePlan> getAllResourcePlans(String ns) throws MetaException {
    return objectStore.getAllResourcePlans(ns);
  }
  /** Forwards the resource-plan alteration to the wrapped ObjectStore. */
  @Override
  public WMFullResourcePlan alterResourcePlan(String name, String ns, WMNullableResourcePlan resourcePlan,
                                              boolean canActivateDisabled, boolean canDeactivate, boolean isReplace)
      throws AlreadyExistsException, NoSuchObjectException, InvalidOperationException, MetaException {
    return objectStore.alterResourcePlan(name, ns, resourcePlan, canActivateDisabled, canDeactivate, isReplace);
  }
  /** Forwards the active resource-plan lookup to the wrapped ObjectStore. */
  @Override
  public WMFullResourcePlan getActiveResourcePlan(String ns) throws MetaException {
    return objectStore.getActiveResourcePlan(ns);
  }
  /** Forwards the resource-plan validation to the wrapped ObjectStore. */
  @Override
  public WMValidateResourcePlanResponse validateResourcePlan(String name, String ns)
      throws NoSuchObjectException, InvalidObjectException, MetaException {
    return objectStore.validateResourcePlan(name, ns);
  }
  /** Forwards the resource-plan drop to the wrapped ObjectStore. */
  @Override
  public void dropResourcePlan(String name, String ns) throws NoSuchObjectException, MetaException {
    objectStore.dropResourcePlan(name, ns);
  }
  /** Forwards workload-management trigger creation to the wrapped ObjectStore. */
  @Override
  public void createWMTrigger(WMTrigger trigger)
      throws AlreadyExistsException, MetaException, NoSuchObjectException,
      InvalidOperationException {
    objectStore.createWMTrigger(trigger);
  }
  /** Forwards workload-management trigger alteration to the wrapped ObjectStore. */
  @Override
  public void alterWMTrigger(WMTrigger trigger)
      throws NoSuchObjectException, InvalidOperationException, MetaException {
    objectStore.alterWMTrigger(trigger);
  }
  /** Forwards workload-management trigger removal to the wrapped ObjectStore. */
  @Override
  public void dropWMTrigger(String resourcePlanName, String triggerName, String ns)
      throws NoSuchObjectException, InvalidOperationException, MetaException {
    objectStore.dropWMTrigger(resourcePlanName, triggerName, ns);
  }
  /** Forwards the trigger listing for a resource plan to the wrapped ObjectStore. */
  @Override
  public List<WMTrigger> getTriggersForResourcePlan(String resourcePlanName, String ns)
      throws NoSuchObjectException, MetaException {
    return objectStore.getTriggersForResourcePlan(resourcePlanName, ns);
  }
  /** Forwards workload-management pool creation to the wrapped ObjectStore. */
  @Override
  public void createPool(WMPool pool) throws AlreadyExistsException, NoSuchObjectException,
      InvalidOperationException, MetaException {
    objectStore.createPool(pool);
  }
  /** Forwards workload-management pool alteration to the wrapped ObjectStore. */
  @Override
  public void alterPool(WMNullablePool pool, String poolPath) throws AlreadyExistsException,
      NoSuchObjectException, InvalidOperationException, MetaException {
    objectStore.alterPool(pool, poolPath);
  }
  /** Forwards workload-management pool removal to the wrapped ObjectStore. */
  @Override
  public void dropWMPool(String resourcePlanName, String poolPath, String ns)
      throws NoSuchObjectException, InvalidOperationException, MetaException {
    objectStore.dropWMPool(resourcePlanName, poolPath, ns);
  }
  /** Forwards workload-management mapping create/update to the wrapped ObjectStore. */
  @Override
  public void createOrUpdateWMMapping(WMMapping mapping, boolean update)
      throws AlreadyExistsException, NoSuchObjectException, InvalidOperationException,
      MetaException {
    objectStore.createOrUpdateWMMapping(mapping, update);
  }
  /** Forwards workload-management mapping removal to the wrapped ObjectStore. */
  @Override
  public void dropWMMapping(WMMapping mapping)
      throws NoSuchObjectException, InvalidOperationException, MetaException {
    objectStore.dropWMMapping(mapping);
  }
  /** Forwards the trigger-to-pool mapping creation to the wrapped ObjectStore. */
  @Override
  public void createWMTriggerToPoolMapping(String resourcePlanName, String triggerName,
                                           String poolPath, String ns) throws AlreadyExistsException, NoSuchObjectException,
      InvalidOperationException, MetaException {
    objectStore.createWMTriggerToPoolMapping(resourcePlanName, triggerName, poolPath, ns);
  }
  /** Forwards the trigger-to-pool mapping removal to the wrapped ObjectStore. */
  @Override
  public void dropWMTriggerToPoolMapping(String resourcePlanName, String triggerName,
                                         String poolPath, String ns) throws NoSuchObjectException, InvalidOperationException, MetaException {
    objectStore.dropWMTriggerToPoolMapping(resourcePlanName, triggerName, poolPath, ns);
  }
  /** Test-double stub: database-wide partition column stats are not implemented; always returns null. */
  @Override
  public List<ColStatsObjWithSourceInfo> getPartitionColStatsForDatabase(String catName, String dbName)
      throws MetaException, NoSuchObjectException {
    // TODO Auto-generated method stub
    return null;
  }
  /** Forwards schema creation to the wrapped ObjectStore. */
  @Override
  public void createISchema(ISchema schema) throws AlreadyExistsException, MetaException,
      NoSuchObjectException {
    objectStore.createISchema(schema);
  }
  /** Forwards the schema alteration to the wrapped ObjectStore. */
  @Override
  public void alterISchema(ISchemaName schemaName, ISchema newSchema) throws NoSuchObjectException,
      MetaException {
    objectStore.alterISchema(schemaName, newSchema);
  }
  /** Forwards the schema lookup to the wrapped ObjectStore. */
  @Override
  public ISchema getISchema(ISchemaName schemaName) throws MetaException {
    return objectStore.getISchema(schemaName);
  }
  /** Forwards the schema drop to the wrapped ObjectStore. */
  @Override
  public void dropISchema(ISchemaName schemaName) throws NoSuchObjectException, MetaException {
    objectStore.dropISchema(schemaName);
  }
  /** Forwards the schema-version add to the wrapped ObjectStore. */
  @Override
  public void addSchemaVersion(SchemaVersion schemaVersion) throws AlreadyExistsException,
      InvalidObjectException, NoSuchObjectException, MetaException {
    objectStore.addSchemaVersion(schemaVersion);
  }
  /** Forwards the schema-version alteration to the wrapped ObjectStore. */
  @Override
  public void alterSchemaVersion(SchemaVersionDescriptor version, SchemaVersion newVersion) throws
      NoSuchObjectException, MetaException {
    objectStore.alterSchemaVersion(version, newVersion);
  }
  /** Forwards the schema-version lookup to the wrapped ObjectStore. */
  @Override
  public SchemaVersion getSchemaVersion(SchemaVersionDescriptor version) throws MetaException {
    return objectStore.getSchemaVersion(version);
  }
  /** Forwards the latest schema-version lookup to the wrapped ObjectStore. */
  @Override
  public SchemaVersion getLatestSchemaVersion(ISchemaName schemaName) throws MetaException {
    return objectStore.getLatestSchemaVersion(schemaName);
  }
  /** Forwards the schema-version listing to the wrapped ObjectStore. */
  @Override
  public List<SchemaVersion> getAllSchemaVersion(ISchemaName schemaName) throws MetaException {
    return objectStore.getAllSchemaVersion(schemaName);
  }
  /** Forwards the column-based schema-version search to the wrapped ObjectStore. */
  @Override
  public List<SchemaVersion> getSchemaVersionsByColumns(String colName, String colNamespace,
                                                        String type) throws MetaException {
    return objectStore.getSchemaVersionsByColumns(colName, colNamespace, type);
  }
  /** Forwards the schema-version drop to the wrapped ObjectStore. */
  @Override
  public void dropSchemaVersion(SchemaVersionDescriptor version) throws NoSuchObjectException,
      MetaException {
    objectStore.dropSchemaVersion(version);
  }
  /** Forwards the SerDe lookup to the wrapped ObjectStore. */
  @Override
  public SerDeInfo getSerDeInfo(String serDeName) throws NoSuchObjectException, MetaException {
    return objectStore.getSerDeInfo(serDeName);
  }
  /** Forwards the SerDe add to the wrapped ObjectStore. */
  @Override
  public void addSerde(SerDeInfo serde) throws AlreadyExistsException, MetaException {
    objectStore.addSerde(serde);
  }
  /** Forwards the runtime-stat add to the wrapped ObjectStore. */
  @Override
  public void addRuntimeStat(RuntimeStat stat) throws MetaException {
    objectStore.addRuntimeStat(stat);
  }
  /** Forwards the runtime-stat listing to the wrapped ObjectStore. */
  @Override
  public List<RuntimeStat> getRuntimeStats(int maxEntries, int maxCreateTime) throws MetaException {
    return objectStore.getRuntimeStats(maxEntries, maxCreateTime);
  }
  /** Forwards the runtime-stat deletion to the wrapped ObjectStore. */
  @Override
  public int deleteRuntimeStats(int maxRetainSecs) throws MetaException {
    return objectStore.deleteRuntimeStats(maxRetainSecs);
  }
  /** Test-double stub: stats-based table-name listing is not implemented; always returns null. */
  @Override
  public List<TableName> getTableNamesWithStats() throws MetaException,
      NoSuchObjectException {
    return null;
  }
  /** Test-double stub: stats-based table-name listing is not implemented; always returns null. */
  @Override
  public List<TableName> getAllTableNamesForStats() throws MetaException,
      NoSuchObjectException {
    return null;
  }
  /** Test-double stub: partition-column stats listing is not implemented; always returns null. */
  @Override
  public Map<String, List<String>> getPartitionColsWithStats(String catName,
                                                             String dbName, String tableName) throws MetaException,
      NoSuchObjectException {
    return null;
  }
  /** Test-double stub: scheduled-query polling is unimplemented and always throws. */
  @Override
  public ScheduledQueryPollResponse scheduledQueryPoll(ScheduledQueryPollRequest request) throws MetaException {
    throw new RuntimeException("unimplemented");
  }
  /** Test-double stub: scheduled-query maintenance is unimplemented and always throws. */
  @Override
  public void scheduledQueryMaintenance(ScheduledQueryMaintenanceRequest request)
      throws MetaException, NoSuchObjectException, AlreadyExistsException, InvalidInputException {
    throw new RuntimeException("unimplemented");
  }
  /** Test-double stub: scheduled-query progress reporting is unimplemented and always throws. */
  @Override
  public void scheduledQueryProgress(ScheduledQueryProgressInfo info)
      throws MetaException, NoSuchObjectException, InvalidOperationException {
    throw new RuntimeException("unimplemented");
  }
  /** Test-double stub: replication-metric writes are unimplemented and always throw. */
  @Override
  public void addReplicationMetrics(ReplicationMetricList replicationMetricList) {
    throw new RuntimeException("unimplemented");
  }
  /** Test-double stub: replication-metric reads are unimplemented and always throw. */
  @Override
  public ReplicationMetricList getReplicationMetrics(GetReplicationMetricsRequest replicationMetricsRequest) {
    throw new RuntimeException("unimplemented");
  }
  /** Forwards the replication-metric deletion to the wrapped ObjectStore. */
  @Override
  public int deleteReplicationMetrics(int maxRetainSecs) {
    return objectStore.deleteReplicationMetrics(maxRetainSecs);
  }
  /** Test-double stub: scheduled-query lookup is unimplemented and always throws. */
  @Override
  public ScheduledQuery getScheduledQuery(ScheduledQueryKey scheduleKey) throws MetaException, NoSuchObjectException {
    throw new RuntimeException("unimplemented");
  }
  /** Forwards the scheduled-execution deletion to the wrapped ObjectStore. */
  @Override
  public int deleteScheduledExecutions(int maxRetainSecs) {
    return objectStore.deleteScheduledExecutions(maxRetainSecs);
  }
  /** Forwards the scheduled-execution timeout marking to the wrapped ObjectStore. */
  @Override
  public int markScheduledExecutionsTimedOut(int timeoutSecs) throws InvalidOperationException, MetaException {
    return objectStore.markScheduledExecutionsTimedOut(timeoutSecs);
  }
public void deleteAllPartitionColumnStatistics(TableName tn,String s) {
objectStore.deleteAllPartitionColumnStatistics(tn,s);
}
  /** Forwards the stored-procedure create/update to the wrapped ObjectStore. */
  @Override
  public void createOrUpdateStoredProcedure(StoredProcedure proc) throws NoSuchObjectException, MetaException {
    objectStore.createOrUpdateStoredProcedure(proc);
  }
  /** Forwards the stored-procedure lookup to the wrapped ObjectStore. */
  @Override
  public StoredProcedure getStoredProcedure(String catName, String db, String name) throws MetaException {
    return objectStore.getStoredProcedure(catName, db, name);
  }
  /** Forwards the stored-procedure drop to the wrapped ObjectStore. */
  @Override
  public void dropStoredProcedure(String catName, String dbName, String funcName) throws MetaException {
    objectStore.dropStoredProcedure(catName, dbName, funcName);
  }
  /** Forwards the stored-procedure listing to the wrapped ObjectStore. */
  @Override
  public List<String> getAllStoredProcedures(ListStoredProcedureRequest request) {
    return objectStore.getAllStoredProcedures(request);
  }
  /** Forwards the package add to the wrapped ObjectStore. */
  @Override
  public void addPackage(AddPackageRequest request) throws MetaException, NoSuchObjectException {
    objectStore.addPackage(request);
  }
  /** Forwards the package lookup to the wrapped ObjectStore. */
  @Override
  public Package findPackage(GetPackageRequest request) {
    return objectStore.findPackage(request);
  }
  /** Forwards the package listing to the wrapped ObjectStore. */
  @Override
  public List<String> listPackages(ListPackageRequest request) {
    return objectStore.listPackages(request);
  }
/** Delegates directly to the wrapped {@code objectStore}. */
@Override
public void dropPackage(DropPackageRequest request) {
objectStore.dropPackage(request);
}
/** Delegates directly to the wrapped {@code objectStore}. */
@Override
public Map<String, Map<String, String>> updatePartitionColumnStatisticsInBatch(
Map<String, ColumnStatistics> partColStatsMap,
Table tbl, List<TransactionalMetaStoreEventListener> listeners,
String validWriteIds, long writeId)
throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException {
return objectStore.updatePartitionColumnStatisticsInBatch(partColStatsMap, tbl, listeners, validWriteIds, writeId);
}
}
| |
/*
* This file is part of Quark Framework, licensed under the APACHE License.
*
* Copyright (c) 2014-2016 Agustin L. Alvarez <wolftein1@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ar.com.quark.input;
import ar.com.quark.system.utility.array.Int32Array;
import ar.com.quark.input.device.InputKey;
import ar.com.quark.input.device.InputKeyboard;
import ar.com.quark.input.device.InputMouse;
import ar.com.quark.input.device.InputMouseButton;
import ar.com.quark.system.utility.array.ArrayFactory;
import java.util.HashSet;
import java.util.Set;
/**
* <b>Default</b> implementation for {@link InputManager}.
*/
public final class DefaultInputManager implements InputManager {
/**
* Hold all raw input listener of the manager.
*/
private final Set<InputListener> mListener = new HashSet<>();
/**
* Hold the keyboard device (Can only be one).
*/
private InputKeyboard mKeyboard;
/**
* Hold the mouse device (Can only be one).
*/
private InputMouse mMouse;
private int mCursorX;
private int mCursorY;
private final boolean[] mKey = new boolean[InputKey.VALUES.length];
private final boolean[] mButton = new boolean[InputMouseButton.VALUES.length];
/**
* Hold all the event(s) from the device(s) efficiently.
*/
private final Int32Array mBuffer = ArrayFactory.allocateInt32Array(1024);
/**
* <p>Handle when the module initialise</p>
*
* @param keyboard the keyboard implementation
* @param mouse the mouse implementation
*/
public void onModuleCreate(InputKeyboard keyboard, InputMouse mouse) {
//!
//! Initialise the <code>Keyboard</code> device
//!
mKeyboard = keyboard;
mKeyboard.create();
//!
//! Initialise the <code>Mouse</code> device
//!
mMouse = mouse;
mMouse.create();
}
/**
* <p>Handle when the module destroy</p>
*/
public void onModuleDestroy() {
//!
//! Destroy the <code>Keyboard</code> device
//!
mKeyboard.destroy();
//!
//! Destroy the <code>Mouse</code> device
//!
mMouse.destroy();
}
/**
* <p>Handle when the module update</p>
*/
public void onModuleUpdate() {
//!
//! Update <code>Keyboard</code> device.
//!
mKeyboard.update(mBuffer);
//!
//! Update <code>Mouse</code> device.
//!
mMouse.update(mBuffer);
//!
//! Process all input-event being queue by all device(s) attached.
//!
onProcessInputEvent();
}
/**
* {@inheritDoc}
*/
@Override
public void invoke(int[] event) {
switch (event[0]) {
//!
//! KEYBOARD
//!
case InputKeyboard.EVENT_KEY_UP:
onKeyboardKeyUp(InputKey.VALUES[event[1]]);
break;
case InputKeyboard.EVENT_KEY_DOWN:
onKeyboardKeyDown(InputKey.VALUES[event[1]]);
break;
case InputKeyboard.EVENT_KEY_TYPE:
onKeyboardKeyType((char) event[1]);
break;
//!
//! MOUSE
//!
case InputMouse.EVENT_MOVE:
onMouseMove(event[1], event[2]);
break;
case InputMouse.EVENT_BUTTON_UP:
onMouseButtonUp(InputMouseButton.VALUES[event[1]]);
break;
case InputMouse.EVENT_BUTTON_DOWN:
onMouseButtonDown(InputMouseButton.VALUES[event[1]]);
break;
case InputMouse.EVENT_WHEEL:
onMouseWheel(event[1]);
break;
}
}
/**
* {@inheritDoc}
*/
@Override
public void addInputListener(InputListener listener) {
mListener.add(listener);
}
/**
* {@inheritDoc}
*/
@Override
public void removeInputListener(InputListener listener) {
mListener.remove(listener);
}
/**
* {@inheritDoc}
*/
@Override
public void setCursorMode(boolean activate) {
mMouse.setCursorMode(activate);
}
/**
* {@inheritDoc}
*/
@Override
public void setCursorPosition(int x, int y) {
mMouse.setCursorPosition(x, y);
}
/**
* {@inheritDoc}
*/
@Override
public int getCursorX() {
return mCursorX;
}
/**
* {@inheritDoc}
*/
@Override
public int getCursorY() {
return mCursorY;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isButtonUp(InputMouseButton button) {
return !mButton[button.ordinal()];
}
/**
* {@inheritDoc}
*/
@Override
public boolean isButtonDown(InputMouseButton button) {
return mButton[button.ordinal()];
}
/**
* {@inheritDoc}
*/
@Override
public boolean isKeyUp(InputKey key) {
return !mKey[key.ordinal()];
}
/**
* {@inheritDoc}
*/
@Override
public boolean isKeyDown(InputKey key) {
return mKey[key.ordinal()];
}
/**
* <p>Process all input-event(s)</p>
*/
private void onProcessInputEvent() {
mBuffer.flip();
while (mBuffer.hasRemaining()) {
switch (mBuffer.read()) {
//!
//! KEYBOARD
//!
case InputKeyboard.EVENT_KEY_UP:
onKeyboardKeyUp(InputKey.VALUES[mBuffer.read()]);
break;
case InputKeyboard.EVENT_KEY_DOWN:
onKeyboardKeyDown(InputKey.VALUES[mBuffer.read()]);
break;
case InputKeyboard.EVENT_KEY_TYPE:
onKeyboardKeyType((char) mBuffer.read());
break;
//!
//! MOUSE
//!
case InputMouse.EVENT_MOVE:
onMouseMove(mBuffer.read(), mBuffer.read());
break;
case InputMouse.EVENT_BUTTON_UP:
onMouseButtonUp(InputMouseButton.VALUES[mBuffer.read()]);
break;
case InputMouse.EVENT_BUTTON_DOWN:
onMouseButtonDown(InputMouseButton.VALUES[mBuffer.read()]);
break;
case InputMouse.EVENT_WHEEL:
onMouseWheel(mBuffer.read());
break;
}
}
mBuffer.clear();
}
/**
* <p>Handle {@link InputKeyboard#EVENT_KEY_UP}</p>
*/
private void onKeyboardKeyUp(InputKey key) {
for (final InputListener listener : mListener) {
//!
//! Check if the listener has consume the event.
//!
if (!listener.onKeyboardKeyUp(key))
break;
}
mKey[key.ordinal()] = false;
}
/**
* <p>Handle {@link InputKeyboard#EVENT_KEY_DOWN}</p>
*/
private void onKeyboardKeyDown(InputKey key) {
for (final InputListener listener : mListener) {
//!
//! Check if the listener has consume the event.
//!
if (!listener.onKeyboardKeyDown(key))
break;
}
mKey[key.ordinal()] = true;
}
/**
* <p>Handle {@link InputKeyboard#EVENT_KEY_TYPE}</p>
*/
private void onKeyboardKeyType(char key) {
for (final InputListener listener : mListener) {
//!
//! Check if the listener has consume the event.
//!
if (!listener.onKeyboardKeyType(key))
break;
}
}
/**
* <p>Handle {@link InputMouse#EVENT_MOVE}</p>
*/
private void onMouseMove(int x, int y) {
final int dx = mCursorX - x;
final int dy = mCursorY - y;
for (final InputListener listener : mListener) {
//!
//! Check if the listener has consume the event.
//!
if (!listener.onMouseMove(x, y, dx, dy))
break;
}
mCursorX = x;
mCursorY = y;
}
/**
* <p>Handle {@link InputMouse#EVENT_BUTTON_UP}</p>
*/
private void onMouseButtonUp(InputMouseButton button) {
for (final InputListener listener : mListener) {
//!
//! Check if the listener has consume the event.
//!
if (!listener.onMouseButtonUp(mCursorX, mCursorY, button))
break;
}
mButton[button.ordinal()] = false;
}
/**
* <p>Handle {@link InputMouse#EVENT_BUTTON_DOWN}</p>
*/
private void onMouseButtonDown(InputMouseButton button) {
for (final InputListener listener : mListener) {
//!
//! Check if the listener has consume the event.
//!
if (!listener.onMouseButtonDown(mCursorX, mCursorY, button))
break;
}
mButton[button.ordinal()] = true;
}
/**
* <p>Handle {@link InputMouse#EVENT_WHEEL}</p>
*/
private void onMouseWheel(int delta) {
for (final InputListener listener : mListener) {
//!
//! Check if the listener has consume the event.
//!
if (!listener.onMouseWheel(mCursorX, mCursorY, delta))
break;
}
}
}
| |
/**
* $URL$
* $Id$
*
* Copyright (c) 2009 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sakaiproject.lti2;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.TreeMap;
import java.security.SecureRandom;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.imsglobal.basiclti.BasicLTIConstants;
import org.imsglobal.json.IMSJSONRequest;
import org.imsglobal.lti2.LTI2Config;
import org.imsglobal.lti2.LTI2Constants;
import org.imsglobal.lti2.LTI2Util;
import org.imsglobal.lti2.ToolProxy;
import org.imsglobal.lti2.ContentItem;
import org.imsglobal.lti2.objects.Service_offered;
import org.imsglobal.lti2.objects.StandardServices;
import org.imsglobal.lti2.objects.ToolConsumer;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import org.sakaiproject.basiclti.util.SakaiBLTIUtil;
import org.sakaiproject.basiclti.util.PortableShaUtil;
import org.sakaiproject.component.cover.ComponentManager;
import org.sakaiproject.component.cover.ServerConfigurationService;
import org.sakaiproject.lti.api.LTIService;
import org.sakaiproject.util.ResourceLoader;
import org.sakaiproject.util.foorm.SakaiFoorm;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
/**
* Notes:
*
* This program is directly exposed as a URL to receive IMS Basic LTI messages
* so it must be carefully reviewed and any changes must be looked at carefully.
*
*/
@SuppressWarnings("deprecation")
public class LTI2Service extends HttpServlet {
private static final long serialVersionUID = 1L;
private static Log M_log = LogFactory.getLog(LTI2Service.class);
private static ResourceLoader rb = new ResourceLoader("blis");
protected static SakaiFoorm foorm = new SakaiFoorm();
protected static LTIService ltiService = null;
protected String resourceUrl = null;
protected Service_offered LTI2ResultItem = null;
protected Service_offered LTI2LtiLinkSettings = null;
protected Service_offered LTI2ToolProxyBindingSettings = null;
protected Service_offered LTI2ToolProxySettings = null;
// Copy these in...
private static final String SVC_tc_profile = SakaiBLTIUtil.SVC_tc_profile;
private static final String SVC_tc_registration = SakaiBLTIUtil.SVC_tc_registration;
private static final String SVC_Settings = SakaiBLTIUtil.SVC_Settings;
private static final String SVC_Result = SakaiBLTIUtil.SVC_Result;
private static final String LTI1_PATH = SakaiBLTIUtil.LTI1_PATH;
private static final String LTI2_PATH = SakaiBLTIUtil.LTI2_PATH;
private static final String APPLICATION_JSON = "application/json";
@Override
public void init(ServletConfig config) throws ServletException {
super.init(config);
// Resolve the shared (static) LTIService lazily from the component manager.
if ( ltiService == null ) ltiService = (LTIService) ComponentManager.get("org.sakaiproject.lti.api.LTIService");
// Base URL for all LTI2 REST resources served by this servlet.
resourceUrl = SakaiBLTIUtil.getOurServerUrl() + LTI2_PATH;
// Pre-build the service descriptors advertised in the Tool Consumer profile;
// the {...} path segments are URI-template variables filled in by the tool.
LTI2ResultItem = StandardServices.LTI2ResultItem(resourceUrl
+ SVC_Result + "/{" + BasicLTIConstants.LIS_RESULT_SOURCEDID + "}");
LTI2LtiLinkSettings = StandardServices.LTI2LtiLinkSettings(resourceUrl
+ SVC_Settings + "/" + LTI2Util.SCOPE_LtiLink + "/{" + BasicLTIConstants.RESOURCE_LINK_ID + "}");
// NOTE(review): the binding-settings descriptor reuses the LTI2ToolProxySettings
// factory (only the URL differs) — confirm this is intentional.
LTI2ToolProxyBindingSettings = StandardServices.LTI2ToolProxySettings(resourceUrl
+ SVC_Settings + "/" + LTI2Util.SCOPE_ToolProxyBinding + "/{" + BasicLTIConstants.RESOURCE_LINK_ID + "}");
LTI2ToolProxySettings = StandardServices.LTI2ToolProxySettings(resourceUrl
+ SVC_Settings + "/" + LTI2Util.SCOPE_ToolProxy + "/{" + LTI2Constants.TOOL_PROXY_GUID + "}");
}
/** PUT requests are routed through the common POST handler. */
protected void doPut(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
doPost(request,response);
}
/** GET requests are routed through the common POST handler. */
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
doPost(request,response);
}
/**
 * Common entry point for all HTTP verbs: dispatch to {@link #doRequest} and
 * convert any uncaught exception into a 500 with a JSON error body.
 */
protected void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException
{
try {
doRequest(request, response);
} catch (Exception e) {
String ipAddress = request.getRemoteAddr();
String uri = request.getRequestURI();
// Pass the exception to the logger so the stack trace reaches the log
// instead of stderr (printStackTrace bypassed the logging framework).
M_log.warn("General LTI2 Failure URI="+uri+" IP=" + ipAddress, e);
response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
doErrorJSON(request, response, null, "General failure", e);
}
}
/**
 * Route an incoming LTI2 request to the matching handler based on the URL
 * shape: /.../{controller}/{id...}. Unknown shapes get a 501 JSON error.
 */
@SuppressWarnings("unchecked")
protected void doRequest(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException
{
String ipAddress = request.getRemoteAddr();
M_log.debug("LTI Service request from IP=" + ipAddress);
String uri = request.getRequestURI();
String [] parts = uri.split("/");
if ( parts.length < 4 ) {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
doErrorJSON(request, response, null, "Incorrect url format", null);
return;
}
// parts[3] is the controller segment, parts[4..] the resource identifiers.
String controller = parts[3];
if ( SVC_tc_profile.equals(controller) && parts.length == 5 ) {
String profile_id = parts[4];
getToolConsumerProfile(request,response,profile_id);
return;
} else if ( SVC_tc_registration.equals(controller) && parts.length == 5 ) {
String profile_id = parts[4];
registerToolProviderProfile(request, response, profile_id);
return;
} else if ( SVC_Result.equals(controller) && parts.length == 5 ) {
String sourcedid = parts[4];
handleResultRequest(request, response, sourcedid);
return;
} else if ( SVC_Settings.equals(controller) && parts.length >= 6 ) {
handleSettingsRequest(request, response, parts);
return;
}
IMSJSONRequest jsonRequest = new IMSJSONRequest(request);
if ( jsonRequest.valid ) {
// Request bodies can contain sensitive data; keep them off stdout and
// restrict them to debug-level logging (was System.out.println).
M_log.debug(jsonRequest.getPostBody());
}
response.setStatus(HttpServletResponse.SC_NOT_IMPLEMENTED);
M_log.warn("Unknown request="+uri);
doErrorJSON(request, response, null, "Unknown request="+uri, null);
}
/**
 * Serve the Tool Consumer profile for the given profile_id as pretty-printed
 * JSON; 404 when no deployment matches the consumer key.
 */
protected void getToolConsumerProfile(HttpServletRequest request,
HttpServletResponse response,String profile_id)
{
Map<String,Object> deploy = ltiService.getDeployForConsumerKeyDao(profile_id);
if ( deploy == null ) {
response.setStatus(HttpServletResponse.SC_NOT_FOUND);
return;
}
ToolConsumer consumer = getToolConsumerProfile(deploy, profile_id);
ObjectMapper mapper = new ObjectMapper();
try {
// http://stackoverflow.com/questions/6176881/how-do-i-make-jackson-pretty-print-the-json-content-it-generates
ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
// ***IMPORTANT!!!*** for Jackson 2.x use the line below instead of the one above:
// ObjectWriter writer = mapper.writer().withDefaultPrettyPrinter();
response.setContentType(APPLICATION_JSON);
PrintWriter out = response.getWriter();
out.println(writer.writeValueAsString(consumer));
}
catch (Exception e) {
// Log with the cause instead of printStackTrace so serialization failures
// are visible in the application log.
M_log.warn("Failed to serialize ToolConsumer profile_id="+profile_id, e);
}
}
/**
 * Build the Tool Consumer profile object for a deployment: base capabilities
 * first, then service endpoints gated by per-deployment flags combined with
 * server-wide configuration properties.
 */
protected ToolConsumer getToolConsumerProfile(Map<String, Object> deploy, String profile_id)
{
// Load the configuration data
LTI2Config cnf = new SakaiLTI2Config();
if ( cnf.getGuid() == null ) {
// Fall back to sample data so development setups still work; loudly
// warn because this must never run in production.
M_log.error("*********************************************");
M_log.error("* LTI2 NOT CONFIGURED - Using Sample Data   *");
M_log.error("* Do not use this in production.  Test only *");
M_log.error("*********************************************");
// cnf = new org.imsglobal.lti2.LTI2ConfigSample();
cnf = new SakaiLTI2Base();
}
String serverUrl = SakaiBLTIUtil.getOurServerUrl();
ToolConsumer consumer = new ToolConsumer(profile_id+"", resourceUrl, cnf);
consumer.allowSplitSecret();
consumer.allowHmac256();
consumer.addCapability(ContentItem.getCapability(ContentItem.TYPE_LTILINK));
consumer.addCapability(ContentItem.getCapability(ContentItem.TYPE_FILEITEM));
// Per-deployment privacy flags: only advertise email/name when enabled.
if (foorm.getLong(deploy.get(LTIService.LTI_SENDEMAILADDR)) > 0 ) {
consumer.allowEmail();
}
if (foorm.getLong(deploy.get(LTIService.LTI_SENDNAME)) > 0 ) {
consumer.allowName();
}
List<Service_offered> services = consumer.getService_offered();
services.add(StandardServices.LTI2Registration(serverUrl + LTI2_PATH + SVC_tc_registration + "/" + profile_id));
// Each optional service requires BOTH the server-wide property and the
// deployment's own flag to be enabled.
String allowOutcomes = ServerConfigurationService.getString(SakaiBLTIUtil.BASICLTI_OUTCOMES_ENABLED, SakaiBLTIUtil.BASICLTI_OUTCOMES_ENABLED_DEFAULT);
if ("true".equals(allowOutcomes) && foorm.getLong(deploy.get(LTIService.LTI_ALLOWOUTCOMES)) > 0 ) {
consumer.allowResult();
services.add(LTI2ResultItem);
services.add(StandardServices.LTI1Outcomes(serverUrl+LTI1_PATH));
services.add(SakaiLTI2Services.BasicOutcomes(serverUrl+LTI1_PATH));
}
String allowRoster = ServerConfigurationService.getString(SakaiBLTIUtil.BASICLTI_ROSTER_ENABLED, SakaiBLTIUtil.BASICLTI_ROSTER_ENABLED_DEFAULT);
if ("true".equals(allowRoster) && foorm.getLong(deploy.get(LTIService.LTI_ALLOWROSTER)) > 0 ) {
services.add(SakaiLTI2Services.BasicRoster(serverUrl+LTI1_PATH));
}
String allowSettings = ServerConfigurationService.getString(SakaiBLTIUtil.BASICLTI_SETTINGS_ENABLED, SakaiBLTIUtil.BASICLTI_SETTINGS_ENABLED_DEFAULT);
if ("true".equals(allowSettings) && foorm.getLong(deploy.get(LTIService.LTI_ALLOWSETTINGS)) > 0 ) {
consumer.allowSettings();
services.add(SakaiLTI2Services.BasicSettings(serverUrl+LTI1_PATH));
services.add(LTI2LtiLinkSettings);
services.add(LTI2ToolProxySettings);
services.add(LTI2ToolProxyBindingSettings);
}
String allowLori = ServerConfigurationService.getString(SakaiBLTIUtil.BASICLTI_LORI_ENABLED, SakaiBLTIUtil.BASICLTI_LORI_ENABLED_DEFAULT);
if ("true".equals(allowLori) && foorm.getLong(deploy.get(LTIService.LTI_ALLOWLORI)) > 0 ) {
services.add(SakaiLTI2Services.LORI_XML(serverUrl+LTI1_PATH));
}
return consumer;
}
/**
 * Handle a ToolProxy (re-)registration from a tool provider.
 *
 * Validates the OAuth-signed JSON body against the deployment's registration
 * (or live consumer) credentials, negotiates the shared secret — including
 * the split-secret flow — persists the ToolProxy on the deployment row, and
 * returns the ToolProxy.id JSON response with 201 Created.
 *
 * Cleanup in this revision: removed the unused locals {@code new_secret},
 * {@code serverUrl} and {@code tool_services}, parameterized the raw
 * response map, and routed the parse-failure stack trace through the logger.
 */
public void registerToolProviderProfile(HttpServletRequest request,HttpServletResponse response,
String profile_id) throws java.io.IOException
{
// Parse the JSON
IMSJSONRequest jsonRequest = new IMSJSONRequest(request);
if ( ! jsonRequest.valid ) {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
doErrorJSON(request, response, jsonRequest, "Request is not in a valid format:"+jsonRequest.errorMessage, null);
return;
}
Map<String,Object> deploy = ltiService.getDeployForConsumerKeyDao(profile_id);
if ( deploy == null ) {
response.setStatus(HttpServletResponse.SC_NOT_FOUND);
return;
}
Long deployKey = foorm.getLong(deploy.get(LTIService.LTI_ID));
// See if we can even register...
Long reg_state = foorm.getLong(deploy.get(LTIService.LTI_REG_STATE));
String key = null;
String secret = null;
String ack = null;
if ( reg_state == 0 ) {
// Initial registration is signed with the one-time registration credentials.
key = (String) deploy.get(LTIService.LTI_REG_KEY);
secret = (String) deploy.get(LTIService.LTI_REG_PASSWORD);
} else {
// Re-registration is signed with the live consumer credentials and must
// supply a confirmation URL for the later activation step.
key = (String) deploy.get(LTIService.LTI_CONSUMERKEY);
secret = (String) deploy.get(LTIService.LTI_SECRET);
secret = SakaiBLTIUtil.decryptSecret(secret);
ack = request.getHeader("VND-IMS-CONFIRM-URL");
if ( ack == null || ack.length() < 1 ) {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
doErrorJSON(request, response, jsonRequest, "Re-registration requires VND-IMS-CONFIRM-URL header", null);
return;
}
}
// Lets check the signature
if ( key == null || secret == null ) {
response.setStatus(HttpServletResponse.SC_FORBIDDEN);
doErrorJSON(request, response, jsonRequest, "Deployment is missing credentials", null);
return;
}
jsonRequest.validateRequest(key, secret, request);
if ( !jsonRequest.valid ) {
response.setStatus(HttpServletResponse.SC_FORBIDDEN);
doErrorJSON(request, response, jsonRequest, "OAuth signature failure", null);
return;
}
ToolProxy toolProxy = null;
try {
toolProxy = new ToolProxy(jsonRequest.getPostBody());
} catch (Throwable t ) {
// Log with the cause instead of printStackTrace.
M_log.warn("ToolProxy JSON parse failed profile_id="+profile_id, t);
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
doErrorJSON(request, response, jsonRequest, "JSON parse failed", null);
return;
}
JSONObject default_custom = toolProxy.getCustom();
JSONObject security_contract = toolProxy.getSecurityContract();
if ( security_contract == null ) {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
doErrorJSON(request, response, jsonRequest, "JSON missing security_contract", null);
return;
}
String shared_secret = (String) security_contract.get(LTI2Constants.SHARED_SECRET);
String tp_half_shared_secret = (String) security_contract.get(LTI2Constants.TP_HALF_SHARED_SECRET);
String tc_half_shared_secret = null;
if ( tp_half_shared_secret != null ) {
// Split-secret flow: validate the provider's half, generate our half,
// and combine them; the stored contract masks all secret material.
if ( ! tp_half_shared_secret.matches("^[a-f0-9]*$") ) {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
doErrorJSON(request, response, jsonRequest, "tp_half_shared secret lower-case hex only", null);
return;
}
if ( tp_half_shared_secret.length() != 128 ) {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
doErrorJSON(request, response, jsonRequest, "tp_half_shared secret must be 128 characters", null);
return;
}
SecureRandom random = new SecureRandom();
byte[] bytes = new byte[512/8];
random.nextBytes(bytes);
tc_half_shared_secret = PortableShaUtil.bin2hex(bytes);
if ( shared_secret != null ) security_contract.put(LTI2Constants.SHARED_SECRET, "*********");
shared_secret = tc_half_shared_secret + tp_half_shared_secret;
security_contract.put(LTI2Constants.TP_HALF_SHARED_SECRET, "*********");
} else {
if ( shared_secret != null ) security_contract.put(LTI2Constants.SHARED_SECRET, "*********");
if ( tp_half_shared_secret != null ) security_contract.put(LTI2Constants.TP_HALF_SHARED_SECRET, "*********");
}
if ( shared_secret == null ) {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
doErrorJSON(request, response, jsonRequest, "JSON missing shared_secret", null);
return;
}
// Make sure that the requested services are a subset of the offered services
ToolConsumer consumer = getToolConsumerProfile(deploy, profile_id);
String retval = toolProxy.validateServices(consumer);
if ( retval != null ) {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
doErrorJSON(request, response, jsonRequest, retval, null);
return;
}
// Parse the tool profile bit and extract the tools with error checking
retval = toolProxy.validateCapabilities(consumer);
if ( retval != null ) {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
doErrorJSON(request, response, jsonRequest, retval, null);
return;
}
// Passed all the tests, lets commit this...
Map<String, Object> deployUpdate = new TreeMap<String, Object> ();
shared_secret = SakaiBLTIUtil.encryptSecret(shared_secret);
if ( reg_state == 0 ) {
deployUpdate.put(LTIService.LTI_SECRET, shared_secret);
} else {
// In Re-Registration, the new secret is not committed until Activation
deployUpdate.put(LTIService.LTI_NEW_SECRET, shared_secret);
}
// Indicate ready to validate and kill the interim info
deployUpdate.put(LTIService.LTI_REG_STATE, LTIService.LTI_REG_STATE_REGISTERED);
deployUpdate.put(LTIService.LTI_REG_KEY, "");
deployUpdate.put(LTIService.LTI_REG_ACK, ack);
deployUpdate.put(LTIService.LTI_REG_PASSWORD, "");
if ( default_custom != null ) deployUpdate.put(LTIService.LTI_SETTINGS, default_custom.toString());
deployUpdate.put(LTIService.LTI_REG_PROFILE, toolProxy.toString());
M_log.debug("deployUpdate="+deployUpdate);
Object obj = ltiService.updateDeployDao(deployKey, deployUpdate);
boolean success = ( obj instanceof Boolean ) && ( (Boolean) obj == Boolean.TRUE);
if ( ! success ) {
M_log.warn("updateDeployDao fail deployKey="+deployKey+"\nretval="+obj+"\ndata="+deployUpdate);
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
doErrorJSON(request, response, jsonRequest, "Failed update of deployment="+deployKey, null);
return;
}
// Share our happiness with the Tool Provider
Map<String, Object> jsonResponse = new TreeMap<String, Object>();
jsonResponse.put(LTI2Constants.CONTEXT,StandardServices.TOOLPROXY_ID_CONTEXT);
jsonResponse.put(LTI2Constants.TYPE, StandardServices.TOOLPROXY_ID_TYPE);
jsonResponse.put(LTI2Constants.JSONLD_ID, resourceUrl + SVC_tc_registration + "/" +profile_id);
jsonResponse.put(LTI2Constants.TOOL_PROXY_GUID, profile_id);
jsonResponse.put(LTI2Constants.CUSTOM_URL, resourceUrl + SVC_Settings + "/" + LTI2Util.SCOPE_ToolProxy + "/" +profile_id);
if ( tc_half_shared_secret != null ) jsonResponse.put(LTI2Constants.TC_HALF_SHARED_SECRET, tc_half_shared_secret);
response.setContentType(StandardServices.TOOLPROXY_ID_FORMAT);
response.setStatus(HttpServletResponse.SC_CREATED);
String jsonText = JSONValue.toJSONString(jsonResponse);
M_log.debug(jsonText);
PrintWriter out = response.getWriter();
out.println(jsonText);
}
/**
 * Serve the LTI2 Result resource: GET returns the current grade/comment for
 * the sourcedid as JSON; PUT stores a new grade parsed from the request body.
 *
 * Bug fix: json-simple parses whole numbers as {@code Long}, so the old
 * {@code (Double) oGrade} cast threw ClassCastException for integer grades
 * (e.g. {@code "value": 1}); numeric values now go through
 * {@code Number.doubleValue()}.
 */
public void handleResultRequest(HttpServletRequest request,HttpServletResponse response,
String sourcedid) throws java.io.IOException
{
String allowOutcomes = ServerConfigurationService.getString(SakaiBLTIUtil.BASICLTI_OUTCOMES_ENABLED, SakaiBLTIUtil.BASICLTI_OUTCOMES_ENABLED_DEFAULT);
if ( ! "true".equals(allowOutcomes) ) {
response.setStatus(HttpServletResponse.SC_FORBIDDEN);
doErrorJSON(request,response, null, "Result resources not available", null);
return;
}
Object retval = null;
IMSJSONRequest jsonRequest = null;
if ( "GET".equals(request.getMethod()) ) {
// getGrade returns a Map on success or a String error message.
retval = SakaiBLTIUtil.getGrade(sourcedid, request, ltiService);
if ( ! (retval instanceof Map) ) {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
doErrorJSON(request,response, jsonRequest, (String) retval, null);
return;
}
Map grade = (Map) retval;
Map<String,Object> jsonResponse = new TreeMap<String,Object>();
Map<String,Object> resultScore = new TreeMap<String,Object>();
jsonResponse.put(LTI2Constants.CONTEXT,StandardServices.RESULT_CONTEXT);
jsonResponse.put(LTI2Constants.TYPE, StandardServices.RESULT_TYPE);
jsonResponse.put(LTI2Constants.COMMENT, grade.get(LTI2Constants.COMMENT));
resultScore.put(LTI2Constants.TYPE, LTI2Constants.GRADE_TYPE_DECIMAL);
resultScore.put(LTI2Constants.VALUE, grade.get(LTI2Constants.GRADE));
jsonResponse.put(LTI2Constants.RESULTSCORE,resultScore);
response.setContentType(StandardServices.RESULT_FORMAT);
response.setStatus(HttpServletResponse.SC_OK);
String jsonText = JSONValue.toJSONString(jsonResponse);
M_log.debug(jsonText);
PrintWriter out = response.getWriter();
out.println(jsonText);
} else if ( "PUT".equals(request.getMethod()) ) {
retval = "Error parsing input data";
try {
jsonRequest = new IMSJSONRequest(request);
JSONObject requestData = (JSONObject) JSONValue.parse(jsonRequest.getPostBody());
String comment = (String) requestData.get(LTI2Constants.COMMENT);
JSONObject resultScore = (JSONObject) requestData.get(LTI2Constants.RESULTSCORE);
Object oGrade = resultScore.get(LTI2Constants.VALUE);
Double dGrade = null;
if ( oGrade instanceof String ) {
// Double.valueOf throws NumberFormatException on bad input, which the
// surrounding catch converts into an error response (as before).
dGrade = Double.valueOf((String) oGrade);
} else if ( oGrade instanceof Number ) {
// Handles Long (json-simple's type for whole numbers) as well as Double.
dGrade = ((Number) oGrade).doubleValue();
}
if ( dGrade != null ) {
retval = SakaiBLTIUtil.setGrade(sourcedid, request, ltiService, dGrade, comment);
} else {
retval = "Unable to parse grade="+oGrade;
}
} catch (Exception e) {
retval = "Error: "+ e.getMessage();
}
if ( retval instanceof Boolean && (Boolean) retval ) {
response.setStatus(HttpServletResponse.SC_OK);
} else {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
}
} else {
retval = "Unsupported operation:" + request.getMethod();
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
}
if ( retval instanceof String ) {
doErrorJSON(request,response, jsonRequest, (String) retval, null);
return;
}
}
// If this code looks like a hack - it is because the spec is a hack.
// There are five possible scenarios for GET and two possible scenarios
// for PUT. I begged to simplify the business logic but was overruled.
// So we write obtuse code.
public void handleSettingsRequest(HttpServletRequest request,HttpServletResponse response,
String[] parts) throws java.io.IOException
{
String allowSettings = ServerConfigurationService.getString(SakaiBLTIUtil.BASICLTI_SETTINGS_ENABLED, SakaiBLTIUtil.BASICLTI_SETTINGS_ENABLED_DEFAULT);
if ( ! "true".equals(allowSettings) ) {
response.setStatus(HttpServletResponse.SC_FORBIDDEN);
doErrorJSON(request,response, null, "Tool settings not available", null);
return;
}
String URL = SakaiBLTIUtil.getOurServletPath(request);
String scope = parts[4];
// Check to see if we are doing the bubble
String bubbleStr = request.getParameter("bubble");
String acceptHdr = request.getHeader("Accept");
String contentHdr = request.getContentType();
M_log.debug("accept="+acceptHdr+" bubble="+bubbleStr);
if ( bubbleStr != null && bubbleStr.equals("all") &&
acceptHdr.indexOf(StandardServices.TOOLSETTINGS_FORMAT) < 0 ) {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
doErrorJSON(request, response, null, "Simple format does not allow bubble=all", null);
return;
}
boolean bubble = bubbleStr != null && "GET".equals(request.getMethod());
boolean distinct = bubbleStr != null && "distinct".equals(bubbleStr) && "GET".equals(request.getMethod());
boolean bubbleAll = bubbleStr != null && "all".equals(bubbleStr) && "GET".equals(request.getMethod());
// Check our input and output formats
boolean acceptSimple = acceptHdr == null || acceptHdr.indexOf(StandardServices.TOOLSETTINGS_SIMPLE_FORMAT) >= 0 ;
boolean acceptComplex = acceptHdr == null || acceptHdr.indexOf(StandardServices.TOOLSETTINGS_FORMAT) >= 0 ;
boolean inputSimple = contentHdr == null || contentHdr.indexOf(StandardServices.TOOLSETTINGS_SIMPLE_FORMAT) >= 0 ;
boolean inputComplex = contentHdr != null && contentHdr.indexOf(StandardServices.TOOLSETTINGS_FORMAT) >= 0 ;
M_log.debug("as="+acceptSimple+" ac="+acceptComplex+" is="+inputSimple+" ic="+inputComplex);
// Check the JSON on PUT and check the oauth_body_hash
IMSJSONRequest jsonRequest = null;
JSONObject requestData = null;
if ( "PUT".equals(request.getMethod()) ) {
try {
jsonRequest = new IMSJSONRequest(request);
requestData = (JSONObject) JSONValue.parse(jsonRequest.getPostBody());
} catch (Exception e) {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
doErrorJSON(request,response, jsonRequest, "Could not parse JSON", e);
return;
}
}
String consumer_key = null;
String siteId = null;
String placement_id = null;
Map<String,Object> content = null;
Long contentKey = null;
Map<String,Object> tool = null;
Long toolKey = null;
Map<String,Object> proxyBinding = null;
Long proxyBindingKey = null;
Map<String,Object> deploy = null;
Long deployKey = null;
if ( LTI2Util.SCOPE_LtiLink.equals(scope) || LTI2Util.SCOPE_ToolProxyBinding.equals(scope) ) {
placement_id = parts[5];
M_log.debug("placement_id="+placement_id);
String contentStr = placement_id.substring(8);
contentKey = SakaiBLTIUtil.getLongKey(contentStr);
if ( contentKey >= 0 ) {
// Leave off the siteId - bypass all checking - because we need to
// find the siteId from the content item
content = ltiService.getContentDao(contentKey);
if ( content != null ) siteId = (String) content.get(LTIService.LTI_SITE_ID);
}
if ( content == null || siteId == null ) {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
doErrorJSON(request,response, jsonRequest, "Bad content item", null);
return;
}
toolKey = SakaiBLTIUtil.getLongKey(content.get(LTIService.LTI_TOOL_ID));
if ( toolKey >= 0 ) {
tool = ltiService.getToolDao(toolKey, siteId);
}
if ( tool == null ) {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
doErrorJSON(request,response, jsonRequest, "Bad tool item", null);
return;
}
// Adjust the content items based on the tool items
ltiService.filterContent(content, tool);
// Check settings to see if we are allowed to do this
if (foorm.getLong(content.get(LTIService.LTI_ALLOWOUTCOMES)) > 0 ||
foorm.getLong(tool.get(LTIService.LTI_ALLOWOUTCOMES)) > 0 ) {
// Good news
} else {
response.setStatus(HttpServletResponse.SC_FORBIDDEN);
doErrorJSON(request,response, jsonRequest, "Item does not allow tool settings", null);
return;
}
}
if ( LTI2Util.SCOPE_ToolProxyBinding.equals(scope) || LTI2Util.SCOPE_LtiLink.equals(scope) ) {
proxyBinding = ltiService.getProxyBindingDao(toolKey,siteId);
if ( proxyBinding != null ) {
proxyBindingKey = SakaiBLTIUtil.getLongKey(proxyBinding.get(LTIService.LTI_ID));
}
}
// Retrieve the deployment if needed
if ( LTI2Util.SCOPE_ToolProxy.equals(scope) ) {
consumer_key = parts[5];
deploy = ltiService.getDeployForConsumerKeyDao(consumer_key);
if ( deploy == null ) {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
doErrorJSON(request,response, jsonRequest, "Bad deploy item", null);
return;
}
deployKey = SakaiBLTIUtil.getLongKey(deploy.get(LTIService.LTI_ID));
} else {
if ( tool == null ) {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
doErrorJSON(request,response, jsonRequest, "Bad tool item", null);
return;
}
deployKey = SakaiBLTIUtil.getLongKey(tool.get(LTIService.LTI_DEPLOYMENT_ID));
if ( deployKey >= 0 ) {
deploy = ltiService.getDeployDao(deployKey);
}
if ( deploy == null ) {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
doErrorJSON(request,response, jsonRequest, "Bad deploy item", null);
return;
}
consumer_key = (String) deploy.get(LTIService.LTI_CONSUMERKEY);
}
// Check settings to see if we are allowed to do this
if ( deploy != null ) {
if (foorm.getLong(deploy.get(LTIService.LTI_ALLOWOUTCOMES)) > 0 ) {
// Good news
} else {
response.setStatus(HttpServletResponse.SC_FORBIDDEN);
doErrorJSON(request,response, jsonRequest, "Deployment does not allow tool settings", null);
return;
}
}
// The URLs for the various settings resources
String settingsUrl = SakaiBLTIUtil.getOurServerUrl() + LTI2_PATH + SVC_Settings;
String proxy_url = settingsUrl + "/" + LTI2Util.SCOPE_ToolProxy + "/" + consumer_key;
String binding_url = settingsUrl + "/" + LTI2Util.SCOPE_ToolProxyBinding + "/" + placement_id;
String link_url = settingsUrl + "/" + LTI2Util.SCOPE_LtiLink + "/" + placement_id;
// Load and parse the old settings...
JSONObject link_settings = new JSONObject ();
JSONObject binding_settings = new JSONObject ();
JSONObject proxy_settings = new JSONObject();
if ( content != null ) {
link_settings = LTI2Util.parseSettings((String) content.get(LTIService.LTI_SETTINGS));
}
if ( proxyBinding != null ) {
binding_settings = LTI2Util.parseSettings((String) proxyBinding.get(LTIService.LTI_SETTINGS));
}
if ( deploy != null ) {
proxy_settings = LTI2Util.parseSettings((String) deploy.get(LTIService.LTI_SETTINGS));
}
/*
if ( distinct && link_settings != null && scope.equals(LTI2Util.SCOPE_LtiLink) ) {
Iterator i = link_settings.keySet().iterator();
while ( i.hasNext() ) {
String key = (String) i.next();
if ( binding_settings != null ) binding_settings.remove(key);
if ( proxy_settings != null ) proxy_settings.remove(key);
}
}
if ( distinct && binding_settings != null && scope.equals(LTI2Util.SCOPE_ToolProxyBinding) ) {
Iterator i = binding_settings.keySet().iterator();
while ( i.hasNext() ) {
String key = (String) i.next();
if ( proxy_settings != null ) proxy_settings.remove(key);
}
}
*/
// Get the secret for the request...
String oauth_secret = null;
if ( LTI2Util.SCOPE_LtiLink.equals(scope) ) {
oauth_secret = (String) content.get(LTIService.LTI_SECRET);
if ( oauth_secret == null || oauth_secret.length() < 1 ) {
oauth_secret = (String) tool.get(LTIService.LTI_SECRET);
}
} else if ( LTI2Util.SCOPE_ToolProxyBinding.equals(scope) ) {
oauth_secret = (String) tool.get(LTIService.LTI_SECRET);
} else if ( LTI2Util.SCOPE_ToolProxy.equals(scope) ) {
oauth_secret = (String) deploy.get(LTIService.LTI_SECRET);
} else {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
doErrorJSON(request,response, jsonRequest, "Bad Setttings Scope="+scope, null);
return;
}
// Make sure we have a key and secret
if ( oauth_secret == null || consumer_key == null ) {
response.setStatus(HttpServletResponse.SC_FORBIDDEN);
doErrorJSON(request,response, jsonRequest, "Key or secret is null, key="+consumer_key, null);
return;
}
// Validate the incoming message
Object retval = SakaiBLTIUtil.validateMessage(request, URL, oauth_secret, consumer_key);
if ( retval instanceof String ) {
response.setStatus(HttpServletResponse.SC_FORBIDDEN);
doErrorJSON(request,response, jsonRequest, (String) retval, null);
return;
}
// For a GET request we depend on LTI2Util to do the GET logic
if ( "GET".equals(request.getMethod()) ) {
Object obj = LTI2Util.getSettings(request, scope,
link_settings, binding_settings, proxy_settings,
link_url, binding_url, proxy_url);
if ( obj instanceof String ) {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
doErrorJSON(request,response, jsonRequest, (String) obj, null);
return;
}
if ( acceptComplex ) {
response.setContentType(StandardServices.TOOLSETTINGS_FORMAT);
} else {
response.setContentType(StandardServices.TOOLSETTINGS_SIMPLE_FORMAT);
}
JSONObject jsonResponse = (JSONObject) obj;
response.setStatus(HttpServletResponse.SC_OK);
PrintWriter out = response.getWriter();
M_log.debug("jsonResponse="+jsonResponse);
out.println(jsonResponse.toString());
return;
} else if ( "PUT".equals(request.getMethod()) ) {
// This is assuming the rule that a PUT of the complex settings
// format that there is only one entry in the graph and it is
// the same as our current URL. We parse without much checking.
String settings = null;
try {
JSONArray graph = (JSONArray) requestData.get(LTI2Constants.GRAPH);
if ( graph.size() != 1 ) {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
doErrorJSON(request,response, jsonRequest, "Only one graph entry allowed", null);
return;
}
JSONObject firstChild = (JSONObject) graph.get(0);
JSONObject custom = (JSONObject) firstChild.get(LTI2Constants.CUSTOM);
settings = custom.toString();
} catch (Exception e) {
settings = jsonRequest.getPostBody();
}
retval = null;
if ( LTI2Util.SCOPE_LtiLink.equals(scope) ) {
content.put(LTIService.LTI_SETTINGS, settings);
retval = ltiService.updateContentDao(contentKey,content,siteId);
} else if ( LTI2Util.SCOPE_ToolProxyBinding.equals(scope) ) {
if ( proxyBinding != null ) {
proxyBinding.put(LTIService.LTI_SETTINGS, settings);
retval = ltiService.updateProxyBindingDao(proxyBindingKey,proxyBinding);
} else {
Properties proxyBindingNew = new Properties();
proxyBindingNew.setProperty(LTIService.LTI_SITE_ID, siteId);
proxyBindingNew.setProperty(LTIService.LTI_TOOL_ID, toolKey+"");
proxyBindingNew.setProperty(LTIService.LTI_SETTINGS, settings);
retval = ltiService.insertProxyBindingDao(proxyBindingNew);
M_log.info("inserted ProxyBinding setting="+proxyBindingNew);
}
} else if ( LTI2Util.SCOPE_ToolProxy.equals(scope) ) {
deploy.put(LTIService.LTI_SETTINGS, settings);
retval = ltiService.updateDeployDao(deployKey,deploy);
}
if ( retval instanceof String ||
( retval instanceof Boolean && ((Boolean) retval != Boolean.TRUE) ) ) {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
doErrorJSON(request,response, jsonRequest, (String) retval, null);
return;
}
response.setStatus(HttpServletResponse.SC_OK);
} else {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
doErrorJSON(request,response, jsonRequest, "Method not handled="+request.getMethod(), null);
}
}
	/* IMS JSON version of Errors */
	/**
	 * Logs an error condition and writes an IMS-style JSON error body to the
	 * response via {@link IMSJSONRequest#doErrorJSON}.
	 *
	 * @param request the servlet request being handled
	 * @param response the response the JSON error text is written to
	 * @param json the parsed incoming JSON request, may be null; when present its post body is logged
	 * @param message human-readable description of the failure (logged at info)
	 * @param e the triggering exception, or null if none
	 * @throws java.io.IOException if the response cannot be written
	 */
	public void doErrorJSON(HttpServletRequest request,HttpServletResponse response,
			IMSJSONRequest json, String message, Exception e)
		throws java.io.IOException
	{
		if (e != null) {
			// Full stack trace at error level; the summary lines below are info.
			M_log.error(e.getLocalizedMessage(), e);
		}
		M_log.info(message);
		if ( json != null ) M_log.info(json.postBody);
		String jsonText = IMSJSONRequest.doErrorJSON(request, response, json, message, e);
		M_log.info(jsonText);
	}
	/** Servlet teardown hook; this servlet holds no resources to release. */
	public void destroy() {
	}
}
| |
package edu.thu.ggxx.hellobrazil.wc2014.diagram.parsers;
import java.text.FieldPosition;
import java.text.MessageFormat;
import java.text.ParsePosition;
import org.eclipse.core.runtime.IAdaptable;
import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.gmf.runtime.common.core.command.ICommand;
import org.eclipse.gmf.runtime.common.ui.services.parser.IParserEditStatus;
import org.eclipse.gmf.runtime.common.ui.services.parser.ParserEditStatus;
import org.eclipse.gmf.tooling.runtime.parsers.AbstractAttributeParser;
import org.eclipse.osgi.util.NLS;
import edu.thu.ggxx.hellobrazil.wc2014.diagram.part.Messages;
import edu.thu.ggxx.hellobrazil.wc2014.diagram.part.Wc2014DiagramEditorPlugin;
/**
 * Attribute parser that converts feature values to and from text through
 * {@link MessageFormat} patterns. Three processors are kept lazily: one for
 * display (view), one for producing the initial edit string (editor) and one
 * for parsing user input (edit); each cached processor is invalidated when
 * its pattern changes.
 * 
 * @generated
 */
public class MessageFormatParser extends AbstractAttributeParser {

	/**
	 * Cached default view pattern: "{0} {1} ...", one slot per feature.
	 * 
	 * @generated
	 */
	private String defaultPattern;

	/**
	 * Cached default edit pattern: one slot per editable feature.
	 * 
	 * @generated
	 */
	private String defaultEditablePattern;

	/**
	 * Lazily built formatter for the displayed (view) text.
	 * 
	 * @generated
	 */
	private MessageFormat viewProcessor;

	/**
	 * Lazily built formatter for the initial edit string.
	 * 
	 * @generated
	 */
	private MessageFormat editorProcessor;

	/**
	 * Lazily built formatter used to parse user input.
	 * 
	 * @generated
	 */
	private MessageFormat editProcessor;

	/**
	 * Creates a parser over the given displayed features.
	 * 
	 * @generated
	 */
	public MessageFormatParser(EAttribute[] features) {
		super(features);
	}

	/**
	 * Creates a parser with distinct displayed and editable features.
	 * 
	 * @generated
	 */
	public MessageFormatParser(EAttribute[] features,
			EAttribute[] editableFeatures) {
		super(features, editableFeatures);
	}

	/**
	 * Builds (and caches) the default view pattern "{0} {1} ..." with one
	 * placeholder per feature.
	 * 
	 * @generated NOT : uses StringBuilder instead of StringBuffer
	 */
	protected String getDefaultPattern() {
		if (defaultPattern == null) {
			StringBuilder sb = new StringBuilder();
			for (int i = 0; i < features.length; i++) {
				if (i > 0) {
					sb.append(' ');
				}
				sb.append('{');
				sb.append(i);
				sb.append('}');
			}
			defaultPattern = sb.toString();
		}
		return defaultPattern;
	}

	/**
	 * Sets the view pattern and drops the cached view processor so it is
	 * rebuilt on next use.
	 * 
	 * @generated
	 */
	public void setViewPattern(String viewPattern) {
		super.setViewPattern(viewPattern);
		viewProcessor = null;
	}

	/**
	 * Sets the editor pattern and drops the cached editor processor.
	 * 
	 * @generated
	 */
	public void setEditorPattern(String editorPattern) {
		super.setEditorPattern(editorPattern);
		editorProcessor = null;
	}

	/**
	 * Lazily creates the MessageFormat used to render the view text, falling
	 * back to the default pattern when none is configured.
	 * 
	 * @generated
	 */
	protected MessageFormat getViewProcessor() {
		if (viewProcessor == null) {
			viewProcessor = new MessageFormat(
					getViewPattern() == null ? getDefaultPattern()
							: getViewPattern());
		}
		return viewProcessor;
	}

	/**
	 * Lazily creates the MessageFormat used to produce the edit string,
	 * falling back to the default editable pattern.
	 * 
	 * @generated
	 */
	protected MessageFormat getEditorProcessor() {
		if (editorProcessor == null) {
			editorProcessor = new MessageFormat(
					getEditorPattern() == null ? getDefaultEditablePattern()
							: getEditorPattern());
		}
		return editorProcessor;
	}

	/**
	 * Builds (and caches) the default edit pattern with one placeholder per
	 * editable feature.
	 * 
	 * @generated NOT : uses StringBuilder instead of StringBuffer
	 */
	protected String getDefaultEditablePattern() {
		if (defaultEditablePattern == null) {
			StringBuilder sb = new StringBuilder();
			for (int i = 0; i < editableFeatures.length; i++) {
				if (i > 0) {
					sb.append(' ');
				}
				sb.append('{');
				sb.append(i);
				sb.append('}');
			}
			defaultEditablePattern = sb.toString();
		}
		return defaultEditablePattern;
	}

	/**
	 * Sets the edit pattern and drops the cached edit processor.
	 * 
	 * @generated
	 */
	public void setEditPattern(String editPattern) {
		super.setEditPattern(editPattern);
		editProcessor = null;
	}

	/**
	 * Lazily creates the MessageFormat used to parse user input, falling back
	 * to the default editable pattern.
	 * 
	 * @generated
	 */
	protected MessageFormat getEditProcessor() {
		if (editProcessor == null) {
			editProcessor = new MessageFormat(
					getEditPattern() == null ? getDefaultEditablePattern()
							: getEditPattern());
		}
		return editProcessor;
	}

	/**
	 * Formats the editable feature values of the adapted element into the
	 * string shown in the in-place editor.
	 * 
	 * @generated
	 */
	public String getEditString(IAdaptable adapter, int flags) {
		EObject element = (EObject) adapter.getAdapter(EObject.class);
		// MessageFormat.format requires a StringBuffer-based API.
		return getEditorProcessor().format(getEditableValues(element),
				new StringBuffer(), new FieldPosition(0)).toString();
	}

	/**
	 * Validates user input by attempting to parse it with the edit processor;
	 * reports the failing offset when parsing stops early.
	 * 
	 * @generated NOT : avoids the deprecated Integer(int) constructor
	 */
	public IParserEditStatus isValidEditString(IAdaptable adapter,
			String editString) {
		ParsePosition pos = new ParsePosition(0);
		Object[] values = getEditProcessor().parse(editString, pos);
		if (values == null) {
			return new ParserEditStatus(Wc2014DiagramEditorPlugin.ID,
					IParserEditStatus.UNEDITABLE, NLS.bind(
							Messages.MessageFormatParser_InvalidInputError,
							Integer.valueOf(pos.getErrorIndex())));
		}
		return validateNewValues(values);
	}

	/**
	 * Parses the new string and returns the command that applies the parsed
	 * values to the semantic element.
	 * 
	 * @generated
	 */
	public ICommand getParseCommand(IAdaptable adapter, String newString,
			int flags) {
		Object[] values = getEditProcessor().parse(newString,
				new ParsePosition(0));
		return getParseCommand(adapter, values, flags);
	}

	/**
	 * Formats the displayed feature values of the adapted element into the
	 * label text.
	 * 
	 * @generated
	 */
	public String getPrintString(IAdaptable adapter, int flags) {
		EObject element = (EObject) adapter.getAdapter(EObject.class);
		// MessageFormat.format requires a StringBuffer-based API.
		return getViewProcessor().format(getValues(element),
				new StringBuffer(), new FieldPosition(0)).toString();
	}
}
| |
/*
Copyright (c) 2007 Thomas Boerkel, All Rights Reserved
Disclaimer:
===========
This code is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This code is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
*/
package hudson.util.jna;
import com.sun.jna.ptr.IntByReference;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.TreeMap;
import java.util.TreeSet;
/**
 * Represents a Win32 registry key accessed through the Advapi32 JNA binding.
 *
 * <p>Instances wrap a native HKEY handle; {@link #dispose()} (or
 * try-with-resources via {@link #close()}) releases it.
 *
 * @author Kohsuke Kawaguchi
 */
public class RegistryKey implements AutoCloseable {
    /**
     * 32bit Windows key value; 0 once the key has been disposed.
     */
    private int handle;
    private final RegistryKey root;
    private final String path;

    /**
     * Constructor for the root key.
     */
    private RegistryKey(int handle) {
        this.handle = handle;
        root = this;
        path = "";
    }

    /** Constructor for a sub key opened from an ancestor. */
    private RegistryKey(RegistryKey ancestor, String path, int handle) {
        this.handle = handle;
        this.root = ancestor.root;
        this.path = combine(ancestor.path, path);
    }

    /** Joins two registry path segments with a backslash, skipping empty parts. */
    private static String combine(String a, String b) {
        if (a.length() == 0) return b;
        if (b.length() == 0) return a;
        return a + '\\' + b;
    }

    /**
     * Converts a Windows REG_SZ buffer (UTF-16LE with a trailing NUL) to a
     * Java String, dropping the two terminating zero bytes.
     *
     * @param buf buffer
     * @return decoded string
     */
    static String convertBufferToString(byte[] buf) {
        return new String(buf, 0, buf.length - 2, StandardCharsets.UTF_16LE);
    }

    /**
     * Converts a Windows DWORD buffer (little-endian) to an int.
     *
     * @param buf buffer
     * @return int
     */
    static int convertBufferToInt(byte[] buf) {
        return ((buf[0] & 0xff) + ((buf[1] & 0xff) << 8) + ((buf[2] & 0xff) << 16) + ((buf[3] & 0xff) << 24));
    }

    /** Reads a REG_SZ value as a String. */
    public String getStringValue(String valueName) {
        return convertBufferToString(getValue(valueName));
    }

    /**
     * Read an int value.
     */
    public int getIntValue(String valueName) {
        return convertBufferToInt(getValue(valueName));
    }

    /**
     * Reads a raw value, retrying with a correctly sized buffer when the
     * first query reports ERROR_MORE_DATA.
     */
    private byte[] getValue(String valueName) {
        IntByReference pType, lpcbData;
        byte[] lpData = new byte[1];
        pType = new IntByReference();
        lpcbData = new IntByReference();
        OUTER:
        while (true) {
            int r = Advapi32.INSTANCE.RegQueryValueEx(handle, valueName, null, pType, lpData, lpcbData);
            switch (r) {
            case WINERROR.ERROR_MORE_DATA:
                // lpcbData now holds the required size; grow and retry.
                lpData = new byte[lpcbData.getValue()];
                continue OUTER;
            case WINERROR.ERROR_SUCCESS:
                return lpData;
            default:
                throw new JnaException(r);
            }
        }
    }

    /** Deletes a single value under this key. */
    public void deleteValue(String valueName) {
        check(Advapi32.INSTANCE.RegDeleteValue(handle, valueName));
    }

    /** Throws {@link JnaException} unless the Win32 result code is success. */
    private void check(int r) {
        if (r != WINERROR.ERROR_SUCCESS)
            throw new JnaException(r);
    }

    /**
     * Writes a String value.
     */
    public void setValue(String name, String value) {
        byte[] bytes = value.getBytes(StandardCharsets.UTF_16LE);
        // REG_SZ data must end with a UTF-16 NUL (two zero bytes). Copy only
        // the string bytes; the final two array slots stay 0. (Copying
        // bytes.length + 2 bytes, as before, overran the source array and
        // threw ArrayIndexOutOfBoundsException on every call.)
        byte[] with0 = new byte[bytes.length + 2];
        System.arraycopy(bytes, 0, with0, 0, bytes.length);
        check(Advapi32.INSTANCE.RegSetValueEx(handle, name, 0, WINNT.REG_SZ, with0, with0.length));
    }

    /**
     * Writes a DWORD value.
     */
    public void setValue(String name, int value) {
        // DWORDs are stored little-endian.
        byte[] data = new byte[4];
        data[0] = (byte) (value & 0xff);
        data[1] = (byte) ((value >> 8) & 0xff);
        data[2] = (byte) ((value >> 16) & 0xff);
        data[3] = (byte) ((value >> 24) & 0xff);
        check(Advapi32.INSTANCE.RegSetValueEx(handle, name, 0, WINNT.REG_DWORD, data, data.length));
    }

    /**
     * Does a specified value exist?
     */
    public boolean valueExists(String name) {
        IntByReference pType, lpcbData;
        byte[] lpData = new byte[1];
        pType = new IntByReference();
        lpcbData = new IntByReference();
        OUTER:
        while (true) {
            int r = Advapi32.INSTANCE.RegQueryValueEx(handle, name, null, pType, lpData, lpcbData);
            switch (r) {
            case WINERROR.ERROR_MORE_DATA:
                lpData = new byte[lpcbData.getValue()];
                continue OUTER;
            case WINERROR.ERROR_FILE_NOT_FOUND:
                return false;
            case WINERROR.ERROR_SUCCESS:
                return true;
            default:
                throw new JnaException(r);
            }
        }
    }

    /**
     * Deletes this key (and disposes the key.)
     */
    public void delete() {
        check(Advapi32.INSTANCE.RegDeleteKey(handle, path));
        dispose();
    }

    /**
     * Get all sub keys of a key.
     *
     * @return sorted set of all sub key names
     */
    public Collection<String> getSubKeys() {
        WINBASE.FILETIME lpftLastWriteTime;
        TreeSet<String> subKeys = new TreeSet<>();
        char[] lpName = new char[256];
        IntByReference lpcName = new IntByReference(256);
        lpftLastWriteTime = new WINBASE.FILETIME();
        int dwIndex = 0;
        while (Advapi32.INSTANCE.RegEnumKeyEx(handle, dwIndex, lpName, lpcName, null,
                null, null, lpftLastWriteTime) == WINERROR.ERROR_SUCCESS) {
            subKeys.add(new String(lpName, 0, lpcName.getValue()));
            // RegEnumKeyEx overwrites lpcName with the name length; reset the
            // available buffer size before the next iteration.
            lpcName.setValue(256);
            dwIndex++;
        }
        return subKeys;
    }

    /** Opens a sub key with full access (KEY_ALL_ACCESS). */
    public RegistryKey open(String subKeyName) {
        return open(subKeyName, 0xF003F/*KEY_ALL_ACCESS*/);
    }

    /** Opens a sub key read-only (KEY_READ). */
    public RegistryKey openReadonly(String subKeyName) {
        return open(subKeyName, 0x20019/*KEY_READ*/);
    }

    /** Opens a sub key with the given access mask. */
    public RegistryKey open(String subKeyName, int access) {
        IntByReference pHandle = new IntByReference();
        check(Advapi32.INSTANCE.RegOpenKeyEx(handle, subKeyName, 0, access, pHandle));
        return new RegistryKey(this, subKeyName, pHandle.getValue());
    }

    /**
     * Get all values under a key.
     *
     * @return case-insensitive TreeMap with name and value pairs
     *         (REG_SZ as String, REG_DWORD as Integer; other types skipped)
     */
    public TreeMap<String, Object> getValues() {
        int dwIndex, result;
        char[] lpValueName;
        byte[] lpData;
        IntByReference lpcchValueName, lpType, lpcbData;
        String name;
        TreeMap<String, Object> values = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
        lpValueName = new char[16384];
        lpcchValueName = new IntByReference(16384);
        lpType = new IntByReference();
        lpData = new byte[1];
        lpcbData = new IntByReference();
        lpcbData.setValue(0);
        dwIndex = 0;
        OUTER:
        while (true) {
            result = Advapi32.INSTANCE.RegEnumValue(handle, dwIndex, lpValueName, lpcchValueName, null,
                    lpType, lpData, lpcbData);
            switch (result) {
            case WINERROR.ERROR_NO_MORE_ITEMS:
                return values;
            case WINERROR.ERROR_MORE_DATA:
                // Grow the data buffer and retry the same index.
                lpData = new byte[lpcbData.getValue()];
                lpcchValueName = new IntByReference(16384);
                continue OUTER;
            case WINERROR.ERROR_SUCCESS:
                name = new String(lpValueName, 0, lpcchValueName.getValue());
                switch (lpType.getValue()) {
                case WINNT.REG_SZ:
                    values.put(name, convertBufferToString(lpData));
                    break;
                case WINNT.REG_DWORD:
                    values.put(name, convertBufferToInt(lpData));
                    break;
                default:
                    break; // not supported yet
                }
                break;
            default:
                check(result);
            }
            dwIndex++;
            lpcbData.setValue(0);
        }
    }

    /** Safety net: release the native handle if the caller forgot to. */
    @Override
    protected void finalize() throws Throwable {
        super.finalize();
        dispose();
    }

    /** Releases the native handle; safe to call more than once. */
    public void dispose() {
        if (handle != 0)
            Advapi32.INSTANCE.RegCloseKey(handle);
        handle = 0;
    }

    @Override
    public void close() {
        dispose();
    }

    //
    // Root keys
    //
    public static final RegistryKey CLASSES_ROOT = new RegistryKey(0x80000000);
    public static final RegistryKey CURRENT_USER = new RegistryKey(0x80000001);
    public static final RegistryKey LOCAL_MACHINE = new RegistryKey(0x80000002);
    public static final RegistryKey USERS = new RegistryKey(0x80000003);
}
| |
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts;
import java.util.Collections;
import java.util.List;
import org.eclipse.draw2d.IFigure;
import org.eclipse.draw2d.Label;
import org.eclipse.draw2d.geometry.Point;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.transaction.RunnableWithResult;
import org.eclipse.gef.AccessibleEditPart;
import org.eclipse.gef.EditPolicy;
import org.eclipse.gef.Request;
import org.eclipse.gef.requests.DirectEditRequest;
import org.eclipse.gef.tools.DirectEditManager;
import org.eclipse.gmf.runtime.common.ui.services.parser.IParser;
import org.eclipse.gmf.runtime.common.ui.services.parser.IParserEditStatus;
import org.eclipse.gmf.runtime.common.ui.services.parser.ParserEditStatus;
import org.eclipse.gmf.runtime.common.ui.services.parser.ParserOptions;
import org.eclipse.gmf.runtime.diagram.ui.editparts.CompartmentEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editparts.ITextAwareEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editpolicies.LabelDirectEditPolicy;
import org.eclipse.gmf.runtime.diagram.ui.l10n.DiagramColorRegistry;
import org.eclipse.gmf.runtime.diagram.ui.label.ILabelDelegate;
import org.eclipse.gmf.runtime.diagram.ui.label.WrappingLabelDelegate;
import org.eclipse.gmf.runtime.diagram.ui.requests.RequestConstants;
import org.eclipse.gmf.runtime.diagram.ui.tools.TextDirectEditManager;
import org.eclipse.gmf.runtime.draw2d.ui.figures.WrappingLabel;
import org.eclipse.gmf.runtime.emf.core.util.EObjectAdapter;
import org.eclipse.gmf.runtime.emf.ui.services.parser.ISemanticParser;
import org.eclipse.gmf.runtime.notation.FontStyle;
import org.eclipse.gmf.runtime.notation.NotationPackage;
import org.eclipse.gmf.runtime.notation.View;
import org.eclipse.gmf.tooling.runtime.directedit.TextDirectEditManager2;
import org.eclipse.gmf.tooling.runtime.draw2d.labels.SimpleLabelDelegate;
import org.eclipse.gmf.tooling.runtime.edit.policies.DefaultNodeLabelDragPolicy;
import org.eclipse.gmf.tooling.runtime.edit.policies.labels.IRefreshableFeedbackEditPolicy;
import org.eclipse.jface.text.contentassist.IContentAssistProcessor;
import org.eclipse.jface.viewers.ICellEditorValidator;
import org.eclipse.swt.SWT;
import org.eclipse.swt.accessibility.AccessibleEvent;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.FontData;
import org.eclipse.swt.graphics.Image;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.MediatorFigureSelectionListener;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies.EsbTextSelectionEditPolicy;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbVisualIDRegistry;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbElementTypes;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbParserProvider;
/**
* @generated
*/
public class BuilderMediatorDescriptionEditPart extends CompartmentEditPart
implements ITextAwareEditPart {
	/**
	 * Visual ID of this description label within the ESB diagram.
	 * 
	 * @generated
	 */
	public static final int VISUAL_ID = 5189;
	/**
	 * Manager used for in-place (direct) editing of the label text.
	 * 
	 * @generated
	 */
	private DirectEditManager manager;
	/**
	 * Parser converting between the semantic element and the label text.
	 * 
	 * @generated
	 */
	private IParser parser;
	/**
	 * Semantic elements currently registered as listener filters.
	 * 
	 * @generated
	 */
	private List<?> parserElements;
	/**
	 * Fallback text captured from the figure when it was installed.
	 * 
	 * @generated
	 */
	private String defaultText;
	/**
	 * Delegate used when the figure is neither a WrappingLabel nor a Label.
	 * 
	 * @generated
	 */
	private ILabelDelegate labelDelegate;
	/**
	 * Creates the edit part for the given notation view.
	 * 
	 * @generated
	 */
	public BuilderMediatorDescriptionEditPart(View view) {
		super(view);
	}
	/**
	 * Installs selection-feedback, direct-edit and label-drag policies.
	 * 
	 * @generated
	 */
	protected void createDefaultEditPolicies() {
		super.createDefaultEditPolicies();
		installEditPolicy(EditPolicy.SELECTION_FEEDBACK_ROLE,
				new EsbTextSelectionEditPolicy());
		installEditPolicy(EditPolicy.DIRECT_EDIT_ROLE,
				new LabelDirectEditPolicy());
		installEditPolicy(EditPolicy.PRIMARY_DRAG_ROLE,
				new DefaultNodeLabelDragPolicy());
	}
	/**
	 * Reads the text from a WrappingLabel, a Label, or else the label delegate.
	 * 
	 * @generated
	 */
	protected String getLabelTextHelper(IFigure figure) {
		if (figure instanceof WrappingLabel) {
			return ((WrappingLabel) figure).getText();
		} else if (figure instanceof Label) {
			return ((Label) figure).getText();
		} else {
			return getLabelDelegate().getText();
		}
	}
	/**
	 * Writes the text into a WrappingLabel, a Label, or else the label delegate.
	 * 
	 * @generated
	 */
	protected void setLabelTextHelper(IFigure figure, String text) {
		if (figure instanceof WrappingLabel) {
			((WrappingLabel) figure).setText(text);
		} else if (figure instanceof Label) {
			((Label) figure).setText(text);
		} else {
			getLabelDelegate().setText(text);
		}
	}
	/**
	 * Reads the icon from a WrappingLabel, a Label, or else the label delegate.
	 * 
	 * @generated
	 */
	protected Image getLabelIconHelper(IFigure figure) {
		if (figure instanceof WrappingLabel) {
			return ((WrappingLabel) figure).getIcon();
		} else if (figure instanceof Label) {
			return ((Label) figure).getIcon();
		} else {
			return getLabelDelegate().getIcon(0);
		}
	}
/**
* @generated
*/
protected void setLabelIconHelper(IFigure figure, Image icon) {
if (figure instanceof WrappingLabel) {
((WrappingLabel) figure).setIcon(icon);
return;
} else if (figure instanceof Label) {
((Label) figure).setIcon(icon);
return;
} else {
getLabelDelegate().setIcon(icon, 0);
}
}
	/**
	 * Installs the figure for this label, wiring a mediator selection listener
	 * and re-registering visuals so the new figure is refreshed.
	 * 
	 * @generated NOT
	 */
	public void setLabel(WrappingLabel figure) {
		figure.addMouseListener(new MediatorFigureSelectionListener(this
				.getParent()));
		// NOTE(review): unregister/register ordering matters — the old figure
		// must be detached before the new one is installed and refreshed.
		unregisterVisuals();
		setFigure(figure);
		defaultText = getLabelTextHelper(figure);
		registerVisuals();
		refreshVisuals();
	}
	/**
	 * Labels have no model children.
	 * 
	 * @generated
	 */
	@SuppressWarnings("rawtypes")
	protected List getModelChildren() {
		return Collections.EMPTY_LIST;
	}
	/**
	 * Labels have no child edit parts to resolve by hint.
	 * 
	 * @generated
	 */
	public IGraphicalEditPart getChildBySemanticHint(String semanticHint) {
		return null;
	}
	/**
	 * Returns the semantic element whose attribute this label displays.
	 * 
	 * @generated
	 */
	protected EObject getParserElement() {
		return resolveSemanticElement();
	}
	/**
	 * This label shows no icon.
	 * 
	 * @generated
	 */
	protected Image getLabelIcon() {
		return null;
	}
	/**
	 * Computes the display text via the parser, falling back to the text the
	 * figure carried when it was installed.
	 * 
	 * @generated
	 */
	protected String getLabelText() {
		String text = null;
		EObject parserElement = getParserElement();
		if (parserElement != null && getParser() != null) {
			text = getParser().getPrintString(
					new EObjectAdapter(parserElement),
					getParserOptions().intValue());
		}
		if (text == null || text.length() == 0) {
			text = defaultText;
		}
		return text;
	}
	/**
	 * Pushes new text into the figure and refreshes selection feedback.
	 * 
	 * @generated
	 */
	public void setLabelText(String text) {
		setLabelTextHelper(getFigure(), text);
		refreshSelectionFeedback();
	}
	/**
	 * Returns the text shown in the direct-edit cell editor; empty when no
	 * parser or element is available.
	 * 
	 * @generated
	 */
	public String getEditText() {
		if (getParserElement() == null || getParser() == null) {
			return ""; //$NON-NLS-1$
		}
		return getParser().getEditString(
				new EObjectAdapter(getParserElement()),
				getParserOptions().intValue());
	}
	/**
	 * The label is editable only when a parser is available.
	 * 
	 * @generated
	 */
	protected boolean isEditable() {
		return getParser() != null;
	}
	/**
	 * Returns a validator that asks the parser whether a candidate String is a
	 * valid edit value, running the check exclusively on the editing domain.
	 * 
	 * @generated
	 */
	public ICellEditorValidator getEditTextValidator() {
		return new ICellEditorValidator() {
			public String isValid(final Object value) {
				if (value instanceof String) {
					final EObject element = getParserElement();
					final IParser parser = getParser();
					try {
						// Parser validation may read the model, so it must run
						// exclusively on the transactional editing domain.
						IParserEditStatus valid = (IParserEditStatus) getEditingDomain()
								.runExclusive(
										new RunnableWithResult.Impl<IParserEditStatus>() {
											public void run() {
												setResult(parser
														.isValidEditString(
																new EObjectAdapter(
																		element),
																(String) value));
											}
										});
						return valid.getCode() == ParserEditStatus.EDITABLE ? null
								: valid.getMessage();
					} catch (InterruptedException ie) {
						ie.printStackTrace();
					}
				}
				// shouldn't get here
				return null;
			}
		};
	}
	/**
	 * Returns the parser's content-assist processor for in-place editing, or
	 * null when no parser or element is available.
	 * 
	 * @generated
	 */
	public IContentAssistProcessor getCompletionProcessor() {
		if (getParserElement() == null || getParser() == null) {
			return null;
		}
		return getParser().getCompletionProcessor(
				new EObjectAdapter(getParserElement()));
	}
	/**
	 * No special parser options are used for this label.
	 * 
	 * @generated
	 */
	public ParserOptions getParserOptions() {
		return ParserOptions.NONE;
	}
	/**
	 * Lazily obtains the parser for the BuilderMediator description label from
	 * the ESB parser provider.
	 * 
	 * @generated
	 */
	public IParser getParser() {
		if (parser == null) {
			parser = EsbParserProvider
					.getParser(
							EsbElementTypes.BuilderMediator_3591,
							getParserElement(),
							EsbVisualIDRegistry
									.getType(org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.BuilderMediatorDescriptionEditPart.VISUAL_ID));
		}
		return parser;
	}
	/**
	 * Lazily creates the direct-edit manager backed by a text cell editor.
	 * 
	 * @generated
	 */
	protected DirectEditManager getManager() {
		if (manager == null) {
			setManager(new TextDirectEditManager2(this, null,
					EsbEditPartFactory.getTextCellEditorLocator(this)));
		}
		return manager;
	}
	/**
	 * Installs the direct-edit manager to use for in-place editing.
	 * 
	 * @generated
	 */
	protected void setManager(DirectEditManager manager) {
		this.manager = manager;
	}
	/**
	 * Opens the in-place editor with no initial location or character.
	 * 
	 * @generated
	 */
	protected void performDirectEdit() {
		getManager().show();
	}
	/**
	 * Opens the in-place editor at the given mouse location, when the manager
	 * supports location-aware editing.
	 * 
	 * @generated
	 */
	protected void performDirectEdit(Point eventLocation) {
		if (getManager().getClass() == TextDirectEditManager2.class) {
			((TextDirectEditManager2) getManager()).show(eventLocation
					.getSWTPoint());
		}
	}
	/**
	 * Opens the in-place editor seeded with the character the user typed,
	 * falling back to a plain open when the manager cannot accept one.
	 * 
	 * @generated
	 */
	private void performDirectEdit(char initialCharacter) {
		if (getManager() instanceof TextDirectEditManager) {
			((TextDirectEditManager) getManager()).show(initialCharacter);
		} else //
		if (getManager() instanceof TextDirectEditManager2) {
			((TextDirectEditManager2) getManager()).show(initialCharacter);
		} else //
		{
			performDirectEdit();
		}
	}
	/**
	 * Dispatches a direct-edit request to the appropriate overload (initial
	 * character, mouse location, or plain), running exclusively on the editing
	 * domain because the label text may be read from the model.
	 * 
	 * @generated
	 */
	protected void performDirectEditRequest(Request request) {
		final Request theRequest = request;
		try {
			getEditingDomain().runExclusive(new Runnable() {
				public void run() {
					if (isActive() && isEditable()) {
						if (theRequest
								.getExtendedData()
								.get(RequestConstants.REQ_DIRECTEDIT_EXTENDEDDATA_INITIAL_CHAR) instanceof Character) {
							Character initialChar = (Character) theRequest
									.getExtendedData()
									.get(RequestConstants.REQ_DIRECTEDIT_EXTENDEDDATA_INITIAL_CHAR);
							performDirectEdit(initialChar.charValue());
						} else if ((theRequest instanceof DirectEditRequest)
								&& (getEditText().equals(getLabelText()))) {
							DirectEditRequest editRequest = (DirectEditRequest) theRequest;
							performDirectEdit(editRequest.getLocation());
						} else {
							performDirectEdit();
						}
					}
				}
			});
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
	}
	/**
	 * Refreshes label text, icon and all font-related presentation aspects.
	 * 
	 * @generated
	 */
	protected void refreshVisuals() {
		super.refreshVisuals();
		refreshLabel();
		refreshFont();
		refreshFontColor();
		refreshUnderline();
		refreshStrikeThrough();
	}
	/**
	 * Re-applies the parsed text and icon to the figure and refreshes
	 * selection feedback.
	 * 
	 * @generated
	 */
	protected void refreshLabel() {
		setLabelTextHelper(getFigure(), getLabelText());
		setLabelIconHelper(getFigure(), getLabelIcon());
		refreshSelectionFeedback();
	}
	/**
	 * Applies the notation FontStyle's underline flag to a WrappingLabel figure.
	 * 
	 * @generated
	 */
	protected void refreshUnderline() {
		FontStyle style = (FontStyle) getFontStyleOwnerView().getStyle(
				NotationPackage.eINSTANCE.getFontStyle());
		if (style != null && getFigure() instanceof WrappingLabel) {
			((WrappingLabel) getFigure()).setTextUnderline(style.isUnderline());
		}
	}
	/**
	 * Applies the notation FontStyle's strike-through flag to a WrappingLabel
	 * figure.
	 * 
	 * @generated
	 */
	protected void refreshStrikeThrough() {
		FontStyle style = (FontStyle) getFontStyleOwnerView().getStyle(
				NotationPackage.eINSTANCE.getFontStyle());
		if (style != null && getFigure() instanceof WrappingLabel) {
			((WrappingLabel) getFigure()).setTextStrikeThrough(style
					.isStrikeThrough());
		}
	}
	/**
	 * Rebuilds the figure font from the notation FontStyle (name, height,
	 * bold/italic flags).
	 * 
	 * @generated
	 */
	protected void refreshFont() {
		FontStyle style = (FontStyle) getFontStyleOwnerView().getStyle(
				NotationPackage.eINSTANCE.getFontStyle());
		if (style != null) {
			FontData fontData = new FontData(style.getFontName(),
					style.getFontHeight(), (style.isBold() ? SWT.BOLD
							: SWT.NORMAL)
							| (style.isItalic() ? SWT.ITALIC : SWT.NORMAL));
			setFont(fontData);
		}
	}
	/**
	 * Asks the drag and selection-feedback policies to redraw their feedback.
	 * 
	 * @generated
	 */
	private void refreshSelectionFeedback() {
		requestEditPolicyFeedbackRefresh(EditPolicy.PRIMARY_DRAG_ROLE);
		requestEditPolicyFeedbackRefresh(EditPolicy.SELECTION_FEEDBACK_ROLE);
	}
	/**
	 * Refreshes feedback for the given policy key when the installed policy
	 * supports it.
	 * 
	 * @generated
	 */
	private void requestEditPolicyFeedbackRefresh(String editPolicyKey) {
		Object editPolicy = getEditPolicy(editPolicyKey);
		if (editPolicy instanceof IRefreshableFeedbackEditPolicy) {
			((IRefreshableFeedbackEditPolicy) editPolicy).refreshFeedback();
		}
	}
	/**
	 * Applies the given color as the figure's foreground (text) color.
	 * 
	 * @generated
	 */
	protected void setFontColor(Color color) {
		getFigure().setForegroundColor(color);
	}
	/**
	 * For semantic parsers, listens to every element the parser reads (filters
	 * named "SemanticModel0", "SemanticModel1", ...); otherwise defers to the
	 * default single-element listener.
	 * 
	 * @generated
	 */
	protected void addSemanticListeners() {
		if (getParser() instanceof ISemanticParser) {
			EObject element = resolveSemanticElement();
			parserElements = ((ISemanticParser) getParser())
					.getSemanticElementsBeingParsed(element);
			for (int i = 0; i < parserElements.size(); i++) {
				addListenerFilter(
						"SemanticModel" + i, this, (EObject) parserElements.get(i)); //$NON-NLS-1$
			}
		} else {
			super.addSemanticListeners();
		}
	}
	/**
	 * Removes the per-element listener filters added by addSemanticListeners,
	 * or defers to the default removal when none were registered.
	 * 
	 * @generated
	 */
	protected void removeSemanticListeners() {
		if (parserElements != null) {
			for (int i = 0; i < parserElements.size(); i++) {
				removeListenerFilter("SemanticModel" + i); //$NON-NLS-1$
			}
		} else {
			super.removeSemanticListeners();
		}
	}
	/**
	 * Lazily creates an accessibility adapter that reports the label text as
	 * the part's name.
	 * 
	 * @generated
	 */
	protected AccessibleEditPart getAccessibleEditPart() {
		if (accessibleEP == null) {
			accessibleEP = new AccessibleGraphicalEditPart() {
				public void getName(AccessibleEvent e) {
					e.result = getLabelTextHelper(getFigure());
				}
			};
		}
		return accessibleEP;
	}
	/**
	 * The view whose FontStyle drives this label's font settings.
	 * 
	 * @generated
	 */
	private View getFontStyleOwnerView() {
		return getPrimaryView();
	}
	/**
	 * Lazily wraps the figure in the matching label delegate (wrapping or
	 * simple), used when the figure type is not handled directly.
	 * 
	 * @generated
	 */
	private ILabelDelegate getLabelDelegate() {
		if (labelDelegate == null) {
			IFigure label = getFigure();
			if (label instanceof WrappingLabel) {
				labelDelegate = new WrappingLabelDelegate((WrappingLabel) label);
			} else {
				labelDelegate = new SimpleLabelDelegate((Label) label);
			}
		}
		return labelDelegate;
	}
/**
 * Adapts this edit part to ILabelDelegate; all other keys are delegated
 * to the superclass.
 * @param key the requested adapter type
 * @return the adapter instance, or null if unsupported
 * @generated
 */
@Override
public Object getAdapter(Class key) {
if (ILabelDelegate.class.equals(key)) {
return getLabelDelegate();
}
return super.getAdapter(key);
}
/**
 * In addition to the default notation listeners, listens to the primary
 * view under the "PrimaryView" filter so style changes reach
 * handleNotificationEvent().
 * @generated
 */
protected void addNotationalListeners() {
super.addNotationalListeners();
addListenerFilter("PrimaryView", this, getPrimaryView()); //$NON-NLS-1$
}
/**
 * Removes the "PrimaryView" listener filter added by
 * addNotationalListeners(), then the default notation listeners.
 * @generated
 */
protected void removeNotationalListeners() {
super.removeNotationalListeners();
removeListenerFilter("PrimaryView"); //$NON-NLS-1$
}
/**
 * Reacts to notation/semantic model changes: font-colour, underline,
 * strike-through and font attribute changes trigger the matching visual
 * refresh; any other event is offered to the parser, refreshing the label
 * text (and re-hooking semantic listeners) when the parser says the event
 * affects it. Always chains to the superclass afterwards.
 * @param event the model change notification
 * @generated
 */
protected void handleNotificationEvent(Notification event) {
Object feature = event.getFeature();
if (NotationPackage.eINSTANCE.getFontStyle_FontColor().equals(feature)) {
// Colour values are stored as integers in the notation model.
Integer c = (Integer) event.getNewValue();
setFontColor(DiagramColorRegistry.getInstance().getColor(c));
} else if (NotationPackage.eINSTANCE.getFontStyle_Underline().equals(
feature)) {
refreshUnderline();
} else if (NotationPackage.eINSTANCE.getFontStyle_StrikeThrough()
.equals(feature)) {
refreshStrikeThrough();
} else if (NotationPackage.eINSTANCE.getFontStyle_FontHeight().equals(
feature)
|| NotationPackage.eINSTANCE.getFontStyle_FontName().equals(
feature)
|| NotationPackage.eINSTANCE.getFontStyle_Bold()
.equals(feature)
|| NotationPackage.eINSTANCE.getFontStyle_Italic().equals(
feature)) {
refreshFont();
} else {
if (getParser() != null
&& getParser().isAffectingEvent(event,
getParserOptions().intValue())) {
refreshLabel();
}
if (getParser() instanceof ISemanticParser) {
ISemanticParser modelParser = (ISemanticParser) getParser();
if (modelParser.areSemanticElementsAffected(null, event)) {
// The set of parsed elements may have changed; re-hook listeners.
removeSemanticListeners();
if (resolveSemanticElement() != null) {
addSemanticListeners();
}
refreshLabel();
}
}
}
super.handleNotificationEvent(event);
}
/**
 * Intentionally returns null: the parent edit part is expected to supply
 * the label figure via setLabel().
 * @return always null
 * @generated
 */
protected IFigure createFigure() {
// Parent should assign one using setLabel() method
return null;
}
}
| |
package com.esri.ges.manager.routes.internal;
import java.io.ByteArrayOutputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.JsonParser;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.type.TypeReference;
import com.esri.ges.datastore.agsconnection.KeyValue;
import com.esri.ges.datastore.agsconnection.Location;
import com.esri.ges.datastore.agsconnection.NamedGeometry;
import com.esri.ges.datastore.agsconnection.SolvedRoute;
import com.esri.ges.spatial.Geometry;
import com.esri.ges.spatial.GeometryException;
import com.esri.ges.spatial.Spatial;
/**
 * Thin HTTP client for an ArcGIS Network Analyst route-solver service.
 * Builds the solve request (stops as an esri "features" JSON payload),
 * POSTs it, and parses the reply back into {@link SolvedRoute} objects
 * using the supplied {@link Spatial} geometry factory.
 */
public class NetworkAnalystServerConnection
{
    private static final Log log = LogFactory.getLog(NetworkAnalystServerConnection.class);

    // JSON paths (consumed by getNodeFollowingPath) into the NA reply.
    final static Object[] geometryPath = new Object[] { "geometry" };
    final static Object[] attributesPath = new Object[] { "attributes" };
    final static Object[] wkidPath = new Object[] { "spatialReference", "wkid" };

    // Geometry factory used to build points/shapes from the JSON reply.
    private Spatial spatial;
    // Base URL of the Network Analyst server; solveRoute() appends a path to it.
    private URL url;
    // Timeout in milliseconds for the HTTP POST to the solver.
    private static final int defaultTimeout = 30000;

    /**
     * @param spatial geometry factory used to parse geometries out of replies
     * @param url     base URL of the Network Analyst server
     */
    public NetworkAnalystServerConnection(Spatial spatial, URL url)
    {
        this.spatial = spatial;
        this.url = url;
    }

    /**
     * Solves a route through the given stops.
     *
     * @param path      service path appended to the base URL
     * @param locations ordered stops to route through
     * @param optimize  when true, asks the solver to find the best stop sequence
     *                  (first and last stops are always preserved)
     * @param startTime optional route start time; omitted from the request when null
     * @return the parsed route, or null on any failure (the error is logged)
     */
    public SolvedRoute solveRoute(String path, List<Location> locations, boolean optimize, Date startTime)
    {
        StringBuilder urlString = new StringBuilder();
        urlString.append(url.toExternalForm());
        urlString.append(path);
        Collection<KeyValue> params = new ArrayList<KeyValue>();
        params.add(new KeyValue("f", "json"));
        params.add(new KeyValue("stops", convertToFeaturesJson(locations, false)));
        params.add(new KeyValue("ignoreInvalidLocations", "false"));
        params.add(new KeyValue("returnRoutes", "true"));
        params.add(new KeyValue("returnStops", "true"));
        params.add(new KeyValue("outputLines", "esriNAOutputLineTrueShape"));
        params.add(new KeyValue("preserveFirstStop", "true"));
        params.add(new KeyValue("preserveLastStop", "true"));
        if (startTime != null)
        {
            params.add(new KeyValue("startTime", Long.toString(startTime.getTime())));
        }
        params.add(new KeyValue("findBestSequence", Boolean.toString(optimize)));
        try
        {
            // Use locals here: the old code cached the Http client in a field it
            // never reused, and shadowed the 'url' field with a local of the
            // same name.
            URL solveUrl = new URL(urlString.toString());
            Http http = new Http();
            String reply = http.post(solveUrl, params, defaultTimeout);
            if (reply != null)
            {
                return parseRouteSolverReply(reply);
            }
            log.error("Did not get back a valid response from NA solve call.");
        }
        catch (Exception e)
        {
            log.error("Failed trying to send request to NA.", e);
        }
        return null;
    }

    /**
     * Parses the solver's JSON reply into a SolvedRoute (stops + route shapes).
     * Any parse failure is rethrown as a RuntimeException.
     */
    private SolvedRoute parseRouteSolverReply(String reply)
    {
        SolvedRoute solvedRoute = null;
        ObjectMapper mapper = new ObjectMapper();
        try
        {
            JsonNode response = mapper.readTree(reply);
            List<Location> locations = processStopsFromReply(getNodeFollowingPath(response, new Object[] { "stops" }));
            List<NamedGeometry> shapes = getGeometriesFromNAReply(getNodeFollowingPath(response, new Object[] { "routes" }));
            solvedRoute = new SolvedRoute();
            solvedRoute.setLocations(locations);
            solvedRoute.setRoutes(shapes);
        }
        catch (Exception e)
        {
            throw new RuntimeException(e);
        }
        return solvedRoute;
    }

    /**
     * Converts the "routes" node of the reply into named geometries.
     *
     * @param jsonNode the "routes" node; null yields null (and an error log)
     * @return one NamedGeometry per route feature, named by its "Name" attribute
     * @throws GeometryException if a geometry cannot be built from the JSON
     */
    private List<NamedGeometry> getGeometriesFromNAReply(JsonNode jsonNode) throws GeometryException
    {
        if (jsonNode == null)
        {
            log.error("Could not find routes node in Json returned by Network Analyst");
            return null;
        }
        // The spatial reference is defined once per feature set, not per feature.
        int wkid = getNodeFollowingPath(jsonNode, wkidPath).getIntValue();
        String wkidStr = Integer.toString(wkid);
        String geometryString;
        Geometry geometryObject;
        List<NamedGeometry> retList = new ArrayList<NamedGeometry>();
        NamedGeometry newNamedGeometry;
        for (JsonNode feature : getNodeFollowingPath(jsonNode, new Object[] { "features" }))
        {
            geometryString = geometryStringFromJsonNode(getNodeFollowingPath(feature, geometryPath), wkidStr);
            geometryObject = spatial.fromJson(geometryString);
            newNamedGeometry = new NamedGeometry(getNodeFollowingPath(feature, new Object[] { "attributes", "Name" }).asText(),
                    geometryObject, true,
                    getAttributesFromNode(getNodeFollowingPath(feature, attributesPath)));
            retList.add(newNamedGeometry);
        }
        return retList;
    }

    /**
     * Splices a {"spatialReference":{"wkid":...}} member into the serialized
     * geometry JSON, since per-feature geometries in the reply omit it.
     */
    private String geometryStringFromJsonNode(JsonNode geometry, String outSR)
    {
        String geometryString = geometry.toString();
        return geometryString.substring(0, geometryString.length() - 1) + ",\"spatialReference\":{\"wkid\":" + outSR + "}}";
    }

    /**
     * Converts the "stops" node of the reply into Locations, sorted by their
     * "Sequence" attribute (the order the solver visits them).
     *
     * @param jsonNode the "stops" node; null yields null (and an error log)
     */
    private List<Location> processStopsFromReply(JsonNode jsonNode)
    {
        if (jsonNode == null)
        {
            log.error("Could not find stops node in Json returned by Network Analyst");
            return null;
        }
        int wkid = getNodeFollowingPath(jsonNode, new Object[] { "spatialReference", "wkid" }).getIntValue();
        ArrayList<Location> retLocations = new ArrayList<Location>(jsonNode.get("features").size());
        JsonNode geometryNode;
        Location newLocation;
        for (JsonNode feature : jsonNode.get("features"))
        {
            geometryNode = getNodeFollowingPath(feature, geometryPath);
            newLocation = new Location();
            newLocation.setPoint(spatial.createPoint(geometryNode.get("x").asDouble(), geometryNode.get("y").asDouble(), wkid));
            newLocation.setAttributes(getAttributesFromNode(getNodeFollowingPath(feature, attributesPath)));
            retLocations.add(newLocation);
        }
        Collections.sort(retLocations, new CustomComparator());
        return retLocations;
    }

    /**
     * Flattens an "attributes" object node into a name -> value map using
     * getObjectFromNode for the value conversion.
     */
    private Map<String, Object> getAttributesFromNode(JsonNode attributesNode)
    {
        Map<String, Object> retMap = new HashMap<String, Object>();
        Iterator<String> fieldNames = attributesNode.getFieldNames();
        String fieldName;
        JsonNode currNode;
        while (fieldNames.hasNext())
        {
            fieldName = fieldNames.next();
            currNode = attributesNode.get(fieldName);
            retMap.put(fieldName, getObjectFromNode(currNode));
        }
        return retMap;
    }

    /**
     * Converts a scalar JSON node to the matching Java type.
     * NOTE(review): long, null and container nodes all map to null here —
     * confirm no NA attribute relies on those types before extending.
     */
    private Object getObjectFromNode(JsonNode node)
    {
        if (node.isInt())
        {
            return node.asInt();
        }
        if (node.isTextual())
        {
            return node.asText();
        }
        if (node.isBoolean())
        {
            return node.asBoolean();
        }
        if (node.isDouble())
        {
            return node.asDouble();
        }
        return null;
    }

    /**
     * Walks a path of String field names and Integer array indexes down a
     * JSON tree; returns null as soon as any step is missing.
     */
    private JsonNode getNodeFollowingPath(JsonNode jsonNode, Object[] nodePath)
    {
        for (Object property : nodePath)
        {
            if (property instanceof String)
            {
                jsonNode = jsonNode.get((String) property);
            }
            else if (property instanceof Integer)
            {
                Integer index = (Integer) property;
                jsonNode = jsonNode.get(index);
            }
            if (jsonNode == null)
            {
                break;
            }
        }
        return jsonNode;
    }

    /**
     * Serializes the locations as an esri "features" JSON payload
     * ({"type":"features","features":[...]}), with z-values stripped from
     * the point geometries.
     *
     * @param locations   stops to serialize, in order
     * @param addSequence when true, a 1-based "Sequence" attribute is stamped
     *                    onto each location's own attribute map
     */
    private String convertToFeaturesJson(List<Location> locations, boolean addSequence)
    {
        StringBuilder sb = new StringBuilder();
        sb.append("{\"type\":\"features\",\"features\":[");
        Integer locationIndex = 0;
        Map<String, Object> attributes;
        Map<String, Object> emptyHashMap = null;
        for (Location location : locations)
        {
            if (locationIndex != 0)
            {
                sb.append(',');
            }
            locationIndex++;
            sb.append("{\"geometry\":");
            sb.append(removeZFromGeom(location.getPoint().toJson()));
            attributes = location.getAttributes();
            if (attributes == null)
            {
                // Locations without attributes share one scratch map; its
                // "Sequence" entry is rewritten each iteration just before it
                // is serialized below, so the shared mutation is safe here.
                if (emptyHashMap == null)
                {
                    emptyHashMap = new HashMap<String, Object>();
                }
                attributes = emptyHashMap;
                attributes.put("Sequence", locationIndex);
            }
            if (addSequence && attributes != emptyHashMap)
            {
                attributes.put("Sequence", locationIndex);
            }
            sb.append(",\"attributes\":{");
            int index = 0;
            for (String key : attributes.keySet())
            {
                if (index != 0)
                {
                    sb.append(',');
                }
                index++;
                sb.append('"');
                sb.append(key);
                sb.append("\":");
                Object value = attributes.get(key);
                if (value == null)
                {
                    sb.append("null");
                }
                else
                {
                    // Only strings are quoted; numbers/booleans are emitted raw.
                    boolean isString = value instanceof String;
                    if (isString)
                    {
                        sb.append('"');
                    }
                    sb.append(value.toString());
                    if (isString)
                    {
                        sb.append('"');
                    }
                }
            }
            sb.append("}");
            sb.append('}');
        }
        sb.append("]}");
        return sb.toString();
    }

    /**
     * Removes the "z" member (if present) from a point-geometry JSON string;
     * the solver only accepts 2-D stops. Parse failures are rethrown as
     * RuntimeException.
     */
    private String removeZFromGeom(String geomString)
    {
        JsonFactory factory = new JsonFactory();
        ObjectMapper mapper = new ObjectMapper(factory);
        JsonParser parser;
        try
        {
            parser = factory.createJsonParser(geomString.getBytes());
            TypeReference<HashMap<String, Object>> typeRef = new TypeReference<HashMap<String, Object>>() {};
            HashMap<String, Object> o = mapper.readValue(parser, typeRef);
            if (o.containsKey("z"))
            {
                o.remove("z");
                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                mapper.writeValue(baos, o);
                geomString = baos.toString();
            }
        }
        catch (Exception e)
        {
            throw new RuntimeException(e);
        }
        return geomString;
    }

    /** Orders locations by their integer "Sequence" attribute. */
    public class CustomComparator implements Comparator<Location>
    {
        @Override
        public int compare(Location o1, Location o2)
        {
            Integer seq1 = (Integer) o1.getAttributes().get("Sequence");
            Integer seq2 = (Integer) o2.getAttributes().get("Sequence");
            return seq1.compareTo(seq2);
        }
    }
}
| |
/**
* CSSMin takes in well-formed, human-readable CSS and reduces its size substantially.
* It removes unnecessary whitespace and comments, and orders the contents of CSS
* selectors alphabetically to enhance GZIP compression.
*
* Originally by Barry van Oudtshoorn, with bug reports, fixes, and contributions by
* <ul>
* <li>Kevin de Groote</li>
* <li>Pedro Pinheiro</li>
* </ul>
* Some code is based on the YUI CssCompressor code, by Julien Lecomte.
*
* @author Barry van Oudtshoorn
*/
import java.util.*;
import java.util.regex.*;
import java.io.*;
import java.lang.*;
public class CSSMin {
    /** When true, progress and debug information is written to stderr. */
    protected static boolean bDebug = false;

    /**
     * Main entry point for CSSMin from the command-line.
     * <b>Usage:</b> CSSMin <i>[Input file]</i>, <i>[Output file]</i>, <i>[DEBUG]</i>
     * @param args The command-line arguments
     */
    public static void main(String[] args) {
        if (args.length < 1) {
            System.out.println("Usage: ");
            System.out.println("CSSMin [Input file] [Output file] [DEBUG]");
            System.out.println("If no output file is specified, stdout will be used.");
            return;
        }
        bDebug = (args.length > 2);
        PrintStream out;
        if (args.length > 1) {
            try {
                out = new PrintStream(args[1]);
            } catch (Exception e) {
                System.err.println("Error outputting to " + args[1] + "; redirecting to stdout");
                out = System.out;
            }
        } else {
            out = System.out;
        }
        formatFile(args[0], out);
    }

    /**
     * Process a file from a filename.
     * @param filename The file name of the CSS file to process.
     * @param out Where to send the result
     */
    public static void formatFile(String filename, OutputStream out) {
        try {
            formatFile(new FileReader(filename), out);
        } catch (java.io.FileNotFoundException e) {
            System.out.println(e.getMessage());
        }
    }

    /**
     * Process input from a reader.
     * @param input Where to read the CSS from
     * @param out Where to send the result
     */
    public static void formatFile(Reader input, OutputStream out) {
        formatFile(input, new PrintStream(out));
    }

    /**
     * Minify CSS from a reader to a printstream. Strips blank lines and
     * non-special comments, then parses each top-level "selector { ... }"
     * run into a Selector and prints the minified selectors. The output
     * stream is closed when done; errors are reported on stderr/stdout.
     * @param input Where to read the CSS from
     * @param out Where to write the result to
     */
    public static void formatFile(Reader input, PrintStream out) {
        try {
            int k,
                j, // Number of open braces
                n; // Current position in stream
            char curr;
            BufferedReader br = new BufferedReader(input);
            StringBuffer sb = new StringBuffer();
            if (bDebug) {
                System.err.println("Reading file into StringBuffer...");
            }
            String s;
            while ((s = br.readLine()) != null) {
                if (s.trim().equals("")) continue;
                sb.append(s);
            }
            if (bDebug) {
                System.err.println("Removing comments...");
            }
            // Strip comments. Searching must resume AFTER a retained special
            // comment ("/**"); the old code restarted indexOf from position 0
            // each time, which looped forever on the first special comment.
            int searchFrom = 0;
            while ((n = sb.indexOf("/*", searchFrom)) != -1) {
                if (n + 2 < sb.length() && sb.charAt(n + 2) == '*') { // Retain special comments
                    searchFrom = n + 2;
                    continue;
                }
                k = sb.indexOf("*/", n + 2);
                if (k == -1) {
                    throw new Exception("Unterminated comment. Aborting.");
                }
                sb.delete(n, k + 2);
                searchFrom = n;
            }
            if (bDebug) {
                System.err.println(sb.toString());
                System.err.println("\n\n");
            }
            if (bDebug) {
                System.err.println("Parsing and processing selectors...");
            }
            Vector<Selector> selectors = new Vector<Selector>();
            n = 0;
            j = 0;
            // Split on balanced top-level brace groups; each complete group
            // "selector { ... }" becomes one Selector.
            for (int i = 0; i < sb.length(); i++) {
                curr = sb.charAt(i);
                if (j < 0) {
                    throw new Exception("Unbalanced braces!");
                }
                if (curr == '{') {
                    j++;
                } else if (curr == '}') {
                    j--;
                    if (j == 0) {
                        try {
                            selectors.addElement(new Selector(sb.substring(n, i + 1)));
                        } catch (Exception e) {
                            System.out.println(e.getMessage());
                        }
                        n = i + 1;
                    }
                }
            }
            for (Selector selector : selectors) {
                out.print(selector.toString());
            }
            out.print("\r\n");
            out.close();
            if (bDebug) {
                System.err.println("Process completed successfully.");
            }
        } catch (Exception e) {
            e.printStackTrace(System.err);
            System.out.println(e.getMessage());
        }
    }
}
/**
 * One CSS selector and its body, e.g. "div { border: solid 1px red; }".
 * Nested rules such as "@-webkit-keyframes" are held as sub-selectors.
 * Properties are sorted alphabetically to improve gzip compression.
 */
class Selector {
    private Property[] properties = null;
    private Vector<Selector> subSelectors = null;
    private String selector;

    /**
     * Creates a new Selector using the supplied strings.
     * @param selector The selector; for example, "div { border: solid 1px red; color: blue; }"
     * @throws Exception If the selector is incomplete and cannot be parsed.
     */
    public Selector(String selector) throws Exception {
        String[] parts = selector.split("\\{"); // We have to escape the { with a \ for the regex, which itself requires escaping for the string. Sigh.
        if (parts.length < 2) {
            throw new Exception("Warning: Incomplete selector: " + selector);
        }
        this.selector = parts[0].toString().trim();
        // Simplify combinators
        this.selector = this.selector.replaceAll("\\s?(\\+|~|,|=|~=|\\^=|\\$=|\\*=|\\|=|>)\\s?", "$1");
        // We're dealing with a nested property, eg @-webkit-keyframes
        if (parts.length > 2) {
            this.subSelectors = new Vector<Selector>();
            parts = selector.split("\\{|\\}");
            for (int i = 1; i < parts.length; i += 2) {
                if (i + 1 >= parts.length) {
                    break; // Odd number of segments; no body for the last selector.
                }
                parts[i] = parts[i].trim();
                parts[i + 1] = parts[i + 1].trim();
                if (!(parts[i].equals("") || (parts[i + 1].equals("")))) {
                    this.subSelectors.addElement(new Selector(parts[i] + "{" + parts[i + 1] + "}"));
                }
            }
        } else {
            String contents = parts[parts.length - 1].trim();
            if (CSSMin.bDebug) {
                System.err.println("Parsing selector: " + this.selector);
                System.err.println("\t" + contents);
            }
            if (contents.charAt(contents.length() - 1) != '}') { // Ensure that we have a trailing brace.
                throw new Exception("\tUnterminated selector: " + selector);
            }
            if (contents.length() == 1) {
                throw new Exception("\tEmpty selector body: " + selector);
            }
            // Strip only the trailing '}'. The old code removed TWO characters,
            // which silently truncated the final property whenever it had no
            // trailing semicolon; a leftover ';' is harmless to parseProperties.
            contents = contents.substring(0, contents.length() - 1);
            this.properties = parseProperties(contents);
            sortProperties(this.properties);
        }
    }

    /**
     * Prints out this selector and its contents nicely, with the contents sorted alphabetically.
     * @returns A string representing this selector, minified.
     */
    public String toString() {
        StringBuffer sb = new StringBuffer();
        sb.append(this.selector).append("{");
        if (this.subSelectors != null) {
            for (Selector s : this.subSelectors) {
                sb.append(s.toString());
            }
        }
        if (this.properties != null) {
            for (Property p : this.properties) {
                sb.append(p.toString());
            }
        }
        if (sb.charAt(sb.length() - 1) == ';') {
            sb.deleteCharAt(sb.length() - 1); // Trailing ';' before '}' is redundant.
        }
        sb.append("}");
        return sb.toString();
    }

    /**
     * Parses out the properties of a selector's body. Splits on ';' outside
     * double-quoted strings; properties that fail to parse are reported and
     * skipped (the old code stored null slots, which crashed sorting).
     * @param contents The body; for example, "border: solid 1px red; color: blue;"
     * @returns An array of properties parsed from this selector.
     */
    private Property[] parseProperties(String contents) {
        ArrayList<String> parts = new ArrayList<String>();
        boolean bCanSplit = true;
        int j = 0;
        String substr;
        for (int i = 0; i < contents.length(); i++) {
            if (!bCanSplit) { // If we're inside a string
                bCanSplit = (contents.charAt(i) == '"');
            } else if (contents.charAt(i) == '"') {
                bCanSplit = false;
            } else if (contents.charAt(i) == ';') {
                substr = contents.substring(j, i);
                if (!(substr.trim().equals(""))) parts.add(substr);
                j = i + 1;
            }
        }
        substr = contents.substring(j, contents.length());
        if (!(substr.trim().equals(""))) parts.add(substr);
        ArrayList<Property> results = new ArrayList<Property>();
        for (String part : parts) {
            try {
                results.add(new Property(part));
            } catch (Exception e) {
                System.out.println(e.getMessage());
            }
        }
        return results.toArray(new Property[results.size()]);
    }

    /**
     * Sorts the properties array to enhance gzipping.
     * @param properties The array to be sorted.
     */
    private void sortProperties(Property[] properties) {
        Arrays.sort(properties);
    }
}
/**
 * One CSS property (name plus comma-separated value Parts), e.g.
 * "border: solid 1px red". Sorts by name, with hack-prefixed properties
 * (*display, _display) ordered after their plain counterparts.
 */
class Property implements Comparable<Property> {
    protected String property;
    protected Part[] parts;

    /**
     * Creates a new Property using the supplied strings. Parses out the values of the property selector.
     * @param property The property; for example, "border: solid 1px red;" or "-moz-box-shadow: 3px 3px 3px rgba(255, 255, 0, 0.5);".
     * @throws Exception If the property is incomplete and cannot be parsed.
     */
    public Property(String property) throws Exception {
        try {
            // Split on the first ':' that is outside a double-quoted string.
            ArrayList<String> parts = new ArrayList<String>();
            boolean bCanSplit = true;
            int j = 0;
            String substr;
            if (CSSMin.bDebug) {
                System.err.println("\t\tExamining property: " + property);
            }
            for (int i = 0; i < property.length(); i++) {
                if (!bCanSplit) { // If we're inside a string
                    bCanSplit = (property.charAt(i) == '"');
                } else if (property.charAt(i) == '"') {
                    bCanSplit = false;
                } else if (property.charAt(i) == ':') {
                    substr = property.substring(j, i);
                    if (!(substr.trim().equals(""))) parts.add(substr);
                    j = i + 1;
                }
            }
            substr = property.substring(j, property.length());
            if (!(substr.trim().equals(""))) parts.add(substr);
            if (parts.size() < 2) {
                throw new Exception("\t\tWarning: Incomplete property: " + property);
            }
            this.property = parts.get(0).trim().toLowerCase();
            this.parts = parseValues(simplifyColours(parts.get(1).trim().replaceAll(", ", ",")));
        } catch (PatternSyntaxException e) {
            // Invalid regular expression used. Report it instead of silently
            // leaving this property in a half-initialised state.
            System.out.println(e.getMessage());
        }
    }

    /**
     * Prints out this property nicely. Parts that failed to parse (null
     * slots) are skipped rather than dereferenced.
     * @returns A string representing this property, minified.
     */
    public String toString() {
        StringBuffer sb = new StringBuffer();
        sb.append(this.property).append(":");
        if (this.parts != null) {
            for (Part p : this.parts) {
                if (p != null) {
                    sb.append(p.toString()).append(",");
                }
            }
        }
        if (sb.charAt(sb.length() - 1) == ',') {
            sb.deleteCharAt(sb.length() - 1); // Delete the trailing comma.
        }
        sb.append(";");
        if (CSSMin.bDebug) {
            System.err.println(sb.toString());
        }
        return sb.toString();
    }

    /**
     * Compare this property with another.
     */
    public int compareTo(Property other) {
        // We can't just use String.compareTo(), because we need to sort properties that have hack prefixes last -- eg, *display should come after display.
        String thisProp = this.property;
        String thatProp = other.property;
        if (thisProp.charAt(0) == '-') {
            // Vendor prefix (-moz-foo): sort by the bare property name.
            thisProp = thisProp.substring(1);
            thisProp = thisProp.substring(thisProp.indexOf('-') + 1);
        } else if (thisProp.charAt(0) < 65) {
            // Hack prefix (*foo, _foo — any char below 'A'): drop it for comparison.
            thisProp = thisProp.substring(1);
        }
        if (thatProp.charAt(0) == '-') {
            thatProp = thatProp.substring(1);
            thatProp = thatProp.substring(thatProp.indexOf('-') + 1);
        } else if (thatProp.charAt(0) < 65) {
            thatProp = thatProp.substring(1);
        }
        return thisProp.compareTo(thatProp);
    }

    /**
     * Parse the values out of a property.
     * @param contents The property to parse
     * @returns An array of Parts
     */
    private Part[] parseValues(String contents) {
        String[] parts = contents.split(",");
        Part[] results = new Part[parts.length];
        for (int i = 0; i < parts.length; i++) {
            try {
                results[i] = new Part(parts[i]);
            } catch (Exception e) {
                System.out.println(e.getMessage());
                results[i] = null;
            }
        }
        return results;
    }

    private String simplifyColours(String contents) {
        // This replacement, although it results in a smaller uncompressed file,
        // actually makes the gzipped file bigger -- people tend to use rgba(0,0,0,0.x)
        // quite a lot, which means that rgba(0,0,0,0) has its first eight or so characters
        // compressed really efficiently; much more so than "transparent".
        //contents = contents.replaceAll("rgba\\(0,0,0,0\\)", "transparent");
        return simplifyRGBColours(contents);
    }

    // Convert rgb(51,102,153) to #336699 (this code largely based on YUI code)
    private String simplifyRGBColours(String contents) {
        StringBuffer newContents = new StringBuffer();
        StringBuffer hexColour;
        String[] rgbColours;
        int colourValue;
        Pattern pattern = Pattern.compile("rgb\\s*\\(\\s*([0-9,\\s]+)\\s*\\)");
        Matcher matcher = pattern.matcher(contents);
        while (matcher.find()) {
            hexColour = new StringBuffer("#");
            rgbColours = matcher.group(1).split(",");
            for (int i = 0; i < rgbColours.length; i++) {
                colourValue = Integer.parseInt(rgbColours[i]);
                if (colourValue < 16) {
                    hexColour.append("0"); // Pad to two hex digits per channel.
                }
                hexColour.append(Integer.toHexString(colourValue));
            }
            matcher.appendReplacement(newContents, hexColour.toString());
        }
        matcher.appendTail(newContents);
        return newContents.toString();
    }
}
/**
 * One value within a CSS property (the segments between commas), e.g.
 * "solid 1px red". Construction runs a series of lossless minification
 * passes over the text.
 */
class Part {
    String contents;

    /**
     * Create a new property by parsing the given string.
     * @param contents The string to parse.
     * @throws Exception If the part cannot be parsed.
     */
    public Part(String contents) throws Exception {
        // Many of these regular expressions are adapted from those used in the YUI CSS Compressor.
        // For simpler regexes.
        this.contents = " " + contents;
        simplify();
    }

    /** Runs all minification passes over this part's text, in order. */
    private void simplify() {
        // !important doesn't need to be spaced
        this.contents = this.contents.replaceAll(" !important", "!important");
        // Replace 0in, 0cm, etc. with just 0
        this.contents = this.contents.replaceAll("(\\s)(0)(px|em|%|in|cm|mm|pc|pt|ex)", "$1$2");
        // Replace 0.6 with .6
        // Disabled, as it actually makes compression worse! People use rgba(0,0,0,0) and rgba(0,0,0,0.x) a lot.
        //this.contents = this.contents.replaceAll("(\\s)0+\\.(\\d+)", "$1.$2");
        this.contents = this.contents.trim();
        // Simplify multiple zeroes
        if (this.contents.equals("0 0 0 0")) this.contents = "0";
        if (this.contents.equals("0 0 0")) this.contents = "0";
        if (this.contents.equals("0 0")) this.contents = "0";
        // Simplify multiple-parameter properties
        simplifyParameters();
        // Simplify font weights
        simplifyFontWeights();
        // Strip unnecessary quotes from url() and single-word parts, and make as much lowercase as possible.
        simplifyQuotesAndCaps();
        // Simplify colours
        simplifyColourNames();
        simplifyHexColours();
    }

    /** Drops redundant trailing shorthand values (3px 0 3px 0 -> 3px 0). */
    private void simplifyParameters() {
        StringBuffer newContents = new StringBuffer();
        String[] params = this.contents.split(" ");
        if (params.length == 4) {
            // We can drop off the fourth item if the second and fourth items match
            // ie turn 3px 0 3px 0 into 3px 0 3px
            if (params[1].equalsIgnoreCase(params[3])) {
                params = Arrays.copyOf(params, 3);
            }
        }
        if (params.length == 3) {
            // We can drop off the third item if the first and third items match
            // ie turn 3px 0 3px into 3px 0
            if (params[0].equalsIgnoreCase(params[2])) {
                params = Arrays.copyOf(params, 2);
            }
        }
        if (params.length == 2) {
            // We can drop off the second item if the first and second items match
            // ie turn 3px 3px into 3px
            if (params[0].equalsIgnoreCase(params[1])) {
                params = Arrays.copyOf(params, 1);
            }
        }
        for (int i = 0; i < params.length; i++) {
            newContents.append(params[i] + " ");
        }
        newContents.deleteCharAt(newContents.length() - 1); // Delete the trailing space
        this.contents = newContents.toString();
    }

    /** Replaces named font weights (bold etc.) with their numeric values. */
    private void simplifyFontWeights() {
        String lcContents = this.contents.toLowerCase();
        for (int i = 0; i < Constants.fontWeightNames.length; i++) {
            if (lcContents.equals(Constants.fontWeightNames[i])) {
                this.contents = Constants.fontWeightValues[i];
                break;
            }
        }
    }

    /** Strips quotes from url(...) and single-word values; lowercases the latter. */
    private void simplifyQuotesAndCaps() {
        // Strip quotes from URLs
        if ((this.contents.length() > 4) && (this.contents.substring(0, 4).equalsIgnoreCase("url("))) {
            this.contents = this.contents.replaceAll("(?i)url\\(('|\")?(.*?)\\1\\)", "url($2)");
        } else {
            String[] words = this.contents.split("\\s");
            if (words.length == 1) {
                this.contents = this.contents.toLowerCase();
                // BUGFIX: the backreference must be written "\\1" in Java source;
                // the old "\1" was an octal escape (U+0001) that never matched,
                // so quotes were never stripped from single-word values.
                this.contents = this.contents.replaceAll("('|\")?(.*?)\\1", "$2");
            }
        }
    }

    /** Swaps colour names and hex values, whichever spelling is shorter. */
    private void simplifyColourNames() {
        String lcContents = this.contents.toLowerCase();
        for (int i = 0; i < Constants.htmlColourNames.length; i++) {
            if (lcContents.equals(Constants.htmlColourNames[i])) {
                if (Constants.htmlColourValues[i].length() < Constants.htmlColourNames[i].length()) {
                    this.contents = Constants.htmlColourValues[i];
                }
                break;
            } else if (lcContents.equals(Constants.htmlColourValues[i])) {
                if (Constants.htmlColourNames[i].length() < Constants.htmlColourValues[i].length()) {
                    this.contents = Constants.htmlColourNames[i];
                }
            }
        }
    }

    /** Collapses #aabbcc to #abc and lowercases six-digit hex colours. */
    private void simplifyHexColours() {
        StringBuffer newContents = new StringBuffer();
        Pattern pattern = Pattern.compile("#([0-9a-fA-F])([0-9a-fA-F])([0-9a-fA-F])([0-9a-fA-F])([0-9a-fA-F])([0-9a-fA-F])");
        Matcher matcher = pattern.matcher(this.contents);
        while (matcher.find()) {
            if (matcher.group(1).equalsIgnoreCase(matcher.group(2)) && matcher.group(3).equalsIgnoreCase(matcher.group(4)) && matcher.group(5).equalsIgnoreCase(matcher.group(6))) {
                matcher.appendReplacement(newContents, "#" + matcher.group(1).toLowerCase() + matcher.group(3).toLowerCase() + matcher.group(5).toLowerCase());
            } else {
                matcher.appendReplacement(newContents, matcher.group().toLowerCase());
            }
        }
        matcher.appendTail(newContents);
        this.contents = newContents.toString();
    }

    /**
     * Returns itself.
     * @returns this part's string representation.
     */
    public String toString() {
        return this.contents;
    }
}
/**
 * Lookup tables for Part's minification passes. The two colour arrays are
 * parallel (name[i] corresponds to value[i]), as are the two font-weight
 * arrays. BUGFIX: "indianred" and "indigo" previously carried trailing
 * spaces, so they could never match the trimmed values they are compared to.
 */
class Constants {
    static final String[] htmlColourNames = {
        "aliceblue",
        "antiquewhite",
        "aqua",
        "aquamarine",
        "azure",
        "beige",
        "bisque",
        "black",
        "blanchedalmond",
        "blue",
        "blueviolet",
        "brown",
        "burlywood",
        "cadetblue",
        "chartreuse",
        "chocolate",
        "coral",
        "cornflowerblue",
        "cornsilk",
        "crimson",
        "cyan",
        "darkblue",
        "darkcyan",
        "darkgoldenrod",
        "darkgray",
        "darkgreen",
        "darkkhaki",
        "darkmagenta",
        "darkolivegreen",
        "darkorange",
        "darkorchid",
        "darkred",
        "darksalmon",
        "darkseagreen",
        "darkslateblue",
        "darkslategray",
        "darkturquoise",
        "darkviolet",
        "deeppink",
        "deepskyblue",
        "dimgray",
        "dodgerblue",
        "firebrick",
        "floralwhite",
        "forestgreen",
        "fuchsia",
        "gainsboro",
        "ghostwhite",
        "gold",
        "goldenrod",
        "gray",
        "green",
        "greenyellow",
        "honeydew",
        "hotpink",
        "indianred",
        "indigo",
        "ivory",
        "khaki",
        "lavender",
        "lavenderblush",
        "lawngreen",
        "lemonchiffon",
        "lightblue",
        "lightcoral",
        "lightcyan",
        "lightgoldenrodyellow",
        "lightgrey",
        "lightgreen",
        "lightpink",
        "lightsalmon",
        "lightseagreen",
        "lightskyblue",
        "lightslategray",
        "lightsteelblue",
        "lightyellow",
        "lime",
        "limegreen",
        "linen",
        "magenta",
        "maroon",
        "mediumaquamarine",
        "mediumblue",
        "mediumorchid",
        "mediumpurple",
        "mediumseagreen",
        "mediumslateblue",
        "mediumspringgreen",
        "mediumturquoise",
        "mediumvioletred",
        "midnightblue",
        "mintcream",
        "mistyrose",
        "moccasin",
        "navajowhite",
        "navy",
        "oldlace",
        "olive",
        "olivedrab",
        "orange",
        "orangered",
        "orchid",
        "palegoldenrod",
        "palegreen",
        "paleturquoise",
        "palevioletred",
        "papayawhip",
        "peachpuff",
        "peru",
        "pink",
        "plum",
        "powderblue",
        "purple",
        "red",
        "rosybrown",
        "royalblue",
        "saddlebrown",
        "salmon",
        "sandybrown",
        "seagreen",
        "seashell",
        "sienna",
        "silver",
        "skyblue",
        "slateblue",
        "slategray",
        "snow",
        "springgreen",
        "steelblue",
        "tan",
        "teal",
        "thistle",
        "tomato",
        "turquoise",
        "violet",
        "wheat",
        "white",
        "whitesmoke",
        "yellow",
        "yellowgreen"
    };
    static final String[] htmlColourValues = {
        "#f0f8ff",
        "#faebd7",
        "#00ffff",
        "#7fffd4",
        "#f0ffff",
        "#f5f5dc",
        "#ffe4c4",
        "#000",
        "#ffebcd",
        "#00f",
        "#8a2be2",
        "#a52a2a",
        "#deb887",
        "#5f9ea0",
        "#7fff00",
        "#d2691e",
        "#ff7f50",
        "#6495ed",
        "#fff8dc",
        "#dc143c",
        "#0ff",
        "#00008b",
        "#008b8b",
        "#b8860b",
        "#a9a9a9",
        "#006400",
        "#bdb76b",
        "#8b008b",
        "#556b2f",
        "#ff8c00",
        "#9932cc",
        "#8b0000",
        "#e9967a",
        "#8fbc8f",
        "#483d8b",
        "#2f4f4f",
        "#00ced1",
        "#9400d3",
        "#ff1493",
        "#00bfff",
        "#696969",
        "#1e90ff",
        "#b22222",
        "#fffaf0",
        "#228b22",
        "#f0f",
        "#dcdcdc",
        "#f8f8ff",
        "#ffd700",
        "#daa520",
        "#808080",
        "#008000",
        "#adff2f",
        "#f0fff0",
        "#ff69b4",
        "#cd5c5c",
        "#4b0082",
        "#fffff0",
        "#f0e68c",
        "#e6e6fa",
        "#fff0f5",
        "#7cfc00",
        "#fffacd",
        "#add8e6",
        "#f08080",
        "#e0ffff",
        "#fafad2",
        "#d3d3d3",
        "#90ee90",
        "#ffb6c1",
        "#ffa07a",
        "#20b2aa",
        "#87cefa",
        "#789",
        "#b0c4de",
        "#ffffe0",
        "#0f0",
        "#32cd32",
        "#faf0e6",
        "#f0f",
        "#800000",
        "#66cdaa",
        "#0000cd",
        "#ba55d3",
        "#9370d8",
        "#3cb371",
        "#7b68ee",
        "#00fa9a",
        "#48d1cc",
        "#c71585",
        "#191970",
        "#f5fffa",
        "#ffe4e1",
        "#ffe4b5",
        "#ffdead",
        "#000080",
        "#fdf5e6",
        "#808000",
        "#6b8e23",
        "#ffa500",
        "#ff4500",
        "#da70d6",
        "#eee8aa",
        "#98fb98",
        "#afeeee",
        "#d87093",
        "#ffefd5",
        "#ffdab9",
        "#cd853f",
        "#ffc0cb",
        "#dda0dd",
        "#b0e0e6",
        "#800080",
        "#f00",
        "#bc8f8f",
        "#4169e1",
        "#8b4513",
        "#fa8072",
        "#f4a460",
        "#2e8b57",
        "#fff5ee",
        "#a0522d",
        "#c0c0c0",
        "#87ceeb",
        "#6a5acd",
        "#708090",
        "#fffafa",
        "#00ff7f",
        "#4682b4",
        "#d2b48c",
        "#008080",
        "#d8bfd8",
        "#ff6347",
        "#40e0d0",
        "#ee82ee",
        "#f5deb3",
        "#fff",
        "#f5f5f5",
        "#ff0",
        "#9acd32"
    };
    static final String[] fontWeightNames = {
        "normal",
        "bold",
        "bolder",
        "lighter"
    };
    static final String[] fontWeightValues = {
        "400",
        "700",
        "900",
        "100"
    };
}
| |
/**
* Copyright 2004-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.hr.lm.leaveCalendar.validation;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.joda.time.DateTime;
import org.joda.time.LocalDate;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.kuali.hr.KPMEWebTestCase;
import org.kuali.kpme.core.FunctionalTest;
import org.kuali.kpme.tklm.api.leave.block.LeaveBlock;
import org.kuali.kpme.tklm.api.leave.block.LeaveBlockContract;
import org.kuali.kpme.tklm.leave.block.LeaveBlockBo;
import org.kuali.kpme.tklm.leave.calendar.validation.LeaveCalendarValidationUtil;
import org.kuali.kpme.tklm.leave.summary.LeaveSummary;
import org.kuali.kpme.tklm.leave.summary.LeaveSummaryRow;
@FunctionalTest
public class LeaveCalendarValidationServiceTest extends KPMEWebTestCase {
@Before
public void setUp() throws Exception {
super.setUp();
}
@After
public void tearDown() throws Exception {
super.tearDown();
}
@Test
public void testValidateAvailableLeaveBalance() throws Exception {
LeaveSummary ls = new LeaveSummary();
LeaveSummaryRow lsr = new LeaveSummaryRow();
lsr.setAccrualCategory("testAC");
lsr.setAccrualCategoryId("5000");
lsr.setLeaveBalance(new BigDecimal(5));
List<LeaveSummaryRow> lsrList = new ArrayList<LeaveSummaryRow>();
lsrList.add(lsr);
ls.setLeaveSummaryRows(lsrList);
// adding brand new leave blocks
// earn code "EC" does not allow negative accrual balance
List<String> errors = LeaveCalendarValidationUtil.validateAvailableLeaveBalanceForUsage("EC", "02/15/2012", "02/15/2012", new BigDecimal(8), null);
Assert.assertEquals("Incorrect number of error messages", 1, errors.size());
String anError = errors.get(0);
Assert.assertTrue("error message not correct" , anError.equals("Requested leave amount 8 is greater than available leave balance 0.00"));
// earn code "EC1" allows negative accrual balance
errors = LeaveCalendarValidationUtil.validateAvailableLeaveBalanceForUsage("EC1", "02/15/2012", "02/15/2012", new BigDecimal(8), null);
Assert.assertTrue("There should NOT be error message(s)" , errors.isEmpty());
//updating an existing leave block
LeaveBlock.Builder aLeaveBlock = LeaveBlock.Builder.create("xxx", "EC", new BigDecimal(-10));
//aLeaveBlock.setEarnCode("EC");
//aLeaveBlock.setLeaveAmount(new BigDecimal(-10));
errors = LeaveCalendarValidationUtil.validateAvailableLeaveBalanceForUsage("EC", "02/15/2012", "02/15/2012", new BigDecimal(3), aLeaveBlock.build());
Assert.assertTrue("There should NOT be error message(s)" , errors.isEmpty());
aLeaveBlock.setLeaveAmount(new BigDecimal(-2));
errors = LeaveCalendarValidationUtil.validateAvailableLeaveBalanceForUsage("EC", "02/15/2012", "02/15/2012", new BigDecimal(10), aLeaveBlock.build());
anError = errors.get(0);
Assert.assertTrue("error message not correct" , anError.equals("Requested leave amount 10 is greater than available leave balance 2.00"));
}
@Test
public void testValidateLeaveSpanOverMaxUsageRule() throws Exception {
LeaveSummary ls = new LeaveSummary();
LeaveSummaryRow lsr = new LeaveSummaryRow();
lsr.setAccrualCategory("testAC");
lsr.setUsageLimit(new BigDecimal(39));
List<LeaveSummaryRow> lsrList = new ArrayList<LeaveSummaryRow>();
lsrList.add(lsr);
ls.setLeaveSummaryRows(lsrList);
// adding brand new leave blocks
List<String> errors = LeaveCalendarValidationUtil.validateLeaveAccrualRuleMaxUsage(ls, "EC", "02/15/2012", "02/19/2012", new BigDecimal(8), null);
Assert.assertEquals("There should be 1 error message" , 1, errors.size());
String anError = errors.get(0);
Assert.assertTrue("error message not correct" , anError.equals("This leave request would exceed the usage limit for " + lsr.getAccrualCategory()));
}
@Test
public void testValidateLeaveSpanUnderMaxUsageRule() throws Exception {
LeaveSummary ls = new LeaveSummary();
LeaveSummaryRow lsr = new LeaveSummaryRow();
lsr.setAccrualCategory("testAC");
lsr.setUsageLimit(new BigDecimal(41));
List<LeaveSummaryRow> lsrList = new ArrayList<LeaveSummaryRow>();
lsrList.add(lsr);
ls.setLeaveSummaryRows(lsrList);
// adding brand new leave blocks
List<String> errors = LeaveCalendarValidationUtil.validateLeaveAccrualRuleMaxUsage(ls, "EC", "02/15/2012", "02/19/2012", new BigDecimal(8), null);
Assert.assertEquals("There should be no error message" , 0, errors.size());
}
@Test
public void testValidateLeaveSpanEqualMaxUsageRule() throws Exception {
LeaveSummary ls = new LeaveSummary();
LeaveSummaryRow lsr = new LeaveSummaryRow();
lsr.setAccrualCategory("testAC");
lsr.setUsageLimit(new BigDecimal(40));
List<LeaveSummaryRow> lsrList = new ArrayList<LeaveSummaryRow>();
lsrList.add(lsr);
ls.setLeaveSummaryRows(lsrList);
// adding brand new leave blocks
List<String> errors = LeaveCalendarValidationUtil.validateLeaveAccrualRuleMaxUsage(ls, "EC", "02/15/2012", "02/19/2012", new BigDecimal(8), null);
Assert.assertEquals("There should be no error message" , 0, errors.size());
}
@Test
public void testValidateLeaveNonSpanOverMaxUsageRule() throws Exception {
LeaveSummary ls = new LeaveSummary();
LeaveSummaryRow lsr = new LeaveSummaryRow();
lsr.setAccrualCategory("testAC");
lsr.setUsageLimit(new BigDecimal(5));
List<LeaveSummaryRow> lsrList = new ArrayList<LeaveSummaryRow>();
lsrList.add(lsr);
ls.setLeaveSummaryRows(lsrList);
// adding brand new leave blocks
List<String> errors = LeaveCalendarValidationUtil.validateLeaveAccrualRuleMaxUsage(ls, "EC", "02/15/2012", "02/15/2012", new BigDecimal(8), null);
Assert.assertEquals("There should be 1 error message" , 1, errors.size());
String anError = errors.get(0);
Assert.assertTrue("error message not correct" , anError.equals("This leave request would exceed the usage limit for " + lsr.getAccrualCategory()));
}
@Test
public void testValidateLeaveNonSpanEqualsMaxUsageRule() throws Exception {
LeaveSummary ls = new LeaveSummary();
LeaveSummaryRow lsr = new LeaveSummaryRow();
lsr.setAccrualCategory("testAC");
lsr.setUsageLimit(new BigDecimal(5));
List<LeaveSummaryRow> lsrList = new ArrayList<LeaveSummaryRow>();
lsrList.add(lsr);
ls.setLeaveSummaryRows(lsrList);
List<String> errors = LeaveCalendarValidationUtil.validateLeaveAccrualRuleMaxUsage(ls, "EC", "02/15/2012", "02/15/2012", new BigDecimal(5), null);
Assert.assertEquals("There should be no error message" , 0, errors.size());
}
@Test
public void testValidateEditLeaveBlockMaxUsageRuleCaseOne() throws Exception {
//Leave Amount increases, Earn Code unchanged
LeaveSummary ls = new LeaveSummary();
LeaveSummaryRow lsr = new LeaveSummaryRow();
lsr.setAccrualCategory("testAC");
lsr.setUsageLimit(new BigDecimal(50));
lsr.setPendingLeaveRequests(new BigDecimal(25));
lsr.setYtdApprovedUsage(new BigDecimal(15));
List<LeaveSummaryRow> lsrList = new ArrayList<LeaveSummaryRow>();
lsrList.add(lsr);
ls.setLeaveSummaryRows(lsrList);
//updating an existing leave block
LeaveBlock.Builder aLeaveBlock = LeaveBlock.Builder.create("xxx", "EC", new BigDecimal(-10));
//LeaveBlockBo aLeaveBlock = new LeaveBlockBo();
//aLeaveBlock.setEarnCode("EC");
//aLeaveBlock.setLeaveAmount(new BigDecimal(-10)); //this amount, multiplied by the days in the span, is considered to be part of the pending leave requests.
List<String> errors = new ArrayList<String>();
// EC1 belongs to the accrual category testAC
// should still be under 50 effective difference is +9, over 1 days = 9 -> 40+12 < 50
errors = LeaveCalendarValidationUtil.validateLeaveAccrualRuleMaxUsage(ls, "EC", "02/15/2012", "02/15/2012", new BigDecimal(19), aLeaveBlock.build());
Assert.assertTrue("There should be no error message test 1" , errors.size()== 0);
// should be right at 50 effective difference is +10, over 1 days = 10 -> 40+10 = 50
errors = LeaveCalendarValidationUtil.validateLeaveAccrualRuleMaxUsage(ls, "EC", "02/15/2012", "02/15/2012", new BigDecimal(20), aLeaveBlock.build());
Assert.assertTrue("There should be no error message test 2" , errors.size()== 0);
// should be over 50 effective difference is +11, over 1 day = 11 -> 40+11 > 50
errors = LeaveCalendarValidationUtil.validateLeaveAccrualRuleMaxUsage(ls, "EC", "02/15/2012", "02/15/2012", new BigDecimal(21), aLeaveBlock.build());
Assert.assertTrue("There should be 1 error message test 3" , errors.size()== 1);
// should be over 50 effective difference is +2, over 6 days = 12 -> 40+12 > 50
errors = LeaveCalendarValidationUtil.validateLeaveAccrualRuleMaxUsage(ls, "EC", "02/15/2012", "02/20/2012", new BigDecimal(12), aLeaveBlock.build());
Assert.assertTrue("There should be 1 error message test 5" , errors.size()== 1);
// should be under effective difference is +2, over 4 days = 8 -> 40+8 < 50
errors = LeaveCalendarValidationUtil.validateLeaveAccrualRuleMaxUsage(ls, "EC", "02/15/2012", "02/18/2012", new BigDecimal(12), aLeaveBlock.build());
Assert.assertTrue("There should be 1 error message test 6" , errors.size()== 1);
}
@Test
public void testValidateEditLeaveBlockMaxUsageRuleCaseTwo() throws Exception {
//Leave Amount decreases, earn code remains the same.
LeaveSummary ls = new LeaveSummary();
LeaveSummaryRow lsr = new LeaveSummaryRow();
lsr.setAccrualCategory("testAC");
lsr.setUsageLimit(new BigDecimal(50));
lsr.setPendingLeaveRequests(new BigDecimal(25));
lsr.setYtdApprovedUsage(new BigDecimal(30));
List<LeaveSummaryRow> lsrList = new ArrayList<LeaveSummaryRow>();
lsrList.add(lsr);
ls.setLeaveSummaryRows(lsrList);
//updating an existing leave block
//Somehow a block enters the system that exceeds max_usage. The only way for it to be saved
//is if the net change drops below the usage limit.
LeaveBlock.Builder aLeaveBlock = LeaveBlock.Builder.create("xxx", "EC", new BigDecimal(-10));
//LeaveBlockBo aLeaveBlock = new LeaveBlockBo();
//aLeaveBlock.setEarnCode("EC");
//aLeaveBlock.setLeaveAmount(new BigDecimal(-10));
List<String> errors = new ArrayList<String>();
// effective difference is (-2), over 1 days = -2 -> 55+(-2) > 50
errors = LeaveCalendarValidationUtil.validateLeaveAccrualRuleMaxUsage(ls, "EC", "02/15/2012", "02/15/2012", new BigDecimal(8), aLeaveBlock.build());
Assert.assertTrue("There should be 1 error message" , errors.size()== 1);
// should be equal effective difference is (-0.5), over 5 days = -2.5 -> 55+(-2.5) > 50
errors = LeaveCalendarValidationUtil.validateLeaveAccrualRuleMaxUsage(ls, "EC", "02/15/2012", "02/19/2012", new BigDecimal(9.5), aLeaveBlock.build());
Assert.assertTrue("There should be 1 error message" , errors.size()== 1);
}
@Test
public void testValidateEditLeaveBlockMaxUsageRuleCaseThree() throws Exception {
//Leave Amount static, earn code changes.
LeaveSummary ls = new LeaveSummary();
LeaveSummaryRow lsr = new LeaveSummaryRow();
lsr.setAccrualCategory("testAC");
lsr.setUsageLimit(new BigDecimal(50));
lsr.setPendingLeaveRequests(new BigDecimal(25));
lsr.setYtdApprovedUsage(new BigDecimal(15));
LeaveSummaryRow lsr2 = new LeaveSummaryRow();
lsr2.setAccrualCategory("testAC2");
lsr2.setUsageLimit(new BigDecimal(15));
lsr2.setPendingLeaveRequests(new BigDecimal(5));
lsr2.setYtdApprovedUsage(new BigDecimal(4));
List<LeaveSummaryRow> lsrList = new ArrayList<LeaveSummaryRow>();
lsrList.add(lsr);
lsrList.add(lsr2);
ls.setLeaveSummaryRows(lsrList);
//updating an existing leave block
LeaveBlock.Builder aLeaveBlock = LeaveBlock.Builder.create("xxx", "EC", new BigDecimal(-10));
aLeaveBlock.setAccrualCategory("testAC");
//LeaveBlockBo aLeaveBlock = new LeaveBlockBo();
//aLeaveBlock.setEarnCode("EC");
//aLeaveBlock.setAccrualCategory("testAC");
//aLeaveBlock.setLeaveAmount(new BigDecimal(-10));
List<String> errors = new ArrayList<String>();
//Changing to an earn code with different accrual category, testAC2
errors = LeaveCalendarValidationUtil.validateLeaveAccrualRuleMaxUsage(ls, "EC2", "02/15/2012", "02/15/2012", new BigDecimal(6), aLeaveBlock.build());
Assert.assertTrue("There should be no error message. reached usage limit." , errors.size()== 0);
//Changing to an earn code with different accrual category, testAC2
errors = LeaveCalendarValidationUtil.validateLeaveAccrualRuleMaxUsage(ls, "EC2", "02/15/2012", "02/15/2012", new BigDecimal(7), aLeaveBlock.build());
Assert.assertTrue("There should be 1 error message, there were " + errors.size() + " errors" , errors.size()== 1);
//Changing to an earn code with different accrual category, testAC2 with spanning days.
errors = LeaveCalendarValidationUtil.validateLeaveAccrualRuleMaxUsage(ls, "EC2", "02/15/2012", "02/19/2012", new BigDecimal(1), aLeaveBlock.build());
Assert.assertTrue("There should be no error message, there were " + errors.size() + " errors" , errors.size()== 0);
//Changing to an earn code with different accrual category, testAC2 with spanning days.
errors = LeaveCalendarValidationUtil.validateLeaveAccrualRuleMaxUsage(ls, "EC2", "02/15/2012", "02/20/2012", new BigDecimal(1), aLeaveBlock.build());
Assert.assertTrue("There should be no error message, there were " + errors.size() + " errors" , errors.size()== 0);
//Changing to an earn code with different accrual category, testAC2 with spanning days.
errors = LeaveCalendarValidationUtil.validateLeaveAccrualRuleMaxUsage(ls, "EC2", "02/15/2012", "02/21/2012", new BigDecimal(1), aLeaveBlock.build());
Assert.assertTrue("There should be 1 error message, there were " + errors.size() + " errors" , errors.size()== 1);
//Changing to an earn code within same accrual category, testAC
errors = LeaveCalendarValidationUtil.validateLeaveAccrualRuleMaxUsage(ls, "EC1", "02/15/2012", "02/15/2012", new BigDecimal(10), aLeaveBlock.build());
Assert.assertTrue("There should be no error message, there were " + errors.size() + " errors" , errors.size()== 0);
//Changing to an earn code within same accrual category, testAC with spanning days.
errors = LeaveCalendarValidationUtil.validateLeaveAccrualRuleMaxUsage(ls, "EC1", "02/15/2012", "02/19/2012", new BigDecimal(2), aLeaveBlock.build());
Assert.assertTrue("There should be 0 error message, there were " + errors.size() + " errors" , errors.size()== 0);
//Changing to an earn code within same accrual category, testAC with spanning days.
errors = LeaveCalendarValidationUtil.validateLeaveAccrualRuleMaxUsage(ls, "EC2", "02/15/2012", "02/25/2012", new BigDecimal(1), aLeaveBlock.build());
Assert.assertTrue("There should be 1 error message, there were " + errors.size() + " errors" , errors.size()== 1);
}
@Test
public void testGetWarningTextForLeaveBlocks() throws Exception {
// create two leave blocks with two different earn codes
// earn code "ECA" has fmla=Y, has earn code group with warning messages
// earn Code "ECB" has fmla = N, has earn code group with warning messages
// earn code "ECC" does not have earn code group with warning messages
List<LeaveBlock> leaveBlocks = new ArrayList<LeaveBlock>();
LeaveBlockBo lbA = new LeaveBlockBo();
lbA.setEarnCode("ECA");
lbA.setLeaveDate(LocalDate.now().toDate());
leaveBlocks.add(LeaveBlockBo.to(lbA));
LeaveBlockBo lbB = new LeaveBlockBo();
lbB.setEarnCode("ECB");
lbB.setLeaveDate(LocalDate.now().toDate());
leaveBlocks.add(LeaveBlockBo.to(lbB));
LeaveBlockBo lbC = new LeaveBlockBo();
lbC.setEarnCode("ECC");
lbC.setLeaveDate(LocalDate.now().toDate());
leaveBlocks.add(LeaveBlockBo.to(lbC));
Map<String, Set<String>> allMessages = LeaveCalendarValidationUtil.getWarningMessagesForLeaveBlocks(leaveBlocks, LocalDate.now().toDateTimeAtStartOfDay(), new DateTime().plusDays(1));
int numberOfMessages = 0;
for (Set<String> msgs : allMessages.values()){
numberOfMessages += msgs.size();
}
Assert.assertTrue("There should be 2 warning messages, not " + numberOfMessages, numberOfMessages== 2);
for (Set<String> msgs : allMessages.values()){
for (String message : msgs) {
Assert.assertTrue("Warning message should be 'Test Message' or 'Test Message1'", message.equals("Test Message") || message.equals("Test Message1"));
}
}
}
/* In order for tests of the following form to work, need to change status of a document to enroute/approved
* without actually routing / approving the document. OR, set up a context within which these actions can be performed.
*/
/* @Test
public void testValidatePendingTransactions() throws Exception {
Assert.assertNull(null);
BalanceTransfer bt = new BalanceTransfer();
bt.setAmountTransferred(new BigDecimal(1.0));
bt.setTransferAmount(new BigDecimal(1.0));
bt.setForfeitedAmount(new BigDecimal(1.0));
bt.setAccrualCategoryRule("");
bt.setEffectiveDate(TKUtils.getCurrentDate());
bt.setFromAccrualCategory("testAC");
bt.setToAccrualCategory("testAC2");
bt.setPrincipalId("admin");
mockSubmitToWorkflow(bt);
Calendar cal = Calendar.getInstance();
cal.setTime(TKUtils.getCurrentDate());
cal.add(Calendar.MONTH, -1);
Date from = cal.getTime();
cal.add(Calendar.MONTH, 2);
Date to = cal.getTime();
Map<String,Set<String>> allMessages = new HashMap<String, Set<String>>();
allMessages.putAll(LeaveCalendarValidationUtil.validatePendingTransactions("admin", LocalDate.fromDateFields(from), LocalDate.fromDateFields(to)));
Assert.assertTrue(allMessages.get("actionMessages").size() > 0);
Set<String> actionMessages = allMessages.get("actionMessage");
Assert.assertTrue("Should contain warning message for pending transaction", actionMessages.contains("A pending balance transfer exists on this calendar. " +
"It must be finalized before this calendar can be approved"));
}
private void mockSubmitToWorkflow(BalanceTransfer balanceTransfer) {
// TODO Auto-generated method stub
//balanceTransfer.setStatus(HrConstants.ROUTE_STATUS.ENROUTE);
EntityNamePrincipalName principalName = null;
if (balanceTransfer.getPrincipalId() != null) {
principalName = KimApiServiceLocator.getIdentityService().getDefaultNamesForPrincipalId(balanceTransfer.getPrincipalId());
}
MaintenanceDocument document = KRADServiceLocatorWeb.getMaintenanceDocumentService().setupNewMaintenanceDocument(BalanceTransfer.class.getName(),
"BalanceTransferDocumentType",KRADConstants.MAINTENANCE_NEW_ACTION);
String personName = (principalName != null && principalName.getDefaultName() != null) ? principalName.getDefaultName().getCompositeName() : StringUtils.EMPTY;
String date = TKUtils.formatDate(balanceTransfer.getEffectiveLocalDate());
document.getDocumentHeader().setDocumentDescription(personName + " (" + balanceTransfer.getPrincipalId() + ") - " + date);
Map<String,String[]> params = new HashMap<String,String[]>();
KRADServiceLocatorWeb.getMaintenanceDocumentService().setupMaintenanceObject(document, KRADConstants.MAINTENANCE_NEW_ACTION, params);
BalanceTransfer btObj = (BalanceTransfer) document.getNewMaintainableObject().getDataObject();
btObj.setAccrualCategoryRule(balanceTransfer.getAccrualCategoryRule());
btObj.setEffectiveDate(balanceTransfer.getEffectiveDate());
btObj.setForfeitedAmount(balanceTransfer.getForfeitedAmount());
btObj.setFromAccrualCategory(balanceTransfer.getFromAccrualCategory());
btObj.setPrincipalId(balanceTransfer.getPrincipalId());
btObj.setToAccrualCategory(balanceTransfer.getToAccrualCategory());
btObj.setTransferAmount(balanceTransfer.getTransferAmount());
btObj.setAmountTransferred(balanceTransfer.getAmountTransferred());
btObj.setSstoId(balanceTransfer.getSstoId());
btObj.setDocumentHeaderId(document.getDocumentHeader().getWorkflowDocument().getDocumentId());
//LmServiceLocator.getBalanceTransferService().saveOrUpdate(btObj);
document.getNewMaintainableObject().setDataObject(btObj);
try {
KRADServiceLocatorWeb.getDocumentService().saveDocument(document);
} catch (WorkflowException e) {
// TODO Auto-generated catch block
Assert.fail("Caught workflow exception while saving document");
}
document.getDocumentHeader().getWorkflowDocument().saveDocument("");
balanceTransfer = LmServiceLocator.getBalanceTransferService().transfer(btObj);
}*/
}
| |
/*
* Copyright 2014 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.guvnor.asset.management.client.editors.project.structure.widgets;
import com.github.gwtbootstrap.client.ui.Button;
import com.github.gwtbootstrap.client.ui.HelpInline;
import com.github.gwtbootstrap.client.ui.Label;
import com.github.gwtbootstrap.client.ui.RadioButton;
import com.github.gwtbootstrap.client.ui.TextBox;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.uibinder.client.UiHandler;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.Widget;
import org.guvnor.asset.management.client.i18n.Constants;
public class ProjectStructureDataViewImpl extends Composite
implements ProjectStructureDataView {
interface NewProjectStructureDataViewImplUIBinder
extends UiBinder<Widget, ProjectStructureDataViewImpl> {
}
private static NewProjectStructureDataViewImplUIBinder uiBinder = GWT.create( NewProjectStructureDataViewImplUIBinder.class );
@UiField
RadioButton isSingleModuleRadioButton;
@UiField
RadioButton isMultiModuleRadioButton;
@UiField
TextBox groupIdTextBox;
@UiField
TextBox artifactIdTextBox;
@UiField
TextBox versionTextBox;
@UiField
HelpInline isSingleModuleRadioButtonHelpInline;
@UiField
HelpInline isMultiModuleRadioButtonHelpInline;
@UiField
HelpInline groupIdTextBoxHelpInline;
@UiField
HelpInline artifactIdTextBoxHelpInline;
@UiField
HelpInline versionTextBoxHelpInline;
@UiField
Button initProjectStructureButton;
@UiField
Label projectTypeLabel;
@UiField
com.google.gwt.user.client.ui.Label singleProjectGroupId;
@UiField
TextBox singleProjectGroupIdTextBox;
@UiField
com.google.gwt.user.client.ui.Label singleProjectArtifactId;
@UiField
TextBox singleProjectArtifactIdTextBox;
@UiField
com.google.gwt.user.client.ui.Label singleProjectVersion;
@UiField
TextBox singleProjectVersionTextBox;
@UiField
RadioButton isUnmanagedRepositoryRadioButton;
@UiField
HelpInline isUnmanagedRepositoryButtonHelpInline;
private Presenter presenter;
private ViewMode mode;
public ProjectStructureDataViewImpl() {
initWidget( uiBinder.createAndBindUi( this ) );
clear();
setCurrentSingleProjectInfoVisible( false );
setMode( ViewMode.CREATE_STRUCTURE );
}
@Override
public void setPresenter( Presenter presenter ) {
this.presenter = presenter;
}
@Override
public void setGroupId( String groupId ) {
groupIdTextBox.setText( groupId );
}
@Override
public String getGroupId() {
return groupIdTextBox.getText();
}
@Override
public void setArtifactId( String artifactId ) {
artifactIdTextBox.setText( artifactId );
}
@Override
public String getArtifactId() {
return artifactIdTextBox.getText();
}
@Override
public void setVersion( String version ) {
versionTextBox.setText( version );
}
@Override
public String getVersionId() {
return versionTextBox.getText();
}
@Override
public void setMultiModule() {
enableMultiModeParams();
}
@Override
public void setSingleModule() {
enableSingleModeParams();
}
@Override
public boolean isSingleModule() {
return isSingleModuleRadioButton.getValue();
}
@Override
public boolean isMultiModule() {
return isMultiModuleRadioButton.getValue();
}
@Override
public boolean isUnmanagedRepository() {
return isUnmanagedRepositoryRadioButton.getValue();
}
@Override
public void setSingleProjectGroupId( String groupId ) {
singleProjectGroupIdTextBox.setText( groupId );
}
@Override
public void setSingleProjectArtifactId( String artifactId ) {
singleProjectArtifactIdTextBox.setText( artifactId );
}
@Override
public void setSingleProjectVersion( String version ) {
singleProjectVersionTextBox.setText( version );
}
@Override
public void setMode( ViewMode mode ) {
this.mode = mode;
if ( mode == ViewMode.CREATE_STRUCTURE ) {
projectTypeLabel.setText( Constants.INSTANCE.Project_structure_view_create_projectTypeLabel() );
isSingleModuleRadioButton.setText( Constants.INSTANCE.Project_structure_view_create_isSingleModuleRadioButton() );
isSingleModuleRadioButtonHelpInline.setText( Constants.INSTANCE.Project_structure_view_create_isSingleModuleRadioButtonHelpInline() );
isSingleModuleRadioButton.setEnabled( true );
isMultiModuleRadioButton.setText( Constants.INSTANCE.Project_structure_view_create_isMultiModuleRadioButton() );
isMultiModuleRadioButtonHelpInline.setText( Constants.INSTANCE.Project_structure_view_create_isMultiModuleRadioButtonHelpInline() );
isMultiModuleRadioButton.setEnabled( true );
groupIdTextBoxHelpInline.setText( Constants.INSTANCE.Project_structure_view_create_groupIdTextBoxHelpInline() );
artifactIdTextBoxHelpInline.setText( Constants.INSTANCE.Project_structure_view_create_artifactIdTextBoxHelpInline() );
versionTextBoxHelpInline.setText( Constants.INSTANCE.Project_structure_view_create_versionTextBoxHelpInline() );
isUnmanagedRepositoryRadioButton.setText( Constants.INSTANCE.Project_structure_view_create_isUnmanagedRepositoryRadioButton() );
isUnmanagedRepositoryButtonHelpInline.setText( Constants.INSTANCE.Project_structure_view_create_isUnmanagedRepositoryButtonHelpInline() );
isUnmanagedRepositoryRadioButton.setEnabled( true );
initProjectStructureButton.setText( Constants.INSTANCE.InitProjectStructure() );
initProjectStructureButton.setVisible( true );
enableSingleModeParams();
setCurrentSingleProjectInfoVisible( false );
} else if ( mode == ViewMode.EDIT_SINGLE_MODULE_PROJECT ) {
projectTypeLabel.setText( Constants.INSTANCE.Project_structure_view_edit_single_projectTypeLabel() );
isSingleModuleRadioButton.setText( Constants.INSTANCE.Project_structure_view_edit_single_isSingleModuleRadioButton() );
isSingleModuleRadioButtonHelpInline.setText( Constants.INSTANCE.Project_structure_view_edit_single_isSingleModuleRadioButtonHelpInline() );
isSingleModuleRadioButton.setEnabled( true );
isMultiModuleRadioButton.setText( Constants.INSTANCE.Project_structure_view_edit_single_isMultiModuleRadioButton() );
isMultiModuleRadioButtonHelpInline.setText( Constants.INSTANCE.Project_structure_view_edit_single_isMultiModuleRadioButtonHelpInline() );
isMultiModuleRadioButton.setEnabled( true );
groupIdTextBoxHelpInline.setText( Constants.INSTANCE.Project_structure_view_edit_single_groupIdTextBoxHelpInline() );
artifactIdTextBoxHelpInline.setText( Constants.INSTANCE.Project_structure_view_edit_single_artifactIdTextBoxHelpInline() );
versionTextBoxHelpInline.setText( Constants.INSTANCE.Project_structure_view_edit_single_versionTextBoxHelpInline() );
enableSingleModeParams();
isUnmanagedRepositoryButtonHelpInline.setVisible( false );
isUnmanagedRepositoryRadioButton.setVisible( false );
initProjectStructureButton.setText( Constants.INSTANCE.EditProject() );
initProjectStructureButton.setVisible( true );
setCurrentSingleProjectInfoVisible( true );
} else if ( mode == ViewMode.EDIT_MULTI_MODULE_PROJECT ) {
projectTypeLabel.setText( Constants.INSTANCE.Project_structure_view_edit_multi_projectTypeLabel() );
enableMultiModeParams();
isMultiModuleRadioButton.setEnabled( false );
isSingleModuleRadioButton.setVisible( false );
isSingleModuleRadioButtonHelpInline.setVisible( false );
isMultiModuleRadioButton.setText( Constants.INSTANCE.Project_structure_view_edit_multi_isMultiModuleRadioButton() );
isMultiModuleRadioButtonHelpInline.setText( Constants.INSTANCE.Project_structure_view_edit_multi_isMultiModuleRadioButtonHelpInline() );
groupIdTextBoxHelpInline.setText( Constants.INSTANCE.Project_structure_view_edit_multi_groupIdTextBoxHelpInline() );
artifactIdTextBoxHelpInline.setText( Constants.INSTANCE.Project_structure_view_edit_multi_artifactIdTextBoxHelpInline() );
versionTextBoxHelpInline.setText( Constants.INSTANCE.Project_structure_view_edit_multi_versionTextBoxHelpInline() );
initProjectStructureButton.setText( Constants.INSTANCE.SaveChanges() );
isUnmanagedRepositoryRadioButton.setVisible( false );
isUnmanagedRepositoryButtonHelpInline.setVisible( false );
setCurrentSingleProjectInfoVisible( false );
} else if ( mode == ViewMode.EDIT_UNMANAGED_REPOSITORY ) {
projectTypeLabel.setText( Constants.INSTANCE.Project_structure_view_edit_unmanaged_projectTypeLabel() );
//enable unmanaged mode fields
isUnmanagedRepositoryRadioButton.setVisible( true );
isUnmanagedRepositoryRadioButton.setValue( true );
isUnmanagedRepositoryRadioButton.setEnabled( false );
isUnmanagedRepositoryButtonHelpInline.setVisible( true );
isUnmanagedRepositoryRadioButton.setText( Constants.INSTANCE.Project_structure_view_edit_unmanaged_isUnmanagedRepositoryRadioButton() );
isUnmanagedRepositoryButtonHelpInline.setText( Constants.INSTANCE.Project_structure_view_edit_unmanaged_isUnmanagedRepositoryButtonHelpInline() );
//disable single mode fields
isSingleModuleRadioButton.setVisible( false );
isSingleModuleRadioButtonHelpInline.setVisible( false );
setCurrentSingleProjectInfoVisible( false );
//disable multi mode fields.
isMultiModuleRadioButton.setVisible( false );
isMultiModuleRadioButtonHelpInline.setVisible( false );
groupIdTextBox.setVisible( false );
groupIdTextBoxHelpInline.setVisible( false );
artifactIdTextBox.setVisible( false );
artifactIdTextBoxHelpInline.setVisible( false );
versionTextBox.setVisible( false );
versionTextBoxHelpInline.setVisible( false );
initProjectStructureButton.setVisible( false );
}
}
public void enableMultiModeParams() {
enableModeParams( ViewMode.EDIT_MULTI_MODULE_PROJECT );
}
public void enableSingleModeParams() {
enableModeParams( ViewMode.EDIT_SINGLE_MODULE_PROJECT );
}
public void enableUnmanagedStructureMode() {
//TODO
}
public void clear() {
singleProjectGroupIdTextBox.setText( null );
singleProjectArtifactIdTextBox.setText( null );
singleProjectVersionTextBox.setText( null );
groupIdTextBox.setText( null );
artifactIdTextBox.setText( null );
versionTextBox.setText( null );
}
@Override
public void enableActions( boolean value ) {
isSingleModuleRadioButton.setEnabled( value );
isMultiModuleRadioButton.setEnabled( value );
initProjectStructureButton.setEnabled( value );
}
@Override public void setReadonly( boolean readonly ) {
groupIdTextBox.setReadOnly( readonly );
artifactIdTextBox.setReadOnly( readonly );
versionTextBox.setReadOnly( readonly );
singleProjectGroupIdTextBox.setReadOnly( readonly );
singleProjectArtifactIdTextBox.setReadOnly( readonly );
singleProjectVersionTextBox.setReadOnly( readonly );
enableActions( !readonly );
}
private void enableModeParams( ViewMode mode ) {
//single mode fields
isSingleModuleRadioButton.setVisible( true );
isSingleModuleRadioButtonHelpInline.setVisible( true );
isSingleModuleRadioButton.setValue( mode == ViewMode.EDIT_SINGLE_MODULE_PROJECT );
//multi mode fields.
isMultiModuleRadioButton.setVisible( true );
isMultiModuleRadioButtonHelpInline.setVisible( true );
isMultiModuleRadioButton.setValue( mode == ViewMode.EDIT_MULTI_MODULE_PROJECT );
isUnmanagedRepositoryRadioButton.setVisible( true );
isUnmanagedRepositoryButtonHelpInline.setVisible( true );
isUnmanagedRepositoryRadioButton.setValue( mode == ViewMode.EDIT_UNMANAGED_REPOSITORY );
groupIdTextBox.setVisible( mode == ViewMode.EDIT_MULTI_MODULE_PROJECT );
groupIdTextBoxHelpInline.setVisible( mode == ViewMode.EDIT_MULTI_MODULE_PROJECT );
artifactIdTextBox.setVisible( mode == ViewMode.EDIT_MULTI_MODULE_PROJECT );
artifactIdTextBoxHelpInline.setVisible( mode == ViewMode.EDIT_MULTI_MODULE_PROJECT );
versionTextBox.setVisible( mode == ViewMode.EDIT_MULTI_MODULE_PROJECT );
versionTextBoxHelpInline.setVisible( mode == ViewMode.EDIT_MULTI_MODULE_PROJECT );
}
private void setCurrentSingleProjectInfoVisible( boolean visible ) {
singleProjectGroupId.setVisible( visible );
singleProjectGroupIdTextBox.setVisible( visible );
singleProjectGroupIdTextBox.setReadOnly( true );
singleProjectArtifactId.setVisible( visible );
singleProjectArtifactIdTextBox.setVisible( visible );
singleProjectArtifactIdTextBox.setReadOnly( true );
singleProjectVersion.setVisible( visible );
singleProjectVersionTextBox.setVisible( visible );
singleProjectVersionTextBox.setReadOnly( true );
}
//UI handlers.
@UiHandler( "initProjectStructureButton" )
void onInitProjectStructureClick( final ClickEvent e ) {
if ( mode == ViewMode.CREATE_STRUCTURE ) {
presenter.onInitProjectStructure();
} else if ( mode == ViewMode.EDIT_SINGLE_MODULE_PROJECT ) {
if ( isSingleModuleRadioButton.getValue() ) {
presenter.onOpenSingleProject();
} else {
presenter.onConvertToMultiModule();
}
} else if ( mode == ViewMode.EDIT_MULTI_MODULE_PROJECT ) {
presenter.onSaveProjectStructure();
}
}
@UiHandler( "isMultiModuleRadioButton" )
void multiModuleCheckBoxClicked( final ClickEvent event ) {
    enableMultiModeParams();
    if ( mode == ViewMode.EDIT_SINGLE_MODULE_PROJECT ) {
        // Turn the action button into a "convert" action and pre-fill the
        // parent GAV from the current single project (artifactId is left
        // for the user to fill in).
        initProjectStructureButton.setText( Constants.INSTANCE.ConvertToMultiModule() );
        initProjectStructureButton.setVisible( true );
        groupIdTextBox.setText( singleProjectGroupIdTextBox.getText() );
        versionTextBox.setText( singleProjectVersionTextBox.getText() );
        isUnmanagedRepositoryRadioButton.setVisible( false );
        isUnmanagedRepositoryButtonHelpInline.setVisible( false );
    }
    presenter.onProjectModeChange();
}
@UiHandler( "isSingleModuleRadioButton" )
void singleModuleCheckBoxClicked( final ClickEvent event ) {
    enableSingleModeParams();
    if ( mode == ViewMode.EDIT_SINGLE_MODULE_PROJECT ) {
        // Offer to edit the project as-is and hide the unmanaged option.
        initProjectStructureButton.setText( Constants.INSTANCE.EditProject() );
        initProjectStructureButton.setVisible( true );
        isUnmanagedRepositoryRadioButton.setVisible( false );
        isUnmanagedRepositoryButtonHelpInline.setVisible( false );
    }
    presenter.onProjectModeChange();
}
@UiHandler( "isUnmanagedRepositoryRadioButton" )
// Switching to "unmanaged repository" mode only toggles the visible
// parameter widgets and notifies the presenter.
void setUnmanagedRepositoryRadioButtonClicked( final ClickEvent event ) {
enableModeParams( ViewMode.EDIT_UNMANAGED_REPOSITORY );
presenter.onProjectModeChange();
}
}
| |
/*
* Copyright (c) 2009 Julien Ponge. All rights reserved.
*
* <julien.ponge@gmail.com>
* http://julien.ponge.info/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This work is based on the LZMA SDK by Igor Pavlov.
* The LZMA SDK is placed under the public domain, and can be obtained from
*
* http://www.7-zip.org/sdk.html
*
* The LzmaInputStream and LzmaOutputStream classes were inspired by the
* work of Christopher League, although they are not derivative works.
*
* http://contrapunctus.net/league/haques/lzmajio/
*/
package lzma.sdk.lzma;
import lzma.sdk.ICodeProgress;
import lzma.sdk.rangecoder.BitTreeEncoder;
import java.io.IOException;
public class Encoder
{
// Match-finder selection: binary-tree finder hashing 2 or 4 bytes.
public static final int EMatchFinderTypeBT2 = 0;
public static final int EMatchFinderTypeBT4 = 1;
// "Infinity" sentinel price used to reset optimal-parse nodes
// (name typo "Ifinity" kept from the original LZMA SDK).
static final int kIfinityPrice = 0xFFFFFFF;
// Lookup table mapping small distances (< 2^11) to their position slot.
static byte[] g_FastPos = new byte[1 << 11];
static
{
    // Precompute the position-slot table: slot s covers 2^((s>>1)-1)
    // consecutive distances (slots 0 and 1 map to themselves).
    final int kFastSlots = 22;
    g_FastPos[0] = 0;
    g_FastPos[1] = 1;
    int index = 2;
    for (int slot = 2; slot < kFastSlots; slot++)
    {
        int count = 1 << ((slot >> 1) - 1);
        while (count-- != 0)
        {
            g_FastPos[index++] = (byte) slot;
        }
    }
}
static int getPosSlot(int pos)
{
    // Distances below 2^11 are looked up directly; larger ones reuse the
    // table on a shifted value, offsetting the slot to compensate.
    if (pos < (1 << 11))
    {
        return g_FastPos[pos];
    }
    return (pos < (1 << 21)) ? g_FastPos[pos >> 10] + 20 : g_FastPos[pos >> 20] + 40;
}
static int getPosSlot2(int pos)
{
    // Position slot for large distances, computed via shifted table
    // lookups with matching slot offsets.
    if (pos < (1 << 17))
    {
        return g_FastPos[pos >> 6] + 12;
    }
    return (pos < (1 << 27)) ? g_FastPos[pos >> 16] + 32 : g_FastPos[pos >> 26] + 52;
}
// Current coder state (literal/match history), as defined by Base.
int _state = Base.stateInit();
// Last byte emitted; selects the literal coder context.
byte _previousByte;
// The most recently used match distances (rep0..rep3).
int[] _repDistances = new int[Base.kNumRepDistances];
void baseInit()
{
    // Reset coder state, literal context byte and all repeat distances.
    _state = Base.stateInit();
    _previousByte = 0;
    int i = Base.kNumRepDistances;
    while (--i >= 0)
    {
        _repDistances[i] = 0;
    }
}
// Defaults: 2^22 = 4 MiB dictionary, 0x20 = 32 "fast bytes".
static final int kDefaultDictionaryLogSize = 22;
static final int kNumFastBytesDefault = 0x20;
// Encodes literal bytes through per-context adaptive bit models; the
// context is derived from the position (posBits) and the previous byte
// (prevBits), and literals after a match can additionally be conditioned
// on the byte at the last match distance ("matched" mode).
class LiteralEncoder
{
// One literal sub-coder: 0x300 adaptive probabilities = 0x100 for plain
// literals plus 2 * 0x100 for the matched-literal states.
class Encoder2
{
short[] m_Encoders = new short[0x300];
public void init()
{
lzma.sdk.rangecoder.Encoder.initBitModels(m_Encoders);
}
// Encode one byte MSB-first through the adaptive bit-model tree.
public void encode(lzma.sdk.rangecoder.Encoder rangeEncoder, byte symbol) throws IOException
{
int context = 1;
for (int i = 7; i >= 0; i--)
{
int bit = ((symbol >> i) & 1);
rangeEncoder.encode(m_Encoders, context, bit);
context = (context << 1) | bit;
}
}
// Encode a byte while the leading bits still match matchByte: those bits
// use the matched-literal models, then fall back to plain models after
// the first mismatch.
public void encodeMatched(lzma.sdk.rangecoder.Encoder rangeEncoder, byte matchByte, byte symbol) throws IOException
{
int context = 1;
boolean same = true;
for (int i = 7; i >= 0; i--)
{
int bit = ((symbol >> i) & 1);
int state = context;
if (same)
{
int matchBit = ((matchByte >> i) & 1);
state += ((1 + matchBit) << 8);
same = (matchBit == bit);
}
rangeEncoder.encode(m_Encoders, state, bit);
context = (context << 1) | bit;
}
}
// Price (in range-coder units) of encoding symbol, with the same model
// selection as encode()/encodeMatched().
public int getPrice(boolean matchMode, byte matchByte, byte symbol)
{
int price = 0;
int context = 1;
int i = 7;
if (matchMode)
{
for (; i >= 0; i--)
{
int matchBit = (matchByte >> i) & 1;
int bit = (symbol >> i) & 1;
price += lzma.sdk.rangecoder.Encoder.getPrice(m_Encoders[((1 + matchBit) << 8) + context], bit);
context = (context << 1) | bit;
if (matchBit != bit)
{
// After the first mismatch the remaining bits use plain models.
i--;
break;
}
}
}
for (; i >= 0; i--)
{
int bit = (symbol >> i) & 1;
price += lzma.sdk.rangecoder.Encoder.getPrice(m_Encoders[context], bit);
context = (context << 1) | bit;
}
return price;
}
}
Encoder2[] m_Coders;
int m_NumPrevBits;
int m_NumPosBits;
int m_PosMask;
// Allocate the (1 << (numPrevBits + numPosBits)) sub-coders, reusing the
// existing ones when the configuration is unchanged.
public void create(int numPosBits, int numPrevBits)
{
if (m_Coders != null && m_NumPrevBits == numPrevBits && m_NumPosBits == numPosBits)
{
return;
}
m_NumPosBits = numPosBits;
m_PosMask = (1 << numPosBits) - 1;
m_NumPrevBits = numPrevBits;
int numStates = 1 << (m_NumPrevBits + m_NumPosBits);
m_Coders = new Encoder2[numStates];
for (int i = 0; i < numStates; i++)
{
m_Coders[i] = new Encoder2();
}
}
public void init()
{
int numStates = 1 << (m_NumPrevBits + m_NumPosBits);
for (int i = 0; i < numStates; i++)
{
m_Coders[i].init();
}
}
// Select the sub-coder from the low posBits of the position and the high
// prevBits of the previous byte.
public Encoder2 getSubCoder(int pos, byte prevByte)
{
return m_Coders[((pos & m_PosMask) << m_NumPrevBits) + ((prevByte & 0xFF) >>> (8 - m_NumPrevBits))];
}
}
// Encodes match lengths: two choice bits select between the low, mid and
// high symbol ranges; the low and mid bit-trees are conditioned on
// posState, the high tree is shared.
class LenEncoder
{
short[] _choice = new short[2];
BitTreeEncoder[] _lowCoder = new BitTreeEncoder[Base.kNumPosStatesEncodingMax];
BitTreeEncoder[] _midCoder = new BitTreeEncoder[Base.kNumPosStatesEncodingMax];
BitTreeEncoder _highCoder = new BitTreeEncoder(Base.kNumHighLenBits);
public LenEncoder()
{
for (int posState = 0; posState < Base.kNumPosStatesEncodingMax; posState++)
{
_lowCoder[posState] = new BitTreeEncoder(Base.kNumLowLenBits);
_midCoder[posState] = new BitTreeEncoder(Base.kNumMidLenBits);
}
}
// Reset the probability models; only the first numPosStates coders are
// re-initialized.
public void init(int numPosStates)
{
lzma.sdk.rangecoder.Encoder.initBitModels(_choice);
for (int posState = 0; posState < numPosStates; posState++)
{
_lowCoder[posState].init();
_midCoder[posState].init();
}
_highCoder.init();
}
// Encode a zero-based length symbol for the given posState.
public void encode(lzma.sdk.rangecoder.Encoder rangeEncoder, int symbol, int posState) throws IOException
{
if (symbol < Base.kNumLowLenSymbols)
{
rangeEncoder.encode(_choice, 0, 0);
_lowCoder[posState].encode(rangeEncoder, symbol);
}
else
{
symbol -= Base.kNumLowLenSymbols;
rangeEncoder.encode(_choice, 0, 1);
if (symbol < Base.kNumMidLenSymbols)
{
rangeEncoder.encode(_choice, 1, 0);
_midCoder[posState].encode(rangeEncoder, symbol);
}
else
{
rangeEncoder.encode(_choice, 1, 1);
_highCoder.encode(rangeEncoder, symbol - Base.kNumMidLenSymbols);
}
}
}
// Fill prices[st .. st+numSymbols) with the price of each length symbol
// for posState, including the cost of the range-choice bits.
public void setPrices(int posState, int numSymbols, int[] prices, int st)
{
int a0 = lzma.sdk.rangecoder.Encoder.getPrice0(_choice[0]);
int a1 = lzma.sdk.rangecoder.Encoder.getPrice1(_choice[0]);
int b0 = a1 + lzma.sdk.rangecoder.Encoder.getPrice0(_choice[1]);
int b1 = a1 + lzma.sdk.rangecoder.Encoder.getPrice1(_choice[1]);
int i;
for (i = 0; i < Base.kNumLowLenSymbols; i++)
{
if (i >= numSymbols)
{
return;
}
prices[st + i] = a0 + _lowCoder[posState].getPrice(i);
}
for (; i < Base.kNumLowLenSymbols + Base.kNumMidLenSymbols; i++)
{
if (i >= numSymbols)
{
return;
}
prices[st + i] = b0 + _midCoder[posState].getPrice(i - Base.kNumLowLenSymbols);
}
for (; i < numSymbols; i++)
{
prices[st + i] = b1 + _highCoder.getPrice(i - Base.kNumLowLenSymbols - Base.kNumMidLenSymbols);
}
}
}
// LenEncoder with a lazily refreshed price cache: after _tableSize encodes
// for a posState, that posState's row of prices is recomputed so prices
// track the adapting probability models.
class LenPriceTableEncoder extends LenEncoder
{
int[] _prices = new int[Base.kNumLenSymbols << Base.kNumPosStatesBitsEncodingMax];
int _tableSize;
int[] _counters = new int[Base.kNumPosStatesEncodingMax];
public void setTableSize(int tableSize)
{
_tableSize = tableSize;
}
// O(1) lookup into the cached table.
public int getPrice(int symbol, int posState)
{
return _prices[posState * Base.kNumLenSymbols + symbol];
}
// Recompute one posState row and reset its refresh counter.
void updateTable(int posState)
{
setPrices(posState, _tableSize, _prices, posState * Base.kNumLenSymbols);
_counters[posState] = _tableSize;
}
public void updateTables(int numPosStates)
{
for (int posState = 0; posState < numPosStates; posState++)
{
updateTable(posState);
}
}
public void encode(lzma.sdk.rangecoder.Encoder rangeEncoder, int symbol, int posState) throws IOException
{
super.encode(rangeEncoder, symbol, posState);
// Refresh the cached prices once enough symbols have shifted the models.
if (--_counters[posState] == 0)
{
updateTable(posState);
}
}
}
// Number of look-ahead positions the optimal parser may buffer.
static final int kNumOpts = 1 << 12;
// One node of the optimal-parse dynamic program: the cheapest known way to
// reach a position, plus back-links to reconstruct the chosen path.
class Optimal
{
public int State;
// True if the step into this node ended with a literal; Prev2 marks the
// combined match+literal step, with PosPrev2/BackPrev2 describing the
// match part.
public boolean Prev1IsChar;
public boolean Prev2;
public int PosPrev2;
public int BackPrev2;
public int Price;
public int PosPrev;
public int BackPrev;
// Values of rep0..rep3 at this node.
public int Backs0;
public int Backs1;
public int Backs2;
public int Backs3;
// Mark this node as reached by a literal (BackPrev == -1).
public void makeAsChar()
{
BackPrev = -1;
Prev1IsChar = false;
}
// Mark this node as reached by a 1-byte rep0 match (BackPrev == 0).
public void makeAsShortRep()
{
BackPrev = 0;
Prev1IsChar = false;
}
public boolean isShortRep()
{
return (BackPrev == 0);
}
}
// Optimal-parse node buffer.
Optimal[] _optimum = new Optimal[kNumOpts];
lzma.sdk.lz.BinTree _matchFinder = null;
lzma.sdk.rangecoder.Encoder _rangeEncoder = new lzma.sdk.rangecoder.Encoder();
// Adaptive probability models, indexed by coder state (and posState where
// the array is sized with kNumPosStatesBitsMax).
short[] _isMatch = new short[Base.kNumStates << Base.kNumPosStatesBitsMax];
short[] _isRep = new short[Base.kNumStates];
short[] _isRepG0 = new short[Base.kNumStates];
short[] _isRepG1 = new short[Base.kNumStates];
short[] _isRepG2 = new short[Base.kNumStates];
short[] _isRep0Long = new short[Base.kNumStates << Base.kNumPosStatesBitsMax];
BitTreeEncoder[] _posSlotEncoder = new BitTreeEncoder[Base.kNumLenToPosStates]; // kNumPosSlotBits
short[] _posEncoders = new short[Base.kNumFullDistances - Base.kEndPosModelIndex];
BitTreeEncoder _posAlignEncoder = new BitTreeEncoder(Base.kNumAlignBits);
LenPriceTableEncoder _lenEncoder = new LenPriceTableEncoder();
LenPriceTableEncoder _repMatchLenEncoder = new LenPriceTableEncoder();
LiteralEncoder _literalEncoder = new LiteralEncoder();
// (length, distance) pairs returned by the match finder.
int[] _matchDistances = new int[Base.kMatchMaxLen * 2 + 2];
int _numFastBytes = kNumFastBytesDefault;
int _longestMatchLength;
int _numDistancePairs;
// How far the match finder has advanced past the last encoded position.
int _additionalOffset;
int _optimumEndIndex;
int _optimumCurrentIndex;
boolean _longestMatchWasFound;
// Cached price tables, rebuilt periodically (see codeOneBlock).
int[] _posSlotPrices = new int[1 << (Base.kNumPosSlotBits + Base.kNumLenToPosStatesBits)];
int[] _distancesPrices = new int[Base.kNumFullDistances << Base.kNumLenToPosStatesBits];
int[] _alignPrices = new int[Base.kAlignTableSize];
int _alignPriceCount;
int _distTableSize = (kDefaultDictionaryLogSize * 2);
// Encoding parameters (pb/lp/lc in LZMA terms) and dictionary size.
int _posStateBits = 2;
int _posStateMask = (4 - 1);
int _numLiteralPosStateBits = 0;
int _numLiteralContextBits = 3;
int _dictionarySize = (1 << kDefaultDictionaryLogSize);
int _dictionarySizePrev = -1;
int _numFastBytesPrev = -1;
// Total number of input bytes encoded so far.
long nowPos64;
boolean _finished;
java.io.InputStream _inStream;
int _matchFinderType = EMatchFinderTypeBT4;
boolean _writeEndMark = false;
boolean _needReleaseMFStream = false;
void create()
{
    // Lazily build the match finder: BT2 hashes 2 bytes, BT4 hashes 4.
    if (_matchFinder == null)
    {
        _matchFinder = new lzma.sdk.lz.BinTree(_matchFinderType == EMatchFinderTypeBT2 ? 2 : 4);
    }
    _literalEncoder.create(_numLiteralPosStateBits, _numLiteralContextBits);
    // Re-allocate the search window only when a relevant parameter changed.
    if (_dictionarySize != _dictionarySizePrev || _numFastBytesPrev != _numFastBytes)
    {
        _matchFinder.create(_dictionarySize, kNumOpts, _numFastBytes, Base.kMatchMaxLen + 1);
        _dictionarySizePrev = _dictionarySize;
        _numFastBytesPrev = _numFastBytes;
    }
}
public Encoder()
{
    // Pre-allocate the optimal-parse nodes and one position-slot bit tree
    // per length-to-pos-state class.
    int n = kNumOpts;
    while (--n >= 0)
    {
        _optimum[n] = new Optimal();
    }
    for (int s = Base.kNumLenToPosStates - 1; s >= 0; s--)
    {
        _posSlotEncoder[s] = new BitTreeEncoder(Base.kNumPosSlotBits);
    }
}
// Reset the whole encoder (coder state, range coder and every adaptive
// probability model) before encoding a new stream.
void init()
{
baseInit();
_rangeEncoder.init();
lzma.sdk.rangecoder.Encoder.initBitModels(_isMatch);
lzma.sdk.rangecoder.Encoder.initBitModels(_isRep0Long);
lzma.sdk.rangecoder.Encoder.initBitModels(_isRep);
lzma.sdk.rangecoder.Encoder.initBitModels(_isRepG0);
lzma.sdk.rangecoder.Encoder.initBitModels(_isRepG1);
lzma.sdk.rangecoder.Encoder.initBitModels(_isRepG2);
lzma.sdk.rangecoder.Encoder.initBitModels(_posEncoders);
_literalEncoder.init();
for (int i = 0; i < Base.kNumLenToPosStates; i++)
{
_posSlotEncoder[i].init();
}
// Length coders only use the posStates implied by _posStateBits.
_lenEncoder.init(1 << _posStateBits);
_repMatchLenEncoder.init(1 << _posStateBits);
_posAlignEncoder.init();
_longestMatchWasFound = false;
_optimumEndIndex = 0;
_optimumCurrentIndex = 0;
_additionalOffset = 0;
}
// Ask the match finder for all matches at the current position (advancing
// it by one). (length, distance) pairs land in _matchDistances with the
// count in _numDistancePairs; returns the longest match length, extended
// past the fast-bytes cap when it exactly hits it.
int readMatchDistances() throws java.io.IOException
{
int lenRes = 0;
_numDistancePairs = _matchFinder.getMatches(_matchDistances);
if (_numDistancePairs > 0)
{
lenRes = _matchDistances[_numDistancePairs - 2];
if (lenRes == _numFastBytes)
{
// The finder stops at _numFastBytes; extend the best match manually.
lenRes += _matchFinder.getMatchLen(lenRes - 1, _matchDistances[_numDistancePairs - 1],
Base.kMatchMaxLen - lenRes);
}
}
// The finder is now one byte ahead of the last encoded position.
_additionalOffset++;
return lenRes;
}
void movePos(int num) throws java.io.IOException
{
    // Skip num positions in the match finder, tracking how far it has run
    // ahead of the last encoded position.
    if (num <= 0)
    {
        return;
    }
    _matchFinder.skip(num);
    _additionalOffset += num;
}
int getRepLen1Price(int state, int posState)
{
    // Price of a one-byte repeat of the most recent distance ("short rep"):
    // rep0 selected (isRepG0 = 0) and length-1 flag (isRep0Long = 0).
    int repG0Price = lzma.sdk.rangecoder.Encoder.getPrice0(_isRepG0[state]);
    int rep0LongPrice = lzma.sdk.rangecoder.Encoder.getPrice0(
            _isRep0Long[(state << Base.kNumPosStatesBitsMax) + posState]);
    return repG0Price + rep0LongPrice;
}
int getPureRepPrice(int repIndex, int state, int posState)
{
    // Price of selecting repeat distance repIndex (0..3), excluding the
    // length price. Early returns mirror the isRepG0/G1/G2 decision tree.
    if (repIndex == 0)
    {
        return lzma.sdk.rangecoder.Encoder.getPrice0(_isRepG0[state])
                + lzma.sdk.rangecoder.Encoder.getPrice1(_isRep0Long[(state << Base.kNumPosStatesBitsMax) + posState]);
    }
    int price = lzma.sdk.rangecoder.Encoder.getPrice1(_isRepG0[state]);
    if (repIndex == 1)
    {
        return price + lzma.sdk.rangecoder.Encoder.getPrice0(_isRepG1[state]);
    }
    price += lzma.sdk.rangecoder.Encoder.getPrice1(_isRepG1[state]);
    return price + lzma.sdk.rangecoder.Encoder.getPrice(_isRepG2[state], repIndex - 2);
}
int getRepPrice(int repIndex, int len, int state, int posState)
{
    // Full rep-match price: distance-selection price plus length price.
    return getPureRepPrice(repIndex, state, posState)
            + _repMatchLenEncoder.getPrice(len - Base.kMatchMinLen, posState);
}
int getPosLenPrice(int pos, int len, int posState)
{
    // Price of a normal match with distance pos and length len. Small
    // distances use the fully tabulated prices; large ones combine the
    // position-slot price with the align-bits price.
    int lenToPosState = Base.getLenToPosState(len);
    int distancePrice = (pos < Base.kNumFullDistances)
            ? _distancesPrices[(lenToPosState * Base.kNumFullDistances) + pos]
            : _posSlotPrices[(lenToPosState << Base.kNumPosSlotBits) + getPosSlot2(pos)]
                    + _alignPrices[pos & Base.kAlignMask];
    return distancePrice + _lenEncoder.getPrice(len - Base.kMatchMinLen, posState);
}
// Walk the optimal-parse back-links from position cur down to 0, reversing
// them so the path can be replayed forward by later getOptimum() calls;
// returns the first step's end position and leaves its distance in backRes.
int backward(int cur)
{
_optimumEndIndex = cur;
int posMem = _optimum[cur].PosPrev;
int backMem = _optimum[cur].BackPrev;
do
{
if (_optimum[cur].Prev1IsChar)
{
// A literal (and possibly a preceding match) was folded into this
// step; materialize the intermediate node(s).
_optimum[posMem].makeAsChar();
_optimum[posMem].PosPrev = posMem - 1;
if (_optimum[cur].Prev2)
{
_optimum[posMem - 1].Prev1IsChar = false;
_optimum[posMem - 1].PosPrev = _optimum[cur].PosPrev2;
_optimum[posMem - 1].BackPrev = _optimum[cur].BackPrev2;
}
}
// Reverse the link: the predecessor now points forward to cur.
int posPrev = posMem;
int backCur = backMem;
backMem = _optimum[posPrev].BackPrev;
posMem = _optimum[posPrev].PosPrev;
_optimum[posPrev].BackPrev = backCur;
_optimum[posPrev].PosPrev = cur;
cur = posPrev;
}
while (cur > 0);
backRes = _optimum[0].BackPrev;
_optimumCurrentIndex = _optimum[0].PosPrev;
return _optimumCurrentIndex;
}
// Scratch arrays: candidate repeat distances and their match lengths at
// the current position.
int[] reps = new int[Base.kNumRepDistances];
int[] repLens = new int[Base.kNumRepDistances];
// Distance chosen by the last getOptimum() call (-1 = literal,
// 0..3 = rep index, >= 4 = normal distance + kNumRepDistances).
int backRes;
// Optimal parser: decides the length (return value) and distance (backRes)
// of the next symbol to emit, by dynamic programming over up to kNumOpts
// look-ahead positions. Subsequent calls replay the rest of a computed path.
int getOptimum(int position) throws IOException
{
// First drain any path already computed by a previous call.
if (_optimumEndIndex != _optimumCurrentIndex)
{
int lenRes = _optimum[_optimumCurrentIndex].PosPrev - _optimumCurrentIndex;
backRes = _optimum[_optimumCurrentIndex].BackPrev;
_optimumCurrentIndex = _optimum[_optimumCurrentIndex].PosPrev;
return lenRes;
}
_optimumCurrentIndex = _optimumEndIndex = 0;
int lenMain, numDistancePairs;
if (!_longestMatchWasFound)
{
lenMain = readMatchDistances();
}
else
{
// A long match found during a previous parse run is consumed here.
lenMain = _longestMatchLength;
_longestMatchWasFound = false;
}
numDistancePairs = _numDistancePairs;
int numAvailableBytes = _matchFinder.getNumAvailableBytes() + 1;
if (numAvailableBytes < 2)
{
// Not enough data left for any match: emit a literal.
backRes = -1;
return 1;
}
if (numAvailableBytes > Base.kMatchMaxLen)
{
numAvailableBytes = Base.kMatchMaxLen;
}
// Evaluate the four repeat distances at this position.
int repMaxIndex = 0;
int i;
for (i = 0; i < Base.kNumRepDistances; i++)
{
reps[i] = _repDistances[i];
repLens[i] = _matchFinder.getMatchLen(0 - 1, reps[i], Base.kMatchMaxLen);
if (repLens[i] > repLens[repMaxIndex])
{
repMaxIndex = i;
}
}
// Fast paths: a rep match or normal match reaching the fast-bytes limit
// is taken greedily without running the DP.
if (repLens[repMaxIndex] >= _numFastBytes)
{
backRes = repMaxIndex;
int lenRes = repLens[repMaxIndex];
movePos(lenRes - 1);
return lenRes;
}
if (lenMain >= _numFastBytes)
{
backRes = _matchDistances[numDistancePairs - 1] + Base.kNumRepDistances;
movePos(lenMain - 1);
return lenMain;
}
byte currentByte = _matchFinder.getIndexByte(0 - 1);
byte matchByte = _matchFinder.getIndexByte(0 - _repDistances[0] - 1 - 1);
if (lenMain < 2 && currentByte != matchByte && repLens[repMaxIndex] < 2)
{
// Nothing worth matching: emit a literal.
backRes = -1;
return 1;
}
// Seed the DP: node 0 is the current state, node 1 the literal option.
_optimum[0].State = _state;
int posState = (position & _posStateMask);
_optimum[1].Price = lzma.sdk.rangecoder.Encoder.getPrice0(_isMatch[(_state << Base.kNumPosStatesBitsMax) + posState]) +
_literalEncoder.getSubCoder(position, _previousByte).getPrice(!Base.stateIsCharState(_state), matchByte, currentByte);
_optimum[1].makeAsChar();
int matchPrice = lzma.sdk.rangecoder.Encoder.getPrice1(_isMatch[(_state << Base.kNumPosStatesBitsMax) + posState]);
int repMatchPrice = matchPrice + lzma.sdk.rangecoder.Encoder.getPrice1(_isRep[_state]);
if (matchByte == currentByte)
{
// A short rep may beat the literal for the same single byte.
int shortRepPrice = repMatchPrice + getRepLen1Price(_state, posState);
if (shortRepPrice < _optimum[1].Price)
{
_optimum[1].Price = shortRepPrice;
_optimum[1].makeAsShortRep();
}
}
int lenEnd = ((lenMain >= repLens[repMaxIndex]) ? lenMain : repLens[repMaxIndex]);
if (lenEnd < 2)
{
backRes = _optimum[1].BackPrev;
return 1;
}
_optimum[1].PosPrev = 0;
_optimum[0].Backs0 = reps[0];
_optimum[0].Backs1 = reps[1];
_optimum[0].Backs2 = reps[2];
_optimum[0].Backs3 = reps[3];
// Reset all candidate end positions to "infinitely" expensive.
int len = lenEnd;
do
{
_optimum[len--].Price = kIfinityPrice;
}
while (len >= 2);
// Seed nodes reachable from position 0 via repeat matches.
for (i = 0; i < Base.kNumRepDistances; i++)
{
int repLen = repLens[i];
if (repLen < 2)
{
continue;
}
int price = repMatchPrice + getPureRepPrice(i, _state, posState);
do
{
int curAndLenPrice = price + _repMatchLenEncoder.getPrice(repLen - 2, posState);
Optimal optimum = _optimum[repLen];
if (curAndLenPrice < optimum.Price)
{
optimum.Price = curAndLenPrice;
optimum.PosPrev = 0;
optimum.BackPrev = i;
optimum.Prev1IsChar = false;
}
}
while (--repLen >= 2);
}
// Seed nodes reachable from position 0 via normal matches.
int normalMatchPrice = matchPrice + lzma.sdk.rangecoder.Encoder.getPrice0(_isRep[_state]);
len = ((repLens[0] >= 2) ? repLens[0] + 1 : 2);
if (len <= lenMain)
{
int offs = 0;
while (len > _matchDistances[offs])
{
offs += 2;
}
for (; ; len++)
{
int distance = _matchDistances[offs + 1];
int curAndLenPrice = normalMatchPrice + getPosLenPrice(distance, len, posState);
Optimal optimum = _optimum[len];
if (curAndLenPrice < optimum.Price)
{
optimum.Price = curAndLenPrice;
optimum.PosPrev = 0;
optimum.BackPrev = distance + Base.kNumRepDistances;
optimum.Prev1IsChar = false;
}
if (len == _matchDistances[offs])
{
offs += 2;
if (offs == numDistancePairs)
{
break;
}
}
}
}
// Main DP loop: extend the parse one look-ahead position at a time.
int cur = 0;
while (true)
{
cur++;
if (cur == lenEnd)
{
return backward(cur);
}
int newLen = readMatchDistances();
numDistancePairs = _numDistancePairs;
if (newLen >= _numFastBytes)
{
// A long match ends the parse; remember it for the next call.
_longestMatchLength = newLen;
_longestMatchWasFound = true;
return backward(cur);
}
position++;
// Reconstruct the coder state and rep distances at node cur from its
// back-links.
int posPrev = _optimum[cur].PosPrev;
int state;
if (_optimum[cur].Prev1IsChar)
{
posPrev--;
if (_optimum[cur].Prev2)
{
state = _optimum[_optimum[cur].PosPrev2].State;
if (_optimum[cur].BackPrev2 < Base.kNumRepDistances)
{
state = Base.stateUpdateRep(state);
}
else
{
state = Base.stateUpdateMatch(state);
}
}
else
{
state = _optimum[posPrev].State;
}
state = Base.stateUpdateChar(state);
}
else
{
state = _optimum[posPrev].State;
}
if (posPrev == cur - 1)
{
if (_optimum[cur].isShortRep())
{
state = Base.stateUpdateShortRep(state);
}
else
{
state = Base.stateUpdateChar(state);
}
}
else
{
int pos;
if (_optimum[cur].Prev1IsChar && _optimum[cur].Prev2)
{
posPrev = _optimum[cur].PosPrev2;
pos = _optimum[cur].BackPrev2;
state = Base.stateUpdateRep(state);
}
else
{
pos = _optimum[cur].BackPrev;
if (pos < Base.kNumRepDistances)
{
state = Base.stateUpdateRep(state);
}
else
{
state = Base.stateUpdateMatch(state);
}
}
Optimal opt = _optimum[posPrev];
// Rotate the rep-distance queue according to the step taken.
if (pos < Base.kNumRepDistances)
{
if (pos == 0)
{
reps[0] = opt.Backs0;
reps[1] = opt.Backs1;
reps[2] = opt.Backs2;
reps[3] = opt.Backs3;
}
else if (pos == 1)
{
reps[0] = opt.Backs1;
reps[1] = opt.Backs0;
reps[2] = opt.Backs2;
reps[3] = opt.Backs3;
}
else if (pos == 2)
{
reps[0] = opt.Backs2;
reps[1] = opt.Backs0;
reps[2] = opt.Backs1;
reps[3] = opt.Backs3;
}
else
{
reps[0] = opt.Backs3;
reps[1] = opt.Backs0;
reps[2] = opt.Backs1;
reps[3] = opt.Backs2;
}
}
else
{
reps[0] = (pos - Base.kNumRepDistances);
reps[1] = opt.Backs0;
reps[2] = opt.Backs1;
reps[3] = opt.Backs2;
}
}
_optimum[cur].State = state;
_optimum[cur].Backs0 = reps[0];
_optimum[cur].Backs1 = reps[1];
_optimum[cur].Backs2 = reps[2];
_optimum[cur].Backs3 = reps[3];
int curPrice = _optimum[cur].Price;
currentByte = _matchFinder.getIndexByte(0 - 1);
matchByte = _matchFinder.getIndexByte(0 - reps[0] - 1 - 1);
posState = (position & _posStateMask);
// Candidate: continue with a literal.
int curAnd1Price = curPrice +
lzma.sdk.rangecoder.Encoder.getPrice0(_isMatch[(state << Base.kNumPosStatesBitsMax) + posState]) +
_literalEncoder.getSubCoder(position, _matchFinder.getIndexByte(0 - 2)).
getPrice(!Base.stateIsCharState(state), matchByte, currentByte);
Optimal nextOptimum = _optimum[cur + 1];
boolean nextIsChar = false;
if (curAnd1Price < nextOptimum.Price)
{
nextOptimum.Price = curAnd1Price;
nextOptimum.PosPrev = cur;
nextOptimum.makeAsChar();
nextIsChar = true;
}
matchPrice = curPrice + lzma.sdk.rangecoder.Encoder.getPrice1(_isMatch[(state << Base.kNumPosStatesBitsMax) + posState]);
repMatchPrice = matchPrice + lzma.sdk.rangecoder.Encoder.getPrice1(_isRep[state]);
// Candidate: a short rep, unless the next node already prefers one.
if (matchByte == currentByte &&
!(nextOptimum.PosPrev < cur && nextOptimum.BackPrev == 0))
{
int shortRepPrice = repMatchPrice + getRepLen1Price(state, posState);
if (shortRepPrice <= nextOptimum.Price)
{
nextOptimum.Price = shortRepPrice;
nextOptimum.PosPrev = cur;
nextOptimum.makeAsShortRep();
nextIsChar = true;
}
}
int numAvailableBytesFull = _matchFinder.getNumAvailableBytes() + 1;
numAvailableBytesFull = Math.min(kNumOpts - 1 - cur, numAvailableBytesFull);
numAvailableBytes = numAvailableBytesFull;
if (numAvailableBytes < 2)
{
continue;
}
if (numAvailableBytes > _numFastBytes)
{
numAvailableBytes = _numFastBytes;
}
if (!nextIsChar && matchByte != currentByte)
{
// try Literal + rep0
int t = Math.min(numAvailableBytesFull - 1, _numFastBytes);
int lenTest2 = _matchFinder.getMatchLen(0, reps[0], t);
if (lenTest2 >= 2)
{
int state2 = Base.stateUpdateChar(state);
int posStateNext = (position + 1) & _posStateMask;
int nextRepMatchPrice = curAnd1Price +
lzma.sdk.rangecoder.Encoder.getPrice1(_isMatch[(state2 << Base.kNumPosStatesBitsMax) + posStateNext]) +
lzma.sdk.rangecoder.Encoder.getPrice1(_isRep[state2]);
{
int offset = cur + 1 + lenTest2;
while (lenEnd < offset)
{
_optimum[++lenEnd].Price = kIfinityPrice;
}
int curAndLenPrice = nextRepMatchPrice + getRepPrice(
0, lenTest2, state2, posStateNext);
Optimal optimum = _optimum[offset];
if (curAndLenPrice < optimum.Price)
{
optimum.Price = curAndLenPrice;
optimum.PosPrev = cur + 1;
optimum.BackPrev = 0;
optimum.Prev1IsChar = true;
optimum.Prev2 = false;
}
}
}
}
int startLen = 2; // speed optimization
// Candidates: repeat matches from node cur, plus rep + literal + rep0.
for (int repIndex = 0; repIndex < Base.kNumRepDistances; repIndex++)
{
int lenTest = _matchFinder.getMatchLen(0 - 1, reps[repIndex], numAvailableBytes);
if (lenTest < 2)
{
continue;
}
int lenTestTemp = lenTest;
do
{
while (lenEnd < cur + lenTest)
{
_optimum[++lenEnd].Price = kIfinityPrice;
}
int curAndLenPrice = repMatchPrice + getRepPrice(repIndex, lenTest, state, posState);
Optimal optimum = _optimum[cur + lenTest];
if (curAndLenPrice < optimum.Price)
{
optimum.Price = curAndLenPrice;
optimum.PosPrev = cur;
optimum.BackPrev = repIndex;
optimum.Prev1IsChar = false;
}
}
while (--lenTest >= 2);
lenTest = lenTestTemp;
if (repIndex == 0)
{
startLen = lenTest + 1;
}
// if (_maxMode)
if (lenTest < numAvailableBytesFull)
{
int t = Math.min(numAvailableBytesFull - 1 - lenTest, _numFastBytes);
int lenTest2 = _matchFinder.getMatchLen(lenTest, reps[repIndex], t);
if (lenTest2 >= 2)
{
int state2 = Base.stateUpdateRep(state);
int posStateNext = (position + lenTest) & _posStateMask;
int curAndLenCharPrice =
repMatchPrice + getRepPrice(repIndex, lenTest, state, posState) +
lzma.sdk.rangecoder.Encoder.getPrice0(_isMatch[(state2 << Base.kNumPosStatesBitsMax) + posStateNext]) +
_literalEncoder.getSubCoder(position + lenTest,
_matchFinder.getIndexByte(lenTest - 1 - 1)).getPrice(true,
_matchFinder.getIndexByte(lenTest - 1 - (reps[repIndex] + 1)),
_matchFinder.getIndexByte(lenTest - 1));
state2 = Base.stateUpdateChar(state2);
posStateNext = (position + lenTest + 1) & _posStateMask;
int nextMatchPrice = curAndLenCharPrice + lzma.sdk.rangecoder.Encoder.getPrice1(_isMatch[(state2 << Base.kNumPosStatesBitsMax) + posStateNext]);
int nextRepMatchPrice = nextMatchPrice + lzma.sdk.rangecoder.Encoder.getPrice1(_isRep[state2]);
// for(; lenTest2 >= 2; lenTest2--)
{
int offset = lenTest + 1 + lenTest2;
while (lenEnd < cur + offset)
{
_optimum[++lenEnd].Price = kIfinityPrice;
}
int curAndLenPrice = nextRepMatchPrice + getRepPrice(0, lenTest2, state2, posStateNext);
Optimal optimum = _optimum[cur + offset];
if (curAndLenPrice < optimum.Price)
{
optimum.Price = curAndLenPrice;
optimum.PosPrev = cur + lenTest + 1;
optimum.BackPrev = 0;
optimum.Prev1IsChar = true;
optimum.Prev2 = true;
optimum.PosPrev2 = cur;
optimum.BackPrev2 = repIndex;
}
}
}
}
}
// Candidates: normal matches from node cur (clamped to available bytes).
if (newLen > numAvailableBytes)
{
newLen = numAvailableBytes;
for (numDistancePairs = 0; newLen > _matchDistances[numDistancePairs]; numDistancePairs += 2)
{
}
_matchDistances[numDistancePairs] = newLen;
numDistancePairs += 2;
}
if (newLen >= startLen)
{
normalMatchPrice = matchPrice + lzma.sdk.rangecoder.Encoder.getPrice0(_isRep[state]);
while (lenEnd < cur + newLen)
{
_optimum[++lenEnd].Price = kIfinityPrice;
}
int offs = 0;
while (startLen > _matchDistances[offs])
{
offs += 2;
}
for (int lenTest = startLen; ; lenTest++)
{
int curBack = _matchDistances[offs + 1];
int curAndLenPrice = normalMatchPrice + getPosLenPrice(curBack, lenTest, posState);
Optimal optimum = _optimum[cur + lenTest];
if (curAndLenPrice < optimum.Price)
{
optimum.Price = curAndLenPrice;
optimum.PosPrev = cur;
optimum.BackPrev = curBack + Base.kNumRepDistances;
optimum.Prev1IsChar = false;
}
if (lenTest == _matchDistances[offs])
{
// Also consider match + literal + rep0 at this length.
if (lenTest < numAvailableBytesFull)
{
int t = Math.min(numAvailableBytesFull - 1 - lenTest, _numFastBytes);
int lenTest2 = _matchFinder.getMatchLen(lenTest, curBack, t);
if (lenTest2 >= 2)
{
int state2 = Base.stateUpdateMatch(state);
int posStateNext = (position + lenTest) & _posStateMask;
int curAndLenCharPrice = curAndLenPrice +
lzma.sdk.rangecoder.Encoder.getPrice0(_isMatch[(state2 << Base.kNumPosStatesBitsMax) + posStateNext]) +
_literalEncoder.getSubCoder(position + lenTest,
_matchFinder.getIndexByte(lenTest - 1 - 1)).
getPrice(true,
_matchFinder.getIndexByte(lenTest - (curBack + 1) - 1),
_matchFinder.getIndexByte(lenTest - 1));
state2 = Base.stateUpdateChar(state2);
posStateNext = (position + lenTest + 1) & _posStateMask;
int nextMatchPrice = curAndLenCharPrice + lzma.sdk.rangecoder.Encoder.getPrice1(_isMatch[(state2 << Base.kNumPosStatesBitsMax) + posStateNext]);
int nextRepMatchPrice = nextMatchPrice + lzma.sdk.rangecoder.Encoder.getPrice1(_isRep[state2]);
int offset = lenTest + 1 + lenTest2;
while (lenEnd < cur + offset)
{
_optimum[++lenEnd].Price = kIfinityPrice;
}
curAndLenPrice = nextRepMatchPrice + getRepPrice(0, lenTest2, state2, posStateNext);
optimum = _optimum[cur + offset];
if (curAndLenPrice < optimum.Price)
{
optimum.Price = curAndLenPrice;
optimum.PosPrev = cur + lenTest + 1;
optimum.BackPrev = 0;
optimum.Prev1IsChar = true;
optimum.Prev2 = true;
optimum.PosPrev2 = cur;
optimum.BackPrev2 = curBack + Base.kNumRepDistances;
}
}
}
offs += 2;
if (offs == numDistancePairs)
{
break;
}
}
}
}
}
}
// Emit the optional end-of-stream marker: a minimal-length match with the
// impossible distance 0xFFFFFFFF (all-ones posSlot, 30 direct bits, then
// the align bits).
void writeEndMarker(int posState) throws IOException
{
if (!_writeEndMark)
{
return;
}
_rangeEncoder.encode(_isMatch, (_state << Base.kNumPosStatesBitsMax) + posState, 1);
_rangeEncoder.encode(_isRep, _state, 0);
_state = Base.stateUpdateMatch(_state);
int len = Base.kMatchMinLen;
_lenEncoder.encode(_rangeEncoder, len - Base.kMatchMinLen, posState);
int posSlot = (1 << Base.kNumPosSlotBits) - 1;
int lenToPosState = Base.getLenToPosState(len);
_posSlotEncoder[lenToPosState].encode(_rangeEncoder, posSlot);
int footerBits = 30;
int posReduced = (1 << footerBits) - 1;
_rangeEncoder.encodeDirectBits(posReduced >> Base.kNumAlignBits, footerBits - Base.kNumAlignBits);
_posAlignEncoder.reverseEncode(_rangeEncoder, posReduced & Base.kAlignMask);
}
// Finish the stream: release the match-finder input, write the optional
// end marker, then flush the range coder's pending bytes.
void flush(int nowPos) throws IOException
{
releaseMFStream();
writeEndMarker(nowPos & _posStateMask);
_rangeEncoder.flushData();
_rangeEncoder.flushStream();
}
// Encode input until either the stream is exhausted (finished[0] stays
// true and the output is flushed) or roughly 4 KiB of input has been
// consumed, so the caller can report progress. inSize/outSize return the
// totals processed so far.
public void codeOneBlock(long[] inSize, long[] outSize, boolean[] finished) throws IOException
{
inSize[0] = 0;
outSize[0] = 0;
finished[0] = true;
if (_inStream != null)
{
_matchFinder.setStream(_inStream);
_matchFinder.init();
_needReleaseMFStream = true;
_inStream = null;
}
if (_finished)
{
return;
}
_finished = true;
long progressPosValuePrev = nowPos64;
if (nowPos64 == 0)
{
// The very first byte of the stream is always coded as a literal.
if (_matchFinder.getNumAvailableBytes() == 0)
{
flush((int) nowPos64);
return;
}
readMatchDistances();
int posState = (int) (nowPos64) & _posStateMask;
_rangeEncoder.encode(_isMatch, (_state << Base.kNumPosStatesBitsMax) + posState, 0);
_state = Base.stateUpdateChar(_state);
byte curByte = _matchFinder.getIndexByte(0 - _additionalOffset);
_literalEncoder.getSubCoder((int) (nowPos64), _previousByte).encode(_rangeEncoder, curByte);
_previousByte = curByte;
_additionalOffset--;
nowPos64++;
}
if (_matchFinder.getNumAvailableBytes() == 0)
{
flush((int) nowPos64);
return;
}
while (true)
{
// Ask the optimal parser what to emit next: len bytes at distance
// backRes (-1 = literal, 0..3 = rep index, >= 4 = normal distance).
int len = getOptimum((int) nowPos64);
int pos = backRes;
int posState = ((int) nowPos64) & _posStateMask;
int complexState = (_state << Base.kNumPosStatesBitsMax) + posState;
if (len == 1 && pos == -1)
{
// Literal: plain or "matched" depending on the previous symbol kind.
_rangeEncoder.encode(_isMatch, complexState, 0);
byte curByte = _matchFinder.getIndexByte(0 - _additionalOffset);
LiteralEncoder.Encoder2 subCoder = _literalEncoder.getSubCoder((int) nowPos64, _previousByte);
if (!Base.stateIsCharState(_state))
{
byte matchByte = _matchFinder.getIndexByte(0 - _repDistances[0] - 1 - _additionalOffset);
subCoder.encodeMatched(_rangeEncoder, matchByte, curByte);
}
else
{
subCoder.encode(_rangeEncoder, curByte);
}
_previousByte = curByte;
_state = Base.stateUpdateChar(_state);
}
else
{
_rangeEncoder.encode(_isMatch, complexState, 1);
if (pos < Base.kNumRepDistances)
{
// Repeat match: signal which of rep0..rep3 is being reused.
_rangeEncoder.encode(_isRep, _state, 1);
if (pos == 0)
{
_rangeEncoder.encode(_isRepG0, _state, 0);
if (len == 1)
{
_rangeEncoder.encode(_isRep0Long, complexState, 0);
}
else
{
_rangeEncoder.encode(_isRep0Long, complexState, 1);
}
}
else
{
_rangeEncoder.encode(_isRepG0, _state, 1);
if (pos == 1)
{
_rangeEncoder.encode(_isRepG1, _state, 0);
}
else
{
_rangeEncoder.encode(_isRepG1, _state, 1);
_rangeEncoder.encode(_isRepG2, _state, pos - 2);
}
}
if (len == 1)
{
_state = Base.stateUpdateShortRep(_state);
}
else
{
_repMatchLenEncoder.encode(_rangeEncoder, len - Base.kMatchMinLen, posState);
_state = Base.stateUpdateRep(_state);
}
// Move the reused distance to the front of the rep queue.
int distance = _repDistances[pos];
if (pos != 0)
{
for (int i = pos; i >= 1; i--)
{
_repDistances[i] = _repDistances[i - 1];
}
_repDistances[0] = distance;
}
}
else
{
// Normal match: length, position slot, then the slot's extra bits.
_rangeEncoder.encode(_isRep, _state, 0);
_state = Base.stateUpdateMatch(_state);
_lenEncoder.encode(_rangeEncoder, len - Base.kMatchMinLen, posState);
pos -= Base.kNumRepDistances;
int posSlot = getPosSlot(pos);
int lenToPosState = Base.getLenToPosState(len);
_posSlotEncoder[lenToPosState].encode(_rangeEncoder, posSlot);
if (posSlot >= Base.kStartPosModelIndex)
{
int footerBits = (posSlot >> 1) - 1;
int baseVal = ((2 | (posSlot & 1)) << footerBits);
int posReduced = pos - baseVal;
if (posSlot < Base.kEndPosModelIndex)
{
BitTreeEncoder.reverseEncode(_posEncoders,
baseVal - posSlot - 1, _rangeEncoder, footerBits, posReduced);
}
else
{
_rangeEncoder.encodeDirectBits(posReduced >> Base.kNumAlignBits, footerBits - Base.kNumAlignBits);
_posAlignEncoder.reverseEncode(_rangeEncoder, posReduced & Base.kAlignMask);
_alignPriceCount++;
}
}
// The new distance becomes rep0; older reps shift down.
int distance = pos;
for (int i = Base.kNumRepDistances - 1; i >= 1; i--)
{
_repDistances[i] = _repDistances[i - 1];
}
_repDistances[0] = distance;
_matchPriceCount++;
}
_previousByte = _matchFinder.getIndexByte(len - 1 - _additionalOffset);
}
_additionalOffset -= len;
nowPos64 += len;
if (_additionalOffset == 0)
{
// Periodically rebuild the cached price tables as models adapt.
// if (!_fastMode)
if (_matchPriceCount >= (1 << 7))
{
fillDistancesPrices();
}
if (_alignPriceCount >= Base.kAlignTableSize)
{
fillAlignPrices();
}
inSize[0] = nowPos64;
outSize[0] = _rangeEncoder.getProcessedSizeAdd();
if (_matchFinder.getNumAvailableBytes() == 0)
{
flush((int) nowPos64);
return;
}
// Yield to the caller roughly every 4 KiB of input for progress.
if (nowPos64 - progressPosValuePrev >= (1 << 12))
{
_finished = false;
finished[0] = false;
return;
}
}
}
}
// Releases the match finder's input stream if this encoder currently owns it.
// Safe to call repeatedly: the ownership flag is cleared after the release.
void releaseMFStream()
{
    if (_matchFinder == null || !_needReleaseMFStream)
    {
        return; // no stream attached, or already released
    }
    _matchFinder.releaseStream();
    _needReleaseMFStream = false;
}
/** Attaches the compressed-output stream to the range encoder. */
void setOutStream(java.io.OutputStream outStream)
{
_rangeEncoder.setStream(outStream);
}
/** Detaches the output stream from the range encoder. */
void releaseOutStream()
{
_rangeEncoder.releaseStream();
}
/** Releases both the match-finder input stream and the range-encoder output stream. */
void releaseStreams()
{
releaseMFStream();
releaseOutStream();
}
/**
 * Binds the input/output streams and (re)initializes all encoder state for a
 * new compression run: probability models, price tables, and length coders.
 * Must be called before {@link #codeOneBlock}.
 *
 * @param inStream  source of uncompressed data
 * @param outStream destination for the compressed range-coded output
 */
void setStreams(java.io.InputStream inStream, java.io.OutputStream outStream
)
{
_inStream = inStream;
_finished = false;
// create() allocates the match finder / buffers; init() resets probability models.
create();
setOutStream(outStream);
init();
// if (!_fastMode)
{
// Precompute bit-price tables used by getOptimum() to cost match distances.
fillDistancesPrices();
fillAlignPrices();
}
// Length coders only need tables up to the configured fast-bytes limit.
_lenEncoder.setTableSize(_numFastBytes + 1 - Base.kMatchMinLen);
_lenEncoder.updateTables(1 << _posStateBits);
_repMatchLenEncoder.setTableSize(_numFastBytes + 1 - Base.kMatchMinLen);
_repMatchLenEncoder.updateTables(1 << _posStateBits);
// Restart the absolute position counter for the new stream.
nowPos64 = 0;
}
// Single-element "out parameter" holders reused across codeOneBlock() calls
// to avoid per-iteration allocation. Not thread-safe (instance-level scratch).
long[] processedInSize = new long[1];
long[] processedOutSize = new long[1];
boolean[] finished = new boolean[1];
/**
 * Compresses the whole input stream to the output stream, block by block,
 * reporting progress after each block. Streams are always released on exit,
 * even when an exception is thrown.
 *
 * @param inStream  uncompressed input
 * @param outStream compressed output
 * @param inSize    unused here (kept for the public LZMA SDK signature)
 * @param outSize   unused here (kept for the public LZMA SDK signature)
 * @param progress  optional progress callback; may be null
 * @throws IOException if reading or writing fails
 */
public void code(java.io.InputStream inStream, java.io.OutputStream outStream,
long inSize, long outSize, ICodeProgress progress) throws IOException
{
    _needReleaseMFStream = false;
    try
    {
        setStreams(inStream, outStream);
        for (;;)
        {
            codeOneBlock(processedInSize, processedOutSize, finished);
            if (finished[0])
            {
                break; // falls through to the finally block, which releases the streams
            }
            if (progress != null)
            {
                progress.setProgress(processedInSize[0], processedOutSize[0]);
            }
        }
    }
    finally
    {
        releaseStreams();
    }
}
// LZMA header properties: 1 byte (lc/lp/pb packed) + 4 bytes dictionary size.
public static final int kPropSize = 5;
// Reusable scratch buffer for writeCoderProperties(); not thread-safe.
byte[] properties = new byte[kPropSize];
/**
 * Writes the 5-byte LZMA properties header: one byte packing the literal
 * context/position bits and position-state bits as (pb*5 + lp)*9 + lc,
 * followed by the dictionary size as a 32-bit little-endian integer.
 *
 * @param outStream destination for the header bytes
 * @throws IOException if the write fails
 */
public void writeCoderProperties(java.io.OutputStream outStream) throws IOException
{
    properties[0] = (byte) ((_posStateBits * 5 + _numLiteralPosStateBits) * 9 + _numLiteralContextBits);
    final int dictSize = _dictionarySize;
    // Dictionary size, least-significant byte first.
    properties[1] = (byte) dictSize;
    properties[2] = (byte) (dictSize >> 8);
    properties[3] = (byte) (dictSize >> 16);
    properties[4] = (byte) (dictSize >> 24);
    outStream.write(properties, 0, kPropSize);
}
// Scratch table for per-distance footer-bit prices computed by fillDistancesPrices().
int[] tempPrices = new int[Base.kNumFullDistances];
// Counts matches coded since the last distance-price refresh; reset by fillDistancesPrices().
int _matchPriceCount;
/**
 * Rebuilds the bit-price tables used to cost match distances during optimal
 * parsing: per-slot prices from the position-slot bit trees, plus the
 * reverse-coded footer-bit prices for small distances. Resets
 * {@code _matchPriceCount} so the tables are refreshed periodically.
 */
void fillDistancesPrices()
{
// Footer-bit prices for distances that use the reverse bit-tree coders.
for (int i = Base.kStartPosModelIndex; i < Base.kNumFullDistances; i++)
{
int posSlot = getPosSlot(i);
int footerBits = (posSlot >> 1) - 1;
int baseVal = ((2 | (posSlot & 1)) << footerBits);
tempPrices[i] = BitTreeEncoder.reverseGetPrice(_posEncoders,
baseVal - posSlot - 1, footerBits, i - baseVal);
}
for (int lenToPosState = 0; lenToPosState < Base.kNumLenToPosStates; lenToPosState++)
{
int posSlot;
BitTreeEncoder encoder = _posSlotEncoder[lenToPosState];
int st = (lenToPosState << Base.kNumPosSlotBits);
// Base price of each position slot from its bit tree.
for (posSlot = 0; posSlot < _distTableSize; posSlot++)
{
_posSlotPrices[st + posSlot] = encoder.getPrice(posSlot);
}
// Large slots also emit direct bits; add their fixed cost.
for (posSlot = Base.kEndPosModelIndex; posSlot < _distTableSize; posSlot++)
{
_posSlotPrices[st + posSlot] += ((((posSlot >> 1) - 1) - Base.kNumAlignBits) << lzma.sdk.rangecoder.Encoder.kNumBitPriceShiftBits);
}
int st2 = lenToPosState * Base.kNumFullDistances;
int i;
// Small distances map 1:1 to slots (no footer bits).
for (i = 0; i < Base.kStartPosModelIndex; i++)
{
_distancesPrices[st2 + i] = _posSlotPrices[st + i];
}
// Remaining distances: slot price plus precomputed footer-bit price.
for (; i < Base.kNumFullDistances; i++)
{
_distancesPrices[st2 + i] = _posSlotPrices[st + getPosSlot(i)] + tempPrices[i];
}
}
_matchPriceCount = 0;
}
/**
 * Refreshes the price table for the 4 low "align" bits of large match
 * distances and resets the refresh counter.
 */
void fillAlignPrices()
{
    for (int slot = Base.kAlignTableSize - 1; slot >= 0; slot--)
    {
        _alignPrices[slot] = _posAlignEncoder.reverseGetPrice(slot);
    }
    _alignPriceCount = 0;
}
/**
 * Accepts the compression "algorithm" level for API compatibility with the
 * LZMA SDK. The fast/max mode switches are disabled in this port (see the
 * commented-out assignments), so the value is ignored.
 *
 * @param algorithm requested algorithm level (ignored)
 * @return always {@code true}
 */
public boolean setAlgorithm(int algorithm)
{
/*
_fastMode = (algorithm == 0);
_maxMode = (algorithm >= 2);
*/
return true;
}
/**
 * Sets the dictionary size and derives the distance-table size
 * (2 * ceil(log2(dictionarySize))) used when pricing match distances.
 *
 * @param dictionarySize requested dictionary size in bytes; must be in [1, 2^29]
 * @return {@code true} if the size was accepted, {@code false} if out of range
 */
public boolean setDictionarySize(int dictionarySize)
{
    final int kDicLogSizeMaxCompress = 29;
    if (dictionarySize < 1 || dictionarySize > (1 << kDicLogSizeMaxCompress))
    {
        return false;
    }
    _dictionarySize = dictionarySize;
    // Smallest power of two >= dictionarySize determines the log size.
    int dicLogSize = 0;
    while (dictionarySize > (1 << dicLogSize))
    {
        dicLogSize++;
    }
    _distTableSize = dicLogSize * 2;
    return true;
}
/**
 * Sets the "fast bytes" limit — the match length at which the optimizer
 * stops searching for a better parse.
 *
 * @param numFastBytes requested limit; must be in [5, Base.kMatchMaxLen]
 * @return {@code true} if the value was accepted, {@code false} if out of range
 */
public boolean setNumFastBytes(int numFastBytes)
{
    final boolean inRange = numFastBytes >= 5 && numFastBytes <= Base.kMatchMaxLen;
    if (inRange)
    {
        _numFastBytes = numFastBytes;
    }
    return inRange;
}
/**
 * Selects the match-finder implementation (0..2). Changing the type after a
 * match finder has been created discards it so it is rebuilt lazily with the
 * new type (and forces the dictionary to be re-sized).
 *
 * @param matchFinderIndex match finder type index; must be in [0, 2]
 * @return {@code true} if the index was accepted, {@code false} if out of range
 */
public boolean setMatchFinder(int matchFinderIndex)
{
    if (matchFinderIndex < 0 || matchFinderIndex > 2)
    {
        return false;
    }
    final boolean typeChanged = (_matchFinderType != matchFinderIndex);
    _matchFinderType = matchFinderIndex;
    if (typeChanged && _matchFinder != null)
    {
        // Invalidate so create() rebuilds the finder with the new type.
        _dictionarySizePrev = -1;
        _matchFinder = null;
    }
    return true;
}
/**
 * Sets the three LZMA model parameters: literal context bits (lc), literal
 * position bits (lp), and position-state bits (pb). All three must be within
 * their encoding maxima or the call is rejected without changing state.
 *
 * @param lc literal context bits, in [0, Base.kNumLitContextBitsMax]
 * @param lp literal position bits, in [0, Base.kNumLitPosStatesBitsEncodingMax]
 * @param pb position-state bits, in [0, Base.kNumPosStatesBitsEncodingMax]
 * @return {@code true} if all parameters were accepted
 */
public boolean setLcLpPb(int lc, int lp, int pb)
{
    final boolean lcOk = lc >= 0 && lc <= Base.kNumLitContextBitsMax;
    final boolean lpOk = lp >= 0 && lp <= Base.kNumLitPosStatesBitsEncodingMax;
    final boolean pbOk = pb >= 0 && pb <= Base.kNumPosStatesBitsEncodingMax;
    if (!(lcOk && lpOk && pbOk))
    {
        return false;
    }
    _numLiteralPosStateBits = lp;
    _numLiteralContextBits = lc;
    _posStateBits = pb;
    // Mask selects the pb low bits of the stream position.
    _posStateMask = (1 << pb) - 1;
    return true;
}
/** Enables or disables writing the end-of-stream marker after the payload. */
public void setEndMarkerMode(boolean endMarkerMode)
{
_writeEndMark = endMarkerMode;
}
}
| |
/*
* Copyright 2008 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
/**
* Test for {@link UnreachableCodeElimination}.
*
*/
public class UnreachableCodeEliminationTest extends CompilerTestCase {
// When true, the pass also removes statements with no effect
// (bare name/literal expression statements), in addition to unreachable code.
private boolean removeNoOpStatements = true;
@Override
protected CompilerPass getProcessor(Compiler compiler) {
return new UnreachableCodeElimination(compiler, removeNoOpStatements);
}
// Reset the flag before each test so one test cannot leak state into the next.
@Override public void setUp() throws Exception {
super.setUp();
removeNoOpStatements = true;
}
// Statements after return/break in various control structures must be removed.
public void testRemoveUnreachableCode() {
// switch statement with stuff after "return"
test("function foo(){switch(foo){case 1:x=1;return;break;" +
"case 2:{x=2;return;break}default:}}",
"function foo(){switch(foo){case 1:x=1;return;" +
"case 2:{x=2}default:}}");
// if/else statements with returns
test("function bar(){if(foo)x=1;else if(bar){return;x=2}" +
"else{x=3;return;x=4}return 5;x=5}",
"function bar(){if(foo)x=1;else if(bar){return}" +
"else{x=3;return}return 5}");
// if statements without blocks
test("function foo(){if(x==3)return;x=4;y++;while(y==4){return;x=3}}",
"function foo(){if(x==3)return;x=4;y++;while(y==4){return}}");
// for/do/while loops
test("function baz(){for(i=0;i<n;i++){x=3;break;x=4}" +
"do{x=2;break;x=4}while(x==4);" +
"while(i<4){x=3;return;x=6}}",
"function baz(){for(i=0;i<n;){x=3;break}" +
"do{x=2;break}while(x==4);" +
"while(i<4){x=3;return}}");
// return statements on the same level as conditionals
test("function foo(){if(x==3){return}return 5;while(y==4){x++;return;x=4}}",
"function foo(){if(x==3){return}return 5}");
// return statements on the same level as conditionals
test("function foo(){return 3;for(;y==4;){x++;return;x=4}}",
"function foo(){return 3}");
// try/catch statements
test("function foo(){try{x=3;return x+1;x=5}catch(e){x=4;return 5;x=5}}",
"function foo(){try{x=3;return x+1}catch(e){x=4;return 5}}");
// try/finally statements
test("function foo(){try{x=3;return x+1;x=5}finally{x=4;return 5;x=5}}",
"function foo(){try{x=3;return x+1}finally{x=4;return 5}}");
// try/catch/finally statements
test("function foo(){try{x=3;return x+1;x=5}catch(e){x=3;return;x=2}" +
"finally{x=4;return 5;x=5}}",
"function foo(){try{x=3;return x+1}catch(e){x=3;return}" +
"finally{x=4;return 5}}");
// test a combination of blocks
test("function foo(){x=3;if(x==4){x=5;return;x=6}else{x=7}return 5;x=3}",
"function foo(){x=3;if(x==4){x=5;return}else{x=7}return 5}");
// test removing multiple statements
test("function foo() { return 1; var x = 2; var y = 10; return 2;}",
"function foo() { var y; var x; return 1}");
test("function foo() { return 1; x = 2; y = 10; return 2;}",
"function foo(){ return 1}");
}
// Bare property/name reads have no effect and should be dropped.
public void testRemoveUselessNameStatements() {
test("a;", "");
test("a.b;", "");
test("a.b.MyClass.prototype.memberName;", "");
}
public void testRemoveUselessStrings() {
test("'a';", "");
}
// A directive prologue ('use strict') looks like a useless string but must be kept.
public void testNoRemoveUseStrict() {
test("'use strict';", "'use strict'");
}
// With no-op removal disabled, bare name statements survive untouched.
public void testNoRemoveUselessNameStatements() {
removeNoOpStatements = false;
testSame("a;");
testSame("a.b;");
testSame("a.b.MyClass.prototype.memberName;");
}
public void testRemoveDo() {
test("do { print(1); break } while(1)", "do { print(1); break } while(1)");
test("while(1) { break; do { print(1); break } while(1) }",
"while(1) { break; do {} while(1) }");
}
// Literal-valued expression statements are no-ops in every statement position.
public void testRemoveUselessLiteralValueStatements() {
test("true;", "");
test("'hi';", "");
test("if (x) 1;", "");
test("while (x) 1;", "while (x);");
test("do 1; while (x);", "do ; while (x);");
test("for (;;) 1;", "for (;;);");
test("switch(x){case 1:true;case 2:'hi';default:true}",
"switch(x){case 1:case 2:default:}");
}
public void testConditionalDeadCode() {
test("function f() { if (1) return 5; else return 5; x = 1}",
"function f() { if (1) return 5; else return 5; }");
}
public void testSwitchCase() {
test("function f() { switch(x) { default: return 5; foo()}}",
"function f() { switch(x) { default: return 5;}}");
test("function f() { switch(x) { default: return; case 1: foo(); bar()}}",
"function f() { switch(x) { default: return; case 1: foo(); bar()}}");
test("function f() { switch(x) { default: return; case 1: return 5;bar()}}",
"function f() { switch(x) { default: return; case 1: return 5;}}");
}
// Empty catch blocks may be rewritten to finally; non-empty handlers are kept.
public void testTryCatchFinally() {
testSame("try {foo()} catch (e) {bar()}");
testSame("try { try {foo()} catch (e) {bar()}} catch (x) {bar()}");
test("try {var x = 1} catch (e) {e()}", "try {var x = 1} finally {}");
test("try {var x = 1} catch (e) {e()} finally {x()}",
" try {var x = 1} finally {x()}");
test("try {var x = 1} catch (e) {e()} finally {}",
"try {var x = 1} finally {}");
testSame("try {var x = 1} finally {x()}");
testSame("try {var x = 1} finally {}");
test("function f() {return; try{var x = 1}catch(e){} }",
"function f() {var x;}");
}
// Removed var statements still hoist their declarations to the top of scope.
public void testRemovalRequiresRedeclaration() {
test("while(1) { break; var x = 1}", "var x; while(1) { break } ");
test("while(1) { break; var x=1; var y=1}",
"var y; var x; while(1) { break } ");
}
// Assignments whose target object cannot escape are removable; others are not.
public void testAssignPropertyOnCreatedObject() {
testSame("this.foo = 3;");
testSame("a.foo = 3;");
testSame("bar().foo = 3;");
testSame("({}).foo = bar();");
testSame("(new X()).foo = 3;");
test("({}).foo = 3;", "");
test("(function() {}).prototype.toString = function(){};", "");
test("(function() {}).prototype['toString'] = function(){};", "");
test("(function() {}).prototype[f] = function(){};", "");
}
// A trailing bare "return" at the end of a function body is redundant.
public void testUselessUnconditionalReturn() {
test("function foo() { return }", " function foo() { }");
test("function foo() { return; return; x=1 }", "function foo() { }");
test("function foo() { return; return; var x=1}", "function foo() {var x}");
test("function foo() { return; function bar() {} }",
"function foo() { function bar() {} }" );
testSame("function foo() { return 5 }");
test("function f() {switch (a) { case 'a': return}}",
"function f() {switch (a) { case 'a': }}");
testSame("function f() {switch (a) { case 'a': case foo(): }}");
testSame("function f() {switch (a) {" +
"  default: return; case 'a': alert(1)}}");
testSame("function f() {switch (a) {" +
"  case 'a': return; default: alert(1)}}");
}
// A "continue" that is the last action of an iteration is redundant.
public void testUnlessUnconditionalContinue() {
test("for(;1;) {continue}", " for(;1;) {}");
test("for(;0;) {continue}", " for(;0;) {}");
testSame("X: for(;1;) { for(;1;) { if (x()) {continue X} x = 1}}");
test("for(;1;) { X: for(;1;) { if (x()) {continue X} }}",
"for(;1;) { X: for(;1;) { if (x()) {}}}");
test("do { continue } while(1);", "do { } while(1);");
}
// A "break" at the end of the construct it would exit anyway is redundant.
public void testUnlessUnconditonalBreak() {
test("switch (a) { case 'a': break }", "switch (a) { case 'a': }");
test("switch (a) { case 'a': break; case foo(): }",
"switch (a) { case 'a': case foo(): }");
test("switch (a) { default: break; case 'a': }",
"switch (a) { default: case 'a': }");
testSame("switch (a) { case 'a': alert(a); break; default: alert(a); }");
testSame("switch (a) { default: alert(a); break; case 'a': alert(a); }");
test("X: {switch (a) { case 'a': break X}}",
"X: {switch (a) { case 'a': }}");
testSame("X: {switch (a) { case 'a': if (a()) {break X} a = 1}}");
test("X: {switch (a) { case 'a': if (a()) {break X}}}",
"X: {switch (a) { case 'a': if (a()) {}}}");
test("X: {switch (a) { case 'a': if (a()) {break X}}}",
"X: {switch (a) { case 'a': if (a()) {}}}");
testSame("do { break } while(1);");
testSame("for(;1;) { break }");
}
public void testCascadedRemovalOfUnlessUnconditonalJumps() {
test("switch (a) { case 'a': break; case 'b': break; case 'c': break }",
"switch (a) { case 'a': break; case 'b': case 'c': }");
// Only one break removed per pass.
test("switch (a) { case 'a': break; case 'b': case 'c': }",
"switch (a) { case 'a': case 'b': case 'c': }");
test("function foo() {" +
"  switch (a) { case 'a':return; case 'b':return; case 'c':return }}",
"function foo() { switch (a) { case 'a':return; case 'b': case 'c': }}");
test("function foo() {" +
"  switch (a) { case 'a':return; case 'b': case 'c': }}",
"function foo() { switch (a) { case 'a': case 'b': case 'c': }}");
testSame("function foo() {" +
"switch (a) { case 'a':return 2; case 'b':return 1}}");
}
// Regression test: a break after an if/else where both branches return.
public void testIssue311() {
test("function a(b) {\n" +
"  switch (b.v) {\n" +
"    case 'SWITCH':\n" +
"      if (b.i >= 0) {\n" +
"        return b.o;\n" +
"      } else {\n" +
"        return;\n" +
"      }\n" +
"      break;\n" +
"  }\n" +
"}",
"function a(b) {\n" +
"  switch (b.v) {\n" +
"    case 'SWITCH':\n" +
"      if (b.i >= 0) {\n" +
"        return b.o;\n" +
"      } else {\n" +
"      }\n" +
"  }\n" +
"}");
}
// Regression tests for jumps out of finally blocks (labeled break/continue/return).
public void testIssue4177428a() {
test(
"f = function() {\n" +
"  var action;\n" +
"  a: {\n" +
"    var proto = null;\n" +
"    try {\n" +
"      proto = new Proto\n" +
"    } finally {\n" +
"      action = proto;\n" +
"      break a\n" +  // Remove this...
"    }\n" +
"  }\n" +
"  alert(action)\n" +  // but not this.
"};",
"f = function() {\n" +
"  var action;\n" +
"  a: {\n" +
"    var proto = null;\n" +
"    try {\n" +
"      proto = new Proto\n" +
"    } finally {\n" +
"      action = proto;\n" +
"    }\n" +
"  }\n" +
"  alert(action)\n" +  // but not this.
"};"
);
}
public void testIssue4177428b() {
test(
"f = function() {\n" +
"  var action;\n" +
"  a: {\n" +
"    var proto = null;\n" +
"    try {\n" +
"    try {\n" +
"      proto = new Proto\n" +
"    } finally {\n" +
"      action = proto;\n" +
"      break a\n" +  // Remove this...
"    }\n" +
"    } finally {\n" +
"    }\n" +
"  }\n" +
"  alert(action)\n" +  // but not this.
"};",
"f = function() {\n" +
"  var action;\n" +
"  a: {\n" +
"    var proto = null;\n" +
"    try {\n" +
"    try {\n" +
"      proto = new Proto\n" +
"    } finally {\n" +
"      action = proto;\n" +
"      break a\n" +  // Remove this...
"    }\n" +
"    } finally {\n" +
"    }\n" +
"  }\n" +
"  alert(action)\n" +  // but not this.
"};"
);
}
public void testIssue4177428c() {
test(
"f = function() {\n" +
"  var action;\n" +
"  a: {\n" +
"    var proto = null;\n" +
"    try {\n" +
"    } finally {\n" +
"    try {\n" +
"      proto = new Proto\n" +
"    } finally {\n" +
"      action = proto;\n" +
"      break a\n" +  // Remove this...
"    }\n" +
"    }\n" +
"  }\n" +
"  alert(action)\n" +  // but not this.
"};",
"f = function() {\n" +
"  var action;\n" +
"  a: {\n" +
"    var proto = null;\n" +
"    try {\n" +
"    } finally {\n" +
"    try {\n" +
"      proto = new Proto\n" +
"    } finally {\n" +
"      action = proto;\n" +
"    }\n" +
"    }\n" +
"  }\n" +
"  alert(action)\n" +  // but not this.
"};"
);
}
public void testIssue4177428_continue() {
test(
"f = function() {\n" +
"  var action;\n" +
"  a: do {\n" +
"    var proto = null;\n" +
"    try {\n" +
"      proto = new Proto\n" +
"    } finally {\n" +
"      action = proto;\n" +
"      continue a\n" +  // Remove this...
"    }\n" +
"  } while(false)\n" +
"  alert(action)\n" +  // but not this.
"};",
"f = function() {\n" +
"  var action;\n" +
"  a: do {\n" +
"    var proto = null;\n" +
"    try {\n" +
"      proto = new Proto\n" +
"    } finally {\n" +
"      action = proto;\n" +
"    }\n" +
"  } while (false)\n" +
"  alert(action)\n" +
"};"
);
}
public void testIssue4177428_return() {
test(
"f = function() {\n" +
"  var action;\n" +
"  a: {\n" +
"    var proto = null;\n" +
"    try {\n" +
"      proto = new Proto\n" +
"    } finally {\n" +
"      action = proto;\n" +
"      return\n" +  // Remove this...
"    }\n" +
"  }\n" +
"  alert(action)\n" +  // and this.
"};",
"f = function() {\n" +
"  var action;\n" +
"  a: {\n" +
"    var proto = null;\n" +
"    try {\n" +
"      proto = new Proto\n" +
"    } finally {\n" +
"      action = proto;\n" +
"    }\n" +
"  }\n" +
"};"
);
}
public void testIssue4177428_multifinally() {
testSame(
"a: {\n" +
"  try {\n" +
"    try {\n" +
"    } finally {\n" +
"      break a;\n" +
"    }\n" +
"  } finally {\n" +
"    x = 1;\n" +
"  }\n" +
"}");
}
// Declarations after a throw are dead code but must keep their hoisted names.
public void testIssue5215541_deadVarDeclar() {
testSame("throw 1; var x");
testSame("throw 1; function x() {}");
testSame("throw 1; var x; var y;");
test("throw 1; var x = foo", "var x; throw 1");
}
public void testForInLoop() {
testSame("for(var x in y) {}");
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math3.ml.clustering;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.apache.commons.math3.exception.MathIllegalArgumentException;
import org.apache.commons.math3.exception.MathIllegalStateException;
import org.apache.commons.math3.exception.NumberIsTooSmallException;
import org.apache.commons.math3.linear.MatrixUtils;
import org.apache.commons.math3.linear.RealMatrix;
import org.apache.commons.math3.ml.distance.DistanceMeasure;
import org.apache.commons.math3.ml.distance.EuclideanDistance;
import org.apache.commons.math3.random.JDKRandomGenerator;
import org.apache.commons.math3.random.RandomGenerator;
import org.apache.commons.math3.util.FastMath;
import org.apache.commons.math3.util.MathArrays;
import org.apache.commons.math3.util.MathUtils;
/**
* Fuzzy K-Means clustering algorithm.
* <p>
* The Fuzzy K-Means algorithm is a variation of the classical K-Means algorithm, with the
* major difference that a single data point is not uniquely assigned to a single cluster.
* Instead, each point i has a set of weights u<sub>ij</sub> which indicate the degree of membership
* to the cluster j.
* <p>
* The algorithm then tries to minimize the objective function:
* <pre>
* J = ∑<sub>i=1..C</sub>∑<sub>k=1..N</sub> u<sub>ik</sub><sup>m</sup>d<sub>ik</sub><sup>2</sup>
* </pre>
* with d<sub>ik</sub> being the distance between data point i and the cluster center k.
* <p>
* The algorithm requires two parameters:
* <ul>
* <li>k: the number of clusters
* <li>fuzziness: determines the level of cluster fuzziness, larger values lead to fuzzier clusters
* </ul>
 * Additionally, the following optional parameters are supported:
* <ul>
* <li>maxIterations: the maximum number of iterations
* <li>epsilon: the convergence criteria, default is 1e-3
* </ul>
* <p>
* The fuzzy variant of the K-Means algorithm is more robust with regard to the selection
* of the initial cluster centers.
*
* @param <T> type of the points to cluster
* @since 3.3
*/
public class FuzzyKMeansClusterer<T extends Clusterable> extends Clusterer<T> {
/** The default value for the convergence criteria. */
private static final double DEFAULT_EPSILON = 1e-3;
/** The number of clusters. */
private final int k;
/** The maximum number of iterations. */
private final int maxIterations;
/** The fuzziness factor. */
private final double fuzziness;
/** The convergence criteria. */
private final double epsilon;
/** Random generator for choosing initial centers. */
private final RandomGenerator random;
/** The membership matrix. */
private double[][] membershipMatrix;
/** The list of points used in the last call to {@link #cluster(Collection)}. */
private List<T> points;
/** The list of clusters resulting from the last call to {@link #cluster(Collection)}. */
private List<CentroidCluster<T>> clusters;
/**
 * Creates a new instance of a FuzzyKMeansClusterer.
 * <p>
 * The euclidean distance will be used as default distance measure.
 *
 * @param k the number of clusters to split the data into
 * @param fuzziness the fuzziness factor, must be &gt; 1.0
 * @throws NumberIsTooSmallException if {@code fuzziness <= 1.0}
 */
public FuzzyKMeansClusterer(final int k, final double fuzziness) throws NumberIsTooSmallException {
this(k, fuzziness, -1, new EuclideanDistance());
}
/**
 * Creates a new instance of a FuzzyKMeansClusterer.
 *
 * @param k the number of clusters to split the data into
 * @param fuzziness the fuzziness factor, must be &gt; 1.0
 * @param maxIterations the maximum number of iterations to run the algorithm for.
 *   If negative, no maximum will be used.
 * @param measure the distance measure to use
 * @throws NumberIsTooSmallException if {@code fuzziness <= 1.0}
 */
public FuzzyKMeansClusterer(final int k, final double fuzziness,
final int maxIterations, final DistanceMeasure measure)
throws NumberIsTooSmallException {
this(k, fuzziness, maxIterations, measure, DEFAULT_EPSILON, new JDKRandomGenerator());
}
/**
 * Creates a new instance of a FuzzyKMeansClusterer.
 *
 * @param k the number of clusters to split the data into
 * @param fuzziness the fuzziness factor, must be &gt; 1.0
 * @param maxIterations the maximum number of iterations to run the algorithm for.
 *   If negative, no maximum will be used.
 * @param measure the distance measure to use
 * @param epsilon the convergence criteria (default is 1e-3)
 * @param random random generator to use for choosing initial centers
 * @throws NumberIsTooSmallException if {@code fuzziness <= 1.0}
 */
public FuzzyKMeansClusterer(final int k, final double fuzziness,
final int maxIterations, final DistanceMeasure measure,
final double epsilon, final RandomGenerator random)
throws NumberIsTooSmallException {
super(measure);
if (fuzziness <= 1.0d) {
throw new NumberIsTooSmallException(fuzziness, 1.0, false);
}
this.k = k;
this.fuzziness = fuzziness;
this.maxIterations = maxIterations;
this.epsilon = epsilon;
this.random = random;
this.membershipMatrix = null;
this.points = null;
this.clusters = null;
}
/**
 * Return the number of clusters this instance will use.
 * @return the number of clusters
 */
public int getK() {
return k;
}
/**
 * Returns the fuzziness factor used by this instance.
 * @return the fuzziness factor
 */
public double getFuzziness() {
return fuzziness;
}
/**
 * Returns the maximum number of iterations this instance will use.
 * @return the maximum number of iterations, or -1 if no maximum is set
 */
public int getMaxIterations() {
return maxIterations;
}
/**
 * Returns the convergence criteria used by this instance.
 * @return the convergence criteria
 */
public double getEpsilon() {
return epsilon;
}
/**
 * Returns the random generator this instance will use.
 * @return the random generator
 */
public RandomGenerator getRandomGenerator() {
return random;
}
/**
 * Returns the {@code nxk} membership matrix, where {@code n} is the number
 * of data points and {@code k} the number of clusters.
 * <p>
 * The element U<sub>i,j</sub> represents the membership value for data point {@code i}
 * to cluster {@code j}.
 *
 * @return the membership matrix
 * @throws MathIllegalStateException if {@link #cluster(Collection)} has not been called before
 */
public RealMatrix getMembershipMatrix() {
if (membershipMatrix == null) {
throw new MathIllegalStateException();
}
return MatrixUtils.createRealMatrix(membershipMatrix);
}
/**
 * Returns an unmodifiable list of the data points used in the last
 * call to {@link #cluster(Collection)}.
 * @return the list of data points, or {@code null} if {@link #cluster(Collection)} has
 *   not been called before.
 */
public List<T> getDataPoints() {
return points;
}
/**
 * Returns the list of clusters resulting from the last call to {@link #cluster(Collection)}.
 * @return the list of clusters, or {@code null} if {@link #cluster(Collection)} has
 *   not been called before.
 */
public List<CentroidCluster<T>> getClusters() {
return clusters;
}
/**
 * Get the value of the objective function.
 * @return the objective function evaluation as double value
 * @throws MathIllegalStateException if {@link #cluster(Collection)} has not been called before
 */
public double getObjectiveFunctionValue() {
if (points == null || clusters == null) {
throw new MathIllegalStateException();
}
int i = 0;
double objFunction = 0.0;
for (final T point : points) {
int j = 0;
for (final CentroidCluster<T> cluster : clusters) {
final double dist = distance(point, cluster.getCenter());
objFunction += (dist * dist) * FastMath.pow(membershipMatrix[i][j], fuzziness);
j++;
}
i++;
}
return objFunction;
}
/**
 * Performs Fuzzy K-Means cluster analysis.
 *
 * @param dataPoints the points to cluster
 * @return the list of clusters
 * @throws MathIllegalArgumentException if the data points are null or the number
 *   of clusters is larger than the number of data points
 */
@Override
public List<CentroidCluster<T>> cluster(final Collection<T> dataPoints)
throws MathIllegalArgumentException {
// sanity checks
MathUtils.checkNotNull(dataPoints);
final int size = dataPoints.size();
// number of clusters has to be smaller or equal the number of data points
if (size < k) {
throw new NumberIsTooSmallException(size, k, false);
}
// copy the input collection to an unmodifiable list with indexed access
points = Collections.unmodifiableList(new ArrayList<T>(dataPoints));
clusters = new ArrayList<CentroidCluster<T>>();
membershipMatrix = new double[size][k];
final double[][] oldMatrix = new double[size][k];
// if no points are provided, return an empty list of clusters
if (size == 0) {
return clusters;
}
initializeMembershipMatrix();
// there is at least one point
final int pointDimension = points.get(0).getPoint().length;
for (int i = 0; i < k; i++) {
clusters.add(new CentroidCluster<T>(new DoublePoint(new double[pointDimension])));
}
int iteration = 0;
final int max = (maxIterations < 0) ? Integer.MAX_VALUE : maxIterations;
double difference = 0.0;
do {
saveMembershipMatrix(oldMatrix);
updateClusterCenters();
updateMembershipMatrix();
difference = calculateMaxMembershipChange(oldMatrix);
} while (difference > epsilon && ++iteration < max);
return clusters;
}
/**
 * Update the cluster centers as the membership-weighted mean of all points.
 */
private void updateClusterCenters() {
int j = 0;
final List<CentroidCluster<T>> newClusters = new ArrayList<CentroidCluster<T>>(k);
for (final CentroidCluster<T> cluster : clusters) {
final Clusterable center = cluster.getCenter();
int i = 0;
double[] arr = new double[center.getPoint().length];
double sum = 0.0;
for (final T point : points) {
final double u = FastMath.pow(membershipMatrix[i][j], fuzziness);
final double[] pointArr = point.getPoint();
for (int idx = 0; idx < arr.length; idx++) {
arr[idx] += u * pointArr[idx];
}
sum += u;
i++;
}
MathArrays.scaleInPlace(1.0 / sum, arr);
newClusters.add(new CentroidCluster<T>(new DoublePoint(arr)));
j++;
}
clusters.clear();
clusters = newClusters;
}
/**
 * Updates the membership matrix and assigns the points to the cluster with
 * the highest membership.
 */
private void updateMembershipMatrix() {
for (int i = 0; i < points.size(); i++) {
final T point = points.get(i);
// Bug fix: was Double.MIN_VALUE, which is the smallest POSITIVE double.
// If every membership value in the row were 0.0 (or NaN, e.g. from a
// degenerate distance), no value would exceed it, newCluster would stay
// -1, and clusters.get(-1) below would throw IndexOutOfBoundsException.
double maxMembership = -Double.MAX_VALUE;
int newCluster = -1;
for (int j = 0; j < clusters.size(); j++) {
double sum = 0.0;
final double distA = FastMath.abs(distance(point, clusters.get(j).getCenter()));
if (distA != 0.0) {
for (final CentroidCluster<T> c : clusters) {
final double distB = FastMath.abs(distance(point, c.getCenter()));
if (distB == 0.0) {
// the point coincides with another center -> zero membership here
sum = Double.POSITIVE_INFINITY;
break;
}
sum += FastMath.pow(distA / distB, 2.0 / (fuzziness - 1.0));
}
}
double membership;
if (sum == 0.0) {
membership = 1.0;
} else if (sum == Double.POSITIVE_INFINITY) {
membership = 0.0;
} else {
membership = 1.0 / sum;
}
membershipMatrix[i][j] = membership;
if (membershipMatrix[i][j] > maxMembership) {
maxMembership = membershipMatrix[i][j];
newCluster = j;
}
}
clusters.get(newCluster).addPoint(point);
}
}
/**
 * Initialize the membership matrix with random values.
 */
private void initializeMembershipMatrix() {
for (int i = 0; i < points.size(); i++) {
for (int j = 0; j < k; j++) {
membershipMatrix[i][j] = random.nextDouble();
}
// each row must sum to 1 so the values form a valid membership distribution
membershipMatrix[i] = MathArrays.normalizeArray(membershipMatrix[i], 1.0);
}
}
/**
 * Calculate the maximum element-by-element change of the membership matrix
 * for the current iteration.
 *
 * @param matrix the membership matrix of the previous iteration
 * @return the maximum membership matrix change
 */
private double calculateMaxMembershipChange(final double[][] matrix) {
double maxMembership = 0.0;
for (int i = 0; i < points.size(); i++) {
for (int j = 0; j < clusters.size(); j++) {
double v = FastMath.abs(membershipMatrix[i][j] - matrix[i][j]);
maxMembership = FastMath.max(v, maxMembership);
}
}
return maxMembership;
}
/**
 * Copy the membership matrix into the provided matrix.
 *
 * @param matrix the place to store the membership matrix
 */
private void saveMembershipMatrix(final double[][] matrix) {
for (int i = 0; i < points.size(); i++) {
System.arraycopy(membershipMatrix[i], 0, matrix[i], 0, clusters.size());
}
}
}
| |
package ca.uhn.fhir.rest.server;
import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.apache.commons.io.IOUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.dstu.resource.Patient;
import ca.uhn.fhir.rest.annotation.OptionalParam;
import ca.uhn.fhir.rest.annotation.RequiredParam;
import ca.uhn.fhir.rest.annotation.Search;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenOrListParam;
import ca.uhn.fhir.util.PortUtil;
import ca.uhn.fhir.util.UrlUtil;
/**
 * Tests for parsing of token "or list" search parameters
 * (e.g. {@code ?identifier=a,b,c}), in particular backslash-escaping of
 * literal commas inside token values (issue #192).
 */
public class TokenParameterTest {

   private static CloseableHttpClient ourClient;
   private static FhirContext ourCtx = FhirContext.forDstu1();
   private static int ourPort;
   private static Server ourServer;
   // Captures the TokenOrListParam received by the provider for the most recent request.
   private static TokenOrListParam ourLastOrList;

   @Before
   public void before() {
      // Reset captured state so each test only observes its own request.
      ourLastOrList = null;
   }

   /**
    * Test #192 — an escaped comma inside a value must not split the OR list.
    */
   @Test
   public void testOrListWithEscapedValue1() throws Exception {
      HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient?identifier=" + UrlUtil.escape("system|code-include-but-not-end-with-comma\\,suffix"));
      HttpResponse status = ourClient.execute(httpGet);
      IOUtils.closeQuietly(status.getEntity().getContent());
      assertEquals(200, status.getStatusLine().getStatusCode());
      assertEquals("system", ourLastOrList.getListAsCodings().get(0).getSystemElement().getValue());
      assertEquals("code-include-but-not-end-with-comma,suffix", ourLastOrList.getListAsCodings().get(0).getCodeElement().getValue());
      assertEquals(1, ourLastOrList.getListAsCodings().size());
   }

   /**
    * Test #192 — an escaped trailing comma is kept as part of the value.
    */
   @Test
   public void testOrListWithEscapedValue2() throws Exception {
      HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient?identifier=" + UrlUtil.escape("system|code-include-end-with-comma\\,"));
      HttpResponse status = ourClient.execute(httpGet);
      IOUtils.closeQuietly(status.getEntity().getContent());
      assertEquals(200, status.getStatusLine().getStatusCode());
      assertEquals(1, ourLastOrList.getListAsCodings().size());
      assertEquals("system", ourLastOrList.getListAsCodings().get(0).getSystemElement().getValue());
      assertEquals("code-include-end-with-comma,", ourLastOrList.getListAsCodings().get(0).getCodeElement().getValue());
   }

   /**
    * Test #192 — unescaped trailing commas produce no empty OR-list entries.
    */
   @Test
   public void testOrListWithEscapedValue3() throws Exception {
      HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient?identifier=" + UrlUtil.escape("system|code-include-end-with-comma1,system|code-include-end-with-comma2,,,,,"));
      HttpResponse status = ourClient.execute(httpGet);
      IOUtils.closeQuietly(status.getEntity().getContent());
      assertEquals(200, status.getStatusLine().getStatusCode());
      assertEquals(2, ourLastOrList.getListAsCodings().size());
      assertEquals("system", ourLastOrList.getListAsCodings().get(0).getSystemElement().getValue());
      assertEquals("code-include-end-with-comma1", ourLastOrList.getListAsCodings().get(0).getCodeElement().getValue());
      assertEquals("system", ourLastOrList.getListAsCodings().get(1).getSystemElement().getValue());
      assertEquals("code-include-end-with-comma2", ourLastOrList.getListAsCodings().get(1).getCodeElement().getValue());
   }

   /**
    * Test #192 — mixed escaped and unescaped commas: escaped ones stay in the
    * values, unescaped ones delimit the list, empty entries are dropped.
    */
   @Test
   public void testOrListWithEscapedValue4() throws Exception {
      HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient?identifier=" + UrlUtil.escape("\\,\\,\\,value1\\,\\,\\,with\\,\\,\\,commas\\,\\,\\,,,,\\,\\,\\,value2\\,\\,\\,with\\,\\,\\,commas,,,\\,"));
      HttpResponse status = ourClient.execute(httpGet);
      IOUtils.closeQuietly(status.getEntity().getContent());
      assertEquals(200, status.getStatusLine().getStatusCode());
      assertEquals(null, ourLastOrList.getListAsCodings().get(0).getSystemElement().getValue());
      assertEquals(",,,value1,,,with,,,commas,,,", ourLastOrList.getListAsCodings().get(0).getCodeElement().getValue());
      assertEquals(null, ourLastOrList.getListAsCodings().get(1).getSystemElement().getValue());
      assertEquals(",,,value2,,,with,,,commas", ourLastOrList.getListAsCodings().get(1).getCodeElement().getValue());
      assertEquals(null, ourLastOrList.getListAsCodings().get(2).getSystemElement().getValue());
      assertEquals(",", ourLastOrList.getListAsCodings().get(2).getCodeElement().getValue());
      assertEquals(3, ourLastOrList.getListAsCodings().size());
   }

   /**
    * Test #192 — escaped backslash and escaped dollar sign are unescaped in the values.
    */
   @Test
   public void testOrListWithEscapedValue5() throws Exception {
      HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient?identifier=" + UrlUtil.escape("A\\\\,B,\\$"));
      HttpResponse status = ourClient.execute(httpGet);
      IOUtils.closeQuietly(status.getEntity().getContent());
      assertEquals(200, status.getStatusLine().getStatusCode());
      assertEquals(null, ourLastOrList.getListAsCodings().get(0).getSystemElement().getValue());
      assertEquals("A\\", ourLastOrList.getListAsCodings().get(0).getCodeElement().getValue());
      assertEquals(null, ourLastOrList.getListAsCodings().get(1).getSystemElement().getValue());
      assertEquals("B", ourLastOrList.getListAsCodings().get(1).getCodeElement().getValue());
      assertEquals(null, ourLastOrList.getListAsCodings().get(2).getSystemElement().getValue());
      assertEquals("$", ourLastOrList.getListAsCodings().get(2).getCodeElement().getValue());
      assertEquals(3, ourLastOrList.getListAsCodings().size());
   }

   @AfterClass
   public static void afterClass() throws Exception {
      ourServer.stop();
   }

   /**
    * Starts an embedded Jetty server exposing {@link DummyPatientResourceProvider}
    * on a free port, and builds the shared HTTP client used by all tests.
    */
   @BeforeClass
   public static void beforeClass() throws Exception {
      ourPort = PortUtil.findFreePort();
      ourServer = new Server(ourPort);
      DummyPatientResourceProvider patientProvider = new DummyPatientResourceProvider();
      ServletHandler proxyHandler = new ServletHandler();
      // The constructor already receives the context; the former extra
      // setFhirContext(ourCtx) call was redundant and has been dropped.
      RestfulServer servlet = new RestfulServer(ourCtx);
      servlet.setResourceProviders(patientProvider);
      ServletHolder servletHolder = new ServletHolder(servlet);
      proxyHandler.addServletWithMapping(servletHolder, "/*");
      ourServer.setHandler(proxyHandler);
      ourServer.start();
      PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS);
      HttpClientBuilder builder = HttpClientBuilder.create();
      builder.setConnectionManager(connectionManager);
      ourClient = builder.build();
   }

   /**
    * Created by dsotnikov on 2/25/2014.
    */
   public static class DummyPatientResourceProvider implements IResourceProvider {

      /** Records the received identifier list for the test to inspect; returns no results. */
      @Search
      public List<Patient> findPatientByString(@RequiredParam(name = Patient.SP_IDENTIFIER) final TokenOrListParam theIdentifiers) {
         ArrayList<Patient> retVal = new ArrayList<Patient>();
         ourLastOrList = theIdentifiers;
         return retVal;
      }

      @Search
      public List<Patient> findPatientByStringParam(@RequiredParam(name = "str") StringParam theParam) {
         ArrayList<Patient> retVal = new ArrayList<Patient>();
         if (theParam.isExact() && theParam.getValue().equals("aaa")) {
            Patient patient = new Patient();
            patient.setId("1");
            retVal.add(patient);
         }
         if (!theParam.isExact() && theParam.getValue().toLowerCase().equals("aaa")) {
            Patient patient = new Patient();
            patient.setId("2");
            retVal.add(patient);
         }
         return retVal;
      }

      @Search
      public List<Patient> findPatientWithOptional(@OptionalParam(name = "ccc") StringParam theParam) {
         ArrayList<Patient> retVal = new ArrayList<Patient>();
         // BUGFIX: an @OptionalParam is null when the client omits it; the
         // previous code dereferenced it unconditionally and threw an NPE.
         if (theParam == null) {
            return retVal;
         }
         if (theParam.isExact() && theParam.getValue().equals("aaa")) {
            Patient patient = new Patient();
            patient.setId("1");
            retVal.add(patient);
         }
         if (!theParam.isExact() && theParam.getValue().toLowerCase().equals("aaa")) {
            Patient patient = new Patient();
            patient.setId("2");
            retVal.add(patient);
         }
         return retVal;
      }

      @Override
      public Class<? extends IResource> getResourceType() {
         return Patient.class;
      }
   }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.tier.sockets.command;
import java.io.IOException;
import java.util.List;
import org.apache.geode.annotations.Immutable;
import org.apache.geode.cache.DynamicRegionFactory;
import org.apache.geode.cache.InterestResultPolicy;
import org.apache.geode.cache.operations.RegisterInterestOperationContext;
import org.apache.geode.internal.Version;
import org.apache.geode.internal.cache.LocalRegion;
import org.apache.geode.internal.cache.tier.CachedRegionHelper;
import org.apache.geode.internal.cache.tier.Command;
import org.apache.geode.internal.cache.tier.InterestType;
import org.apache.geode.internal.cache.tier.MessageType;
import org.apache.geode.internal.cache.tier.sockets.BaseCommand;
import org.apache.geode.internal.cache.tier.sockets.ChunkedMessage;
import org.apache.geode.internal.cache.tier.sockets.Message;
import org.apache.geode.internal.cache.tier.sockets.Part;
import org.apache.geode.internal.cache.tier.sockets.ServerConnection;
import org.apache.geode.internal.security.AuthorizeRequest;
import org.apache.geode.internal.security.SecurityService;
import org.apache.geode.security.ResourcePermission.Operation;
import org.apache.geode.security.ResourcePermission.Resource;
/**
* All keys of the register interest list are being sent as a single part since 6.6. There is no
* need to send no keys as a separate part.In earlier versions {@link RegisterInterestList61} number
* of keys & each individual key was sent as a separate part.
*
* @since GemFire 6.6
*/
public class RegisterInterestList66 extends BaseCommand {
  @Immutable
  private static final RegisterInterestList66 singleton = new RegisterInterestList66();

  /** Returns the shared, stateless command instance. */
  public static Command getCommand() {
    return singleton;
  }

  RegisterInterestList66() {}

  /**
   * Processes a register-interest-list request. Message layout (6.6+ wire format):
   * part 0 = region name, part 1 = {@link InterestResultPolicy}, part 2 = durable flag,
   * part 3 = the complete key list (single part), part 4 = send-updates-as-invalidates
   * flag, last part = region data policy (second byte = serialize-values for 8.0+ clients).
   * Registers the client's interest and streams the response back in chunks.
   */
  @Override
  public void cmdExecute(final Message clientMessage, final ServerConnection serverConnection,
      final SecurityService securityService, long start) throws IOException, InterruptedException {
    Part regionNamePart = null;
    String regionName = null;
    Object key = null;
    InterestResultPolicy policy;
    List keys = null;
    int numberOfKeys = 0, partNumber = 0;
    serverConnection.setAsTrue(REQUIRES_RESPONSE);
    serverConnection.setAsTrue(REQUIRES_CHUNKED_RESPONSE);
    ChunkedMessage chunkedResponseMsg = serverConnection.getRegisterInterestResponseMessage();

    // Retrieve the data from the message parts
    regionNamePart = clientMessage.getPart(0);
    regionName = regionNamePart.getCachedString();

    // Retrieve the InterestResultPolicy
    try {
      policy = (InterestResultPolicy) clientMessage.getPart(1).getObject();
    } catch (Exception e) {
      writeChunkedException(clientMessage, e, serverConnection);
      serverConnection.setAsTrue(RESPONDED);
      return;
    }

    // Durable flag (part 2): a single byte, 0x01 == durable.
    boolean isDurable = false;
    try {
      Part durablePart = clientMessage.getPart(2);
      byte[] durablePartBytes = (byte[]) durablePart.getObject();
      isDurable = durablePartBytes[0] == 0x01;
    } catch (Exception e) {
      writeChunkedException(clientMessage, e, serverConnection);
      serverConnection.setAsTrue(RESPONDED);
      return;
    }

    // region data policy (always the last part of the message)
    byte[] regionDataPolicyPartBytes;
    boolean serializeValues = false;
    try {
      Part regionDataPolicyPart = clientMessage.getPart(clientMessage.getNumberOfParts() - 1);
      regionDataPolicyPartBytes = (byte[]) regionDataPolicyPart.getObject();
      if (serverConnection.getClientVersion().compareTo(Version.GFE_80) >= 0) {
        // The second byte here is serializeValues
        serializeValues = regionDataPolicyPartBytes[1] == (byte) 0x01;
      }
    } catch (Exception e) {
      writeChunkedException(clientMessage, e, serverConnection);
      serverConnection.setAsTrue(RESPONDED);
      return;
    }

    // Since 6.6 all keys arrive as one serialized List in a single part.
    partNumber = 3;
    Part list = clientMessage.getPart(partNumber);
    try {
      keys = (List) list.getObject();
      numberOfKeys = keys.size();
    } catch (Exception e) {
      writeChunkedException(clientMessage, e, serverConnection);
      serverConnection.setAsTrue(RESPONDED);
      return;
    }

    // Notification mode flag: 0x01 == deliver updates as invalidates.
    boolean sendUpdatesAsInvalidates = false;
    try {
      Part notifyPart = clientMessage.getPart(partNumber + 1);
      byte[] notifyPartBytes = (byte[]) notifyPart.getObject();
      sendUpdatesAsInvalidates = notifyPartBytes[0] == 0x01;
    } catch (Exception e) {
      writeChunkedException(clientMessage, e, serverConnection);
      serverConnection.setAsTrue(RESPONDED);
      return;
    }

    if (logger.isDebugEnabled()) {
      logger.debug(
          "{}: Received register interest 66 request ({} bytes) from {} for the following {} keys in region {}: {}",
          serverConnection.getName(), clientMessage.getPayloadLength(),
          serverConnection.getSocketString(), numberOfKeys, regionName, keys);
    }

    // Process the register interest request
    if (keys.isEmpty() || regionName == null) {
      String errMessage = null;
      if (keys.isEmpty() && regionName == null) {
        errMessage =
            "The input list of keys is empty and the input region name is null for the register interest request.";
      } else if (keys.isEmpty()) {
        errMessage =
            "The input list of keys for the register interest request is empty.";
      } else if (regionName == null) {
        errMessage =
            "The input region name for the register interest request is null.";
      }
      logger.warn("{}: {}", serverConnection.getName(), errMessage);
      writeChunkedErrorResponse(clientMessage, MessageType.REGISTER_INTEREST_DATA_ERROR, errMessage,
          serverConnection);
      serverConnection.setAsTrue(RESPONDED);
      // BUGFIX: stop here. Previously execution fell through after the error
      // response, attempting to register interest with an empty key list or a
      // null region name and potentially sending a second, conflicting response
      // on the same message.
      return;
    }

    // Interest registration proceeds even when the region does not yet exist;
    // only an informational message is logged.
    LocalRegion region = (LocalRegion) serverConnection.getCache().getRegion(regionName);
    if (region == null) {
      logger.info("{}: Region named {} was not found during register interest list request.",
          new Object[] {serverConnection.getName(), regionName});
    }
    try {
      securityService.authorize(Resource.DATA, Operation.READ, regionName);
      AuthorizeRequest authzRequest = serverConnection.getAuthzRequest();
      if (authzRequest != null) {
        if (!DynamicRegionFactory.regionIsDynamicRegionList(regionName)) {
          RegisterInterestOperationContext registerContext =
              authzRequest.registerInterestListAuthorize(regionName, keys, policy);
          // The authorization callback may substitute the key list.
          keys = (List) registerContext.getKey();
        }
      }
      // Register interest
      serverConnection.getAcceptor().getCacheClientNotifier().registerClientInterest(regionName,
          keys, serverConnection.getProxyID(), isDurable, sendUpdatesAsInvalidates, true,
          regionDataPolicyPartBytes[0], true);
    } catch (Exception ex) {
      // If an interrupted exception is thrown , rethrow it
      checkForInterrupt(serverConnection, ex);
      // Otherwise, write an exception message and continue
      writeChunkedException(clientMessage, ex, serverConnection);
      serverConnection.setAsTrue(RESPONDED);
      return;
    }

    boolean isPrimary = serverConnection.getAcceptor().getCacheClientNotifier()
        .getClientProxy(serverConnection.getProxyID()).isPrimary();
    if (!isPrimary) {
      // Secondary servers acknowledge without sending interest results.
      chunkedResponseMsg.setMessageType(MessageType.RESPONSE_FROM_SECONDARY);
      chunkedResponseMsg.setTransactionId(clientMessage.getTransactionId());
      chunkedResponseMsg.sendHeader();
      chunkedResponseMsg.setLastChunk(true);
      if (logger.isDebugEnabled()) {
        logger.debug(
            "{}: Sending register interest response chunk from secondary for region: {} for key: {} chunk=<{}>",
            serverConnection.getName(), regionName, key, chunkedResponseMsg);
      }
      chunkedResponseMsg.sendChunk(serverConnection);
    } else { // isPrimary
      // Send header which describes how many chunks will follow
      chunkedResponseMsg.setMessageType(MessageType.RESPONSE_FROM_PRIMARY);
      chunkedResponseMsg.setTransactionId(clientMessage.getTransactionId());
      chunkedResponseMsg.sendHeader();
      // Send chunk response
      try {
        fillAndSendRegisterInterestResponseChunks(region, keys, InterestType.KEY, serializeValues,
            policy, serverConnection);
        serverConnection.setAsTrue(RESPONDED);
      } catch (Exception e) {
        // If an interrupted exception is thrown , rethrow it
        checkForInterrupt(serverConnection, e);
        // otherwise send the exception back to client
        writeChunkedException(clientMessage, e, serverConnection);
        serverConnection.setAsTrue(RESPONDED);
        return;
      }
      if (logger.isDebugEnabled()) {
        logger.debug(
            "{}: Sent register interest response for the following {} keys in region {}: {}",
            serverConnection.getName(), numberOfKeys, regionName, keys);
      }
    } // isPrimary
  }
}
| |
package io.polyglotted.common.es.transport;
import com.google.common.collect.ImmutableMap;
import io.polyglotted.common.es.ElasticClient;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
import org.elasticsearch.action.admin.indices.exists.types.TypesExistsRequest;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.get.MultiGetResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.ClearScrollRequest;
import org.elasticsearch.action.search.ClearScrollResponse;
import org.elasticsearch.action.search.MultiSearchRequest;
import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.cluster.metadata.AliasMetaData;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.reindex.DeleteByQueryAction;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import static io.polyglotted.common.es.ElasticException.checkState;
import static io.polyglotted.common.es.ElasticException.handleEx;
import static org.elasticsearch.action.support.IndicesOptions.lenientExpandOpen;
import static org.elasticsearch.client.Requests.refreshRequest;
import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS;
import static org.elasticsearch.common.xcontent.XContentType.JSON;
/**
 * {@link ElasticClient} implementation backed by the Elasticsearch transport {@link Client}.
 * Every operation delegates to the wrapped client and normalises failures through
 * {@code ElasticException.handleEx} with an operation-specific message.
 */
@RequiredArgsConstructor
public class EsTransportClient implements ElasticClient {
    private final Client internalClient;

    @Override public void close() { internalClient.close(); }

    @Override public boolean indexExists(String index) {
        try {
            return internalClient.admin().indices().exists(new IndicesExistsRequest(index)).actionGet().isExists();
        } catch (Exception ex) { throw handleEx("indexExists failed", ex); }
    }

    @Override public boolean typeExists(String index, String... types) {
        try {
            return internalClient.admin().indices().typesExists(new TypesExistsRequest(new String[]{index}, types)).actionGet().isExists();
        } catch (Exception ex) { throw handleEx("typeExists failed", ex); }
    }

    /** Resolves the concrete index names behind an alias. */
    @Override public Set<String> getIndices(String alias) {
        Set<String> indices = new HashSet<>();
        Iterator<String> indexIt = getMeta(alias).getIndices().keysIt();
        while (indexIt.hasNext()) { indices.add(indexIt.next()); }
        return indices;
    }

    /** Renders aliases, mappings and settings of the given indices as a JSON array string. */
    @Override @SneakyThrows(IOException.class) public String getIndexMeta(String... indices) {
        MetaData indexMetaDatas = getMeta(indices);
        XContentBuilder builder = XContentFactory.jsonBuilder();
        builder.startArray();
        ImmutableOpenMap<String, IndexMetaData> getIndices = indexMetaDatas.getIndices();
        Iterator<String> indexIt = getIndices.keysIt();
        while (indexIt.hasNext()) {
            String index = indexIt.next();
            IndexMetaData metaData = getIndices.get(index);
            builder.startObject();
            builder.startObject(index);
            builder.startObject("aliases");
            ImmutableOpenMap<String, AliasMetaData> aliases = metaData.getAliases();
            Iterator<String> aIt = aliases.keysIt();
            while (aIt.hasNext()) {
                AliasMetaData alias = aliases.get(aIt.next());
                AliasMetaData.Builder.toXContent(alias, builder, EMPTY_PARAMS);
            }
            builder.endObject();
            builder.startObject("mappings");
            ImmutableOpenMap<String, MappingMetaData> mappings = metaData.getMappings();
            Iterator<String> mIt = mappings.keysIt();
            while (mIt.hasNext()) {
                String type = mIt.next();
                builder.field(type).map(mappings.get(type).getSourceAsMap());
            }
            builder.endObject();
            builder.startObject("settings");
            Settings settings = metaData.getSettings();
            settings.toXContent(builder, EMPTY_PARAMS);
            builder.endObject();
            builder.endObject();
            builder.endObject();
        }
        builder.endArray();
        return builder.string();
    }

    /** Renders only the settings of the given indices as a JSON object string. */
    @Override @SneakyThrows(IOException.class) public String getSettings(String... indices) {
        MetaData indexMetaDatas = getMeta(indices);
        XContentBuilder builder = XContentFactory.jsonBuilder();
        builder.startObject();
        ImmutableOpenMap<String, IndexMetaData> getIndices = indexMetaDatas.getIndices();
        Iterator<String> indexIt = getIndices.keysIt();
        while (indexIt.hasNext()) {
            String index = indexIt.next();
            IndexMetaData metaData = getIndices.get(index);
            builder.startObject(index).startObject("settings");
            Settings settings = metaData.getSettings();
            settings.toXContent(builder, EMPTY_PARAMS);
            builder.endObject().endObject();
        }
        builder.endObject();
        return builder.string();
    }

    /** Returns the mapping source for a type, or null if the type is not mapped. */
    @Override @SneakyThrows(IOException.class) public String getMapping(String index, String type) {
        ImmutableOpenMap<String, IndexMetaData> getIndices = getMeta(index).getIndices();
        Iterator<String> indexIt = getIndices.keysIt();
        while (indexIt.hasNext()) {
            ImmutableOpenMap<String, MappingMetaData> mappings = getIndices.get(indexIt.next()).getMappings();
            Iterator<String> mIt = mappings.keysIt();
            while (mIt.hasNext()) {
                if (type.equals(mIt.next())) { return mappings.get(type).source().string(); }
            }
        }
        return null;
    }

    @Override public void openIndex(String... indices) {
        try {
            checkState(internalClient.admin().indices().prepareOpen(indices).execute().actionGet().isAcknowledged(),
                "unable to open " + Arrays.toString(indices));
        } catch (Exception ex) { throw handleEx("openIndex failed", ex); }
    }

    @Override public void closeIndex(String... indices) {
        try {
            // BUGFIX: error messages previously said "open"/"openIndex" (copy-paste from openIndex).
            checkState(internalClient.admin().indices().prepareClose(indices).execute().actionGet().isAcknowledged(),
                "unable to close " + Arrays.toString(indices));
        } catch (Exception ex) { throw handleEx("closeIndex failed", ex); }
    }

    /** Fetches cluster-state metadata restricted to the given indices. */
    private MetaData getMeta(String... indices) {
        try {
            return internalClient.admin().cluster().prepareState().setIndices(indices).execute().actionGet().getState().metaData();
        } catch (Exception ex) { throw handleEx("getMeta failed", ex); }
    }

    @Override public void createIndex(CreateIndexRequest request) {
        try {
            checkState(internalClient.admin().indices().create(request).actionGet().isAcknowledged(), "unable to create index for " + request.index());
        } catch (Exception ex) { throw handleEx("createIndex failed", ex); }
    }

    @Override public void createIndex(String index, String resource) {
        createIndex(new CreateIndexRequest(index).source(resource, JSON));
    }

    @Override public void updateAlias(IndicesAliasesRequest request) {
        try {
            checkState(internalClient.admin().indices().aliases(request).actionGet().isAcknowledged(), "unable to update aliases");
        } catch (Exception ex) { throw handleEx("updateAlias failed", ex); }
    }

    @Override public void updateSettings(UpdateSettingsRequest request) {
        try {
            checkState(internalClient.admin().indices().updateSettings(request).actionGet().isAcknowledged(), "unable to update settings");
        } catch (Exception ex) { throw handleEx("updateSettings failed", ex); }
    }

    @Override public void putMapping(PutMappingRequest request) {
        try {
            checkState(internalClient.admin().indices().putMapping(request).actionGet().isAcknowledged(), "could not put mapping for " + request.type());
        } catch (Exception ex) { throw handleEx("putMapping failed", ex); }
    }

    @Override public void forceRefresh(String... indices) {
        try {
            internalClient.admin().indices().refresh(refreshRequest(indices)).actionGet();
        } catch (Exception ex) { throw handleEx("refresh failed", ex); }
    }

    @Override public void dropIndex(String... indices) {
        try {
            checkState(internalClient.admin().indices().delete(new DeleteIndexRequest(indices).indicesOptions(lenientExpandOpen()))
                .actionGet().isAcknowledged(), "Could not clear one or more index " + Arrays.toString(indices));
        } catch (Exception ex) { throw handleEx("dropIndex failed", ex); }
    }

    /** Blocks until the cluster reaches the requested health status; fails if the cluster is RED. */
    @Override public void waitForStatus(String status) {
        try {
            ClusterHealthResponse clusterHealth = internalClient.admin().cluster().prepareHealth().setWaitForNoRelocatingShards(true)
                .setWaitForStatus(ClusterHealthStatus.fromString(status)).execute().actionGet();
            checkState(clusterHealth.getStatus() != ClusterHealthStatus.RED, "cluster has errors");
        } catch (Exception ex) { throw handleEx("waitForStatus failed", ex); }
    }

    /** Returns a map mirroring the fields of the _cluster/health REST response. */
    @Override public Map<String, Object> clusterHealth() {
        try {
            ClusterHealthResponse health = internalClient.admin().cluster().health(new ClusterHealthRequest()).actionGet();
            return ImmutableMap.<String, Object>builder()
                .put("cluster_name", health.getClusterName())
                .put("status", health.getStatus().name().toLowerCase(Locale.ROOT))
                .put("timed_out", health.isTimedOut())
                .put("number_of_nodes", health.getNumberOfNodes())
                .put("number_of_data_nodes", health.getNumberOfDataNodes())
                .put("active_primary_shards", health.getActivePrimaryShards())
                .put("active_shards", health.getActiveShards())
                .put("relocating_shards", health.getRelocatingShards())
                .put("initializing_shards", health.getInitializingShards())
                .put("unassigned_shards", health.getUnassignedShards())
                .put("delayed_unassigned_shards", health.getDelayedUnassignedShards())
                .put("number_of_pending_tasks", health.getNumberOfPendingTasks())
                .put("number_of_in_flight_fetch", health.getNumberOfInFlightFetch())
                .put("task_max_waiting_in_queue_millis", health.getTaskMaxWaitingTime().millis() == 0 ? "-" : health.getTaskMaxWaitingTime().getStringRep())
                .put("active_shards_percent_as_number", String.format(Locale.ROOT, "%1.1f%%", health.getActiveShardsPercent()))
                .build();
        } catch (Exception ex) { throw handleEx("clusterHealth failed", ex); }
    }

    @Override public void buildPipeline(String id, String json) {
        try {
            checkState(internalClient.admin().cluster().preparePutPipeline(id, new BytesArray(json), JSON)
                .execute().actionGet().isAcknowledged(), "unable to build pipeline");
        } catch (Exception ex) { throw handleEx("buildPipeline failed", ex); }
    }

    @Override public boolean pipelineExists(String id) {
        try {
            return internalClient.admin().cluster().prepareGetPipeline(id).execute().actionGet().isFound();
        } catch (Exception ex) { throw handleEx("pipelineExists failed", ex); }
    }

    @Override public void deletePipeline(String id) {
        try {
            checkState(internalClient.admin().cluster().prepareDeletePipeline(id).execute()
                .actionGet().isAcknowledged(), "unable to delete pipeline");
        } catch (Exception ex) { throw handleEx("deletePipeline failed", ex); }
    }

    @Override public void putTemplate(String name, String body) {
        try {
            checkState(internalClient.admin().indices().preparePutTemplate(name).setSource(new BytesArray(body), JSON)
                .execute().actionGet().isAcknowledged(), "unable to put template");
        } catch (Exception ex) { throw handleEx("putTemplate failed", ex); }
    }

    @Override public boolean templateExists(String name) {
        try {
            return !internalClient.admin().indices().prepareGetTemplates(name).execute().actionGet()
                .getIndexTemplates().isEmpty();
        } catch (Exception ex) { throw handleEx("templateExists failed", ex); }
    }

    @Override public void deleteTemplate(String name) {
        try {
            checkState(internalClient.admin().indices().prepareDeleteTemplate(name)
                .execute().actionGet().isAcknowledged(), "unable to delete template");
        } catch (Exception ex) { throw handleEx("deleteTemplate failed", ex); }
    }

    // Document-level CRUD, search and scroll operations: thin blocking delegates.
    @Override public IndexResponse index(IndexRequest request) {
        try { return internalClient.index(request).actionGet(); } catch (Exception ex) { throw handleEx("index failed", ex); }
    }

    @Override public UpdateResponse update(UpdateRequest request) {
        try { return internalClient.update(request).actionGet(); } catch (Exception ex) { throw handleEx("update failed", ex); }
    }

    @Override public DeleteResponse delete(DeleteRequest request) {
        try { return internalClient.delete(request).actionGet(); } catch (Exception ex) { throw handleEx("delete failed", ex); }
    }

    @Override public BulkResponse bulk(BulkRequest request) {
        try { return internalClient.bulk(request).actionGet(); } catch (Exception ex) { throw handleEx("bulk failed", ex); }
    }

    @Override public void bulkAsync(BulkRequest request, ActionListener<BulkResponse> listener) {
        try { internalClient.bulk(request, listener); } catch (Exception ex) { throw handleEx("bulkAsync failed", ex); }
    }

    @Override public GetResponse get(GetRequest request) {
        try { return internalClient.get(request).actionGet(); } catch (Exception ex) { throw handleEx("get failed", ex); }
    }

    @Override public MultiGetResponse multiGet(MultiGetRequest request) {
        try { return internalClient.multiGet(request).actionGet(); } catch (Exception ex) { throw handleEx("multiGet failed", ex); }
    }

    @Override public SearchResponse search(SearchRequest request) {
        try { return internalClient.search(request).actionGet(); } catch (Exception ex) { throw handleEx("search failed", ex); }
    }

    @Override public MultiSearchResponse multiSearch(MultiSearchRequest request) {
        try { return internalClient.multiSearch(request).actionGet(); } catch (Exception ex) { throw handleEx("multiSearch failed", ex); }
    }

    @Override public SearchResponse searchScroll(SearchScrollRequest request) {
        try { return internalClient.searchScroll(request).actionGet(); } catch (Exception ex) { throw handleEx("searchScroll failed", ex); }
    }

    @Override public ClearScrollResponse clearScroll(ClearScrollRequest request) {
        try { return internalClient.clearScroll(request).actionGet(); } catch (Exception ex) { throw handleEx("clearScroll failed", ex); }
    }

    @Override public long deleteByQuery(String index, QueryBuilder query) {
        try {
            return DeleteByQueryAction.INSTANCE.newRequestBuilder(internalClient).source(index).filter(query).get().getDeleted();
        // BUGFIX: previously reported "clearScroll failed" (copy-paste from clearScroll).
        } catch (Exception ex) { throw handleEx("deleteByQuery failed", ex); }
    }

    // REST-style passthrough endpoints are not supported by the transport client.
    @Override public String simpleGet(String endpoint, String methodName) {
        throw new UnsupportedOperationException();
    }

    @Override public String simplePost(String endpoint, String body, String methodName) {
        throw new UnsupportedOperationException();
    }

    @Override public String simplePut(String endpoint, String body, String methodName) {
        throw new UnsupportedOperationException();
    }

    @Override public void simpleDelete(String endpoint, String methodName) {
        throw new UnsupportedOperationException();
    }
}
| |
package ba.jamax.util.rest.dao;
import java.io.Serializable;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.hibernate.Criteria;
import org.hibernate.SessionFactory;
import org.hibernate.criterion.Criterion;
import org.hibernate.criterion.MatchMode;
import org.hibernate.criterion.Order;
import org.hibernate.criterion.Projections;
import org.hibernate.criterion.Restrictions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import ba.jamax.util.rest.model.BaseEntity;
import ba.jamax.util.rest.model.Filter;
import ba.jamax.util.rest.model.FilterRule;
import ba.jamax.util.rest.model.Rule;
import ba.jamax.util.rest.service.GenericServiceImpl;
import ba.jamax.util.rest.util.GenericUtils;
import ba.jamax.util.rest.util.TypeUtils;
/**
 * Generic Hibernate DAO base class providing CRUD, paging, filtering and audit
 * stamping for entities extending {@link BaseEntity}. Subclasses declare the
 * concrete entity type via the generic parameter, e.g.
 * {@code class FooDAO extends GenericDAOImpl<Foo>}.
 */
public abstract class GenericDAOImpl<T extends BaseEntity> implements GenericDAO<T> {
    // FIX: the logger was created against GenericServiceImpl.class (copy-paste);
    // getClass() attributes log entries to the concrete DAO subclass instead.
    private final Logger logger = LoggerFactory.getLogger(getClass());
    // Fallback username recorded when no authenticated principal is available.
    private static final String UNKNOWN = "Unknown";

    @Autowired
    private SessionFactory sessionFactory;
    // Concrete entity type, resolved in the constructor from the subclass' generic parameter.
    private Class<T> entityClass;
    private GenericUtils<T> utils = new GenericUtils<T>();
    private TypeUtils typeUtils = new TypeUtils();

    public SessionFactory getSessionFactory() {
        return sessionFactory;
    }

    public void setSessionFactory(SessionFactory sessionFactory) {
        this.sessionFactory = sessionFactory;
    }

    @SuppressWarnings("unchecked")
    public GenericDAOImpl() {
        // Resolve T from the direct subclass declaration, e.g. "extends GenericDAOImpl<Foo>".
        ParameterizedType genericSuperclass = (ParameterizedType) getClass().getGenericSuperclass();
        this.entityClass = (Class<T>) genericSuperclass.getActualTypeArguments()[0];
    }

    @Autowired
    public void init(SessionFactory factory) {
        setSessionFactory(factory);
    }

    /** Loads the entity with the given identifier, or {@code null} if none exists. */
    @SuppressWarnings("unchecked")
    public T findById(Serializable id) {
        return (T) getSessionFactory().getCurrentSession().get(entityClass, id);
    }

    /** Persists a new entity, stamping all four audit fields with one consistent timestamp/user. */
    public T addNew(T t) {
        Date now = new Date();
        String user = getUsernameInSession();
        t.setCreated(now);
        t.setCreatedBy(user);
        t.setModified(now);
        t.setModifiedBy(user);
        getSessionFactory().getCurrentSession().saveOrUpdate(t);
        return t;
    }

    /** Persists a collection of new entities, sharing one timestamp/user for the whole batch. */
    public Collection<T> addAll(Collection<T> tList) {
        Date currentDate = new Date();
        String currentUser = getUsernameInSession();
        for (T t : tList) {
            t.setCreated(currentDate);
            t.setCreatedBy(currentUser);
            // FIX: also stamp the "modified" audit fields, consistent with addNew();
            // previously batch-inserted entities had null modified/modifiedBy.
            t.setModified(currentDate);
            t.setModifiedBy(currentUser);
            getSessionFactory().getCurrentSession().saveOrUpdate(t);
        }
        return tList;
    }

    public void delete(T t) {
        getSessionFactory().getCurrentSession().delete(t);
    }

    /** Updates an existing entity, refreshing the "modified" audit fields. */
    public void update(T t) {
        t.setModified(new Date());
        t.setModifiedBy(getUsernameInSession());
        getSessionFactory().getCurrentSession().update(t);
    }

    /**
     * Finds a page of entities matching the property criteria and optional filter.
     *
     * @param criterias   property name (possibly a dotted path) to required value
     * @param filter      optional jqGrid-style filter tree, may be null
     * @param strict      if false, String values match case-insensitively anywhere
     * @param firstResult zero-based offset of the first row
     * @param maxResults  page size
     * @param order       optional ordering; a comma-separated property list sorts ascending
     */
    @SuppressWarnings("unchecked")
    public List<T> findByCriteria(final Map<String, Object> criterias,
            final Filter filter, final boolean strict,
            final int firstResult, final int maxResults, final Order order) {
        Criteria criteria = getSessionFactory().getCurrentSession().createCriteria(entityClass);
        criteria.setResultTransformer(Criteria.DISTINCT_ROOT_ENTITY);
        criteria.setFirstResult(firstResult);
        criteria.setMaxResults(maxResults);
        createCriteria(criterias, filter, strict, criteria, order);
        return criteria.list();
    }

    /** Counts the entities matching the given criteria and filter. */
    @SuppressWarnings("unchecked")
    public int countByCriteria(final Map<String, Object> criteriaMap,
            final Filter filter, final boolean strict) {
        Criteria criteria = getSessionFactory().getCurrentSession().createCriteria(entityClass);
        createCriteria(criteriaMap, filter, strict, criteria, null);
        criteria.setProjection(Projections.rowCount());
        List<Number> counts = criteria.list();
        return getSumOfCounts(counts);
    }

    /** Sums a (possibly null) list of partial row counts. */
    private int getSumOfCounts(final List<Number> counts) {
        int totalCounts = 0;
        if (counts != null) {
            for (Number count : counts) {
                totalCounts += count.intValue();
            }
        }
        return totalCounts;
    }

    /**
     * Registers the Criteria aliases needed to navigate a dotted property path
     * such as {@code entity.someOtherEntity.moreEntity.id}. A comma-separated
     * list of paths is processed path by path.
     */
    private void addAliasesToCriteria(final String key, final Criteria criteria,
            final Map<String, String> aliases) {
        if (key != null && key.contains(".")) {
            if (key.contains(",")) {
                String[] keys = key.split(",");
                for (String k : keys) {
                    addAliasesToCriteria(k, criteria, aliases);
                }
            } else {
                // we have a relation: entity.someOtherEntity.moreEntity.id
                String[] entities = key.split("\\.");
                if (entities.length > 1) {
                    // create the first alias without the dot prefix
                    createAlias(entities[0], entities[0], criteria, aliases);
                    createAliasesRecursive(criteria, entities, 0, aliases);
                }
            }
        }
    }

    /** Walks the remaining path segments, aliasing each intermediate association. */
    private void createAliasesRecursive(final Criteria criteria, String[] entities, int counter,
            final Map<String, String> aliases) {
        // Stop before the final segment, which is the property itself, not an association.
        if ((counter + 2) >= entities.length) {
            return;
        }
        counter++;
        createAlias(entities[counter] + "." + entities[counter + 1], entities[counter + 1], criteria, aliases);
        createAliasesRecursive(criteria, entities, counter, aliases);
    }

    /** Registers an alias once per query; Hibernate rejects duplicate aliases. */
    private void createAlias(final String key, final String value, final Criteria criteria,
            final Map<String, String> aliases) {
        if (aliases.get(key) == null) {
            criteria.createAlias(key, value); // inner join by default
            aliases.put(key, value);
        }
    }

    /** Adds one restriction per criteria entry; non-strict Strings match anywhere, case-insensitively. */
    private void addRestrictionsToCriteria(final Map<String, Object> criterias,
            final boolean strict, final Criteria criteria, final Map<String, String> aliases) {
        for (Entry<String, Object> entry : criterias.entrySet()) {
            String key = entry.getKey();
            Object value = entry.getValue();
            addAliasesToCriteria(key, criteria, aliases);
            if (value instanceof String && !strict) {
                criteria.add(Restrictions.ilike(key, (String) value, MatchMode.ANYWHERE));
            } else {
                criteria.add(Restrictions.eq(key, value));
            }
        }
    }

    /**
     * Populates {@code criteria} with aliases, restrictions, filters and ordering.
     * A comma-separated order property sorts each listed property ascending.
     */
    protected void createCriteria(final Map<String, Object> criterias,
            final Filter filter, final boolean strict,
            final Criteria criteria, final Order order) {
        // FIX: aliases were previously tracked in a shared instance field (aliasMap)
        // cleared at the start of every call. With this DAO used as a Spring
        // singleton, concurrent queries corrupted each other's alias bookkeeping.
        // A per-invocation map makes criteria construction thread-safe.
        Map<String, String> aliases = new HashMap<String, String>();
        if (order != null) {
            addAliasesToCriteria(order.getPropertyName(), criteria, aliases);
        }
        addRestrictionsToCriteria(criterias, strict, criteria, aliases);
        addFiltersToCriteria(filter, criteria);
        if (order != null) {
            if (order.getPropertyName().contains(",")) {
                String[] keys = order.getPropertyName().split(",");
                for (String k : keys) {
                    criteria.addOrder(Order.asc(k.trim()));
                }
            } else {
                criteria.addOrder(order);
            }
        }
    }

    /**
     * Translates a jqGrid-style filter tree into Hibernate criterions, e.g.
     * {@code {"groupOp":"AND","rules":[{"field":"location","op":"cn","data":"wi"}, ...]}}.
     * Nested {@link Filter} groups are applied recursively; rules that cannot be
     * decoded are logged and skipped.
     */
    private void addFiltersToCriteria(final Filter filter, final Criteria criteria) {
        if (filter != null) {
            List<? extends FilterRule> rules = filter.getRules();
            if (rules == null) {
                // nothing to apply; guard against a filter without rules
                return;
            }
            String groupOp = filter.getGroupOp();
            Criterion criterion = null;
            for (FilterRule filterRule : rules) {
                try {
                    if (filterRule instanceof Rule) {
                        Rule rule = (Rule) filterRule;
                        String data = rule.getData();
                        String field = rule.getField();
                        // Convert the raw string value to the property's actual type
                        // using the entity getter's return type.
                        Method m = this.utils.getGetter(this.entityClass, field);
                        Object dataObj = typeUtils.getCorrectObjectType(m, data);
                        if (criterion == null) {
                            criterion = decodeOp(rule.getOp(), field, dataObj);
                        } else if ("AND".equals(groupOp)) { // constant-first: null-safe
                            criterion = Restrictions.and(criterion, decodeOp(rule.getOp(), field, dataObj));
                        } else if ("OR".equals(groupOp)) {
                            criterion = Restrictions.or(criterion, decodeOp(rule.getOp(), field, dataObj));
                        }
                    } else if (filterRule instanceof Filter) {
                        addFiltersToCriteria((Filter) filterRule, criteria);
                    }
                } catch (Exception e) {
                    logger.error("Could not create criterion", e);
                }
            }
            if (criterion != null) {
                criteria.add(criterion);
            }
        }
    }

    /** Returns the authenticated username, or {@link #UNKNOWN} when unavailable. */
    protected String getUsernameInSession() {
        String username = UNKNOWN;
        try {
            Authentication auth = SecurityContextHolder.getContext().getAuthentication();
            username = auth.getName(); // get logged in username
        } catch (Exception e) {
            logger.info("Could not get username");
        }
        return username;
    }

    /**
     * All available options are:
     * 'eq' => 'equal'
     * 'ne' => 'not equal'
     * 'lt' => 'less'
     * 'le' => 'less or equal'
     * 'gt' => 'greater'
     * 'ge' => 'greater or equal'
     * 'bw' => 'begins with'
     * 'bn' => 'does not begin with'
     * 'in' => 'is in'
     * 'ni' => 'is not in'
     * 'ew' => 'ends with'
     * 'en' => 'does not end with'
     * 'cn' => 'contains'
     * 'nc' => 'does not contain'
     * 'nu' => 'is null'
     * 'nn' => 'not null'
     * Note that the elements in the sopt array can be mixed in any order.
     * @param op operator code (see list above)
     * @param propertyName entity property the restriction applies to
     * @param value decoded comparison value; 'in'/'ni' split its toString() on spaces
     * @return the corresponding Hibernate criterion
     * @throws Exception if the operator code is unknown
     */
    private Criterion decodeOp(final String op, final String propertyName,
            final Object value) throws Exception {
        if (op.equals("eq")) {
            return Restrictions.eq(propertyName, value);
        } else if (op.equals("ne")) {
            return Restrictions.ne(propertyName, value);
        } else if (op.equals("lt")) {
            return Restrictions.lt(propertyName, value);
        } else if (op.equals("le")) {
            return Restrictions.le(propertyName, value);
        } else if (op.equals("gt")) {
            return Restrictions.gt(propertyName, value);
        } else if (op.equals("ge")) {
            return Restrictions.ge(propertyName, value);
        } else if (op.equals("bw")) {
            return Restrictions.ilike(propertyName, value.toString(), MatchMode.START);
        } else if (op.equals("bn")) {
            return Restrictions.not(Restrictions.ilike(propertyName, value.toString(), MatchMode.START));
        } else if (op.equals("in")) {
            return Restrictions.in(propertyName, value.toString().split(" "));
        } else if (op.equals("ni")) {
            return Restrictions.not(Restrictions.in(propertyName, value.toString().split(" ")));
        } else if (op.equals("ew")) {
            return Restrictions.ilike(propertyName, value.toString(), MatchMode.END);
        } else if (op.equals("en")) {
            return Restrictions.not(Restrictions.ilike(propertyName, value.toString(), MatchMode.END));
        } else if (op.equals("cn")) {
            return Restrictions.ilike(propertyName, value.toString(), MatchMode.ANYWHERE);
        } else if (op.equals("nc")) {
            return Restrictions.not(Restrictions.ilike(propertyName, value.toString(), MatchMode.ANYWHERE));
        } else if (op.equals("nu")) {
            return Restrictions.isNull(propertyName);
        } else if (op.equals("nn")) {
            return Restrictions.isNotNull(propertyName);
        }
        throw new Exception("Unknown op: \"" + op + "\"");
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.server.testing;
import com.facebook.airlift.bootstrap.Bootstrap;
import com.facebook.airlift.bootstrap.LifeCycleManager;
import com.facebook.airlift.discovery.client.Announcer;
import com.facebook.airlift.discovery.client.DiscoveryModule;
import com.facebook.airlift.discovery.client.ServiceAnnouncement;
import com.facebook.airlift.discovery.client.ServiceSelectorManager;
import com.facebook.airlift.discovery.client.testing.TestingDiscoveryModule;
import com.facebook.airlift.event.client.EventModule;
import com.facebook.airlift.http.server.TheServlet;
import com.facebook.airlift.http.server.testing.TestingHttpServer;
import com.facebook.airlift.http.server.testing.TestingHttpServerModule;
import com.facebook.airlift.jaxrs.JaxrsModule;
import com.facebook.airlift.jmx.testing.TestingJmxModule;
import com.facebook.airlift.json.JsonModule;
import com.facebook.airlift.json.smile.SmileModule;
import com.facebook.airlift.node.testing.TestingNodeModule;
import com.facebook.airlift.tracetoken.TraceTokenModule;
import com.facebook.drift.server.DriftServer;
import com.facebook.drift.transport.netty.server.DriftNettyServerTransport;
import com.facebook.presto.connector.ConnectorManager;
import com.facebook.presto.cost.StatsCalculator;
import com.facebook.presto.dispatcher.DispatchManager;
import com.facebook.presto.dispatcher.QueryPrerequisitesManagerModule;
import com.facebook.presto.eventlistener.EventListenerManager;
import com.facebook.presto.execution.QueryInfo;
import com.facebook.presto.execution.QueryManager;
import com.facebook.presto.execution.SqlQueryManager;
import com.facebook.presto.execution.StateMachine.StateChangeListener;
import com.facebook.presto.execution.TaskManager;
import com.facebook.presto.execution.resourceGroups.InternalResourceGroupManager;
import com.facebook.presto.execution.resourceGroups.ResourceGroupManager;
import com.facebook.presto.memory.ClusterMemoryManager;
import com.facebook.presto.memory.LocalMemoryManager;
import com.facebook.presto.metadata.AllNodes;
import com.facebook.presto.metadata.CatalogManager;
import com.facebook.presto.metadata.InternalNode;
import com.facebook.presto.metadata.InternalNodeManager;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.security.AccessControl;
import com.facebook.presto.security.AccessControlManager;
import com.facebook.presto.server.GracefulShutdownHandler;
import com.facebook.presto.server.PluginManager;
import com.facebook.presto.server.ServerMainModule;
import com.facebook.presto.server.ShutdownAction;
import com.facebook.presto.server.security.ServerSecurityModule;
import com.facebook.presto.spi.ConnectorId;
import com.facebook.presto.spi.Plugin;
import com.facebook.presto.spi.QueryId;
import com.facebook.presto.spi.eventlistener.EventListener;
import com.facebook.presto.spi.memory.ClusterMemoryPoolManager;
import com.facebook.presto.split.PageSourceManager;
import com.facebook.presto.split.SplitManager;
import com.facebook.presto.sql.analyzer.FeaturesConfig;
import com.facebook.presto.sql.parser.SqlParser;
import com.facebook.presto.sql.parser.SqlParserOptions;
import com.facebook.presto.sql.planner.ConnectorPlanOptimizerManager;
import com.facebook.presto.sql.planner.NodePartitioningManager;
import com.facebook.presto.sql.planner.Plan;
import com.facebook.presto.storage.TempStorageManager;
import com.facebook.presto.testing.ProcedureTester;
import com.facebook.presto.testing.TestingAccessControlManager;
import com.facebook.presto.testing.TestingEventListenerManager;
import com.facebook.presto.testing.TestingTempStorageManager;
import com.facebook.presto.testing.TestingWarningCollectorModule;
import com.facebook.presto.transaction.TransactionManager;
import com.fasterxml.jackson.module.afterburner.AfterburnerModule;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.net.HostAndPort;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Module;
import com.google.inject.Scopes;
import org.weakref.jmx.guice.MBeanModule;
import javax.annotation.concurrent.GuardedBy;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import java.io.Closeable;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.net.URI;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import static com.facebook.airlift.configuration.ConditionalModule.installModuleIf;
import static com.facebook.airlift.discovery.client.ServiceAnnouncement.serviceAnnouncement;
import static com.facebook.airlift.json.JsonBinder.jsonBinder;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Strings.nullToEmpty;
import static com.google.common.base.Throwables.throwIfUnchecked;
import static com.google.common.io.MoreFiles.deleteRecursively;
import static com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE;
import static com.google.inject.multibindings.Multibinder.newSetBinder;
import static java.lang.Integer.parseInt;
import static java.nio.file.Files.createTempDirectory;
import static java.nio.file.Files.isDirectory;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
public class TestingPrestoServer
implements Closeable
{
private final Injector injector;
private final Path baseDataDir;
private final boolean preserveData;
private final LifeCycleManager lifeCycleManager;
private final PluginManager pluginManager;
private final ConnectorManager connectorManager;
private final TestingHttpServer server;
private final CatalogManager catalogManager;
private final TransactionManager transactionManager;
private final SqlParser sqlParser;
private final Metadata metadata;
private final StatsCalculator statsCalculator;
private final TestingEventListenerManager eventListenerManager;
private final TestingAccessControlManager accessControl;
private final ProcedureTester procedureTester;
private final Optional<InternalResourceGroupManager<?>> resourceGroupManager;
private final SplitManager splitManager;
private final PageSourceManager pageSourceManager;
private final NodePartitioningManager nodePartitioningManager;
private final ConnectorPlanOptimizerManager planOptimizerManager;
private final ClusterMemoryPoolManager clusterMemoryManager;
private final LocalMemoryManager localMemoryManager;
private final InternalNodeManager nodeManager;
private final ServiceSelectorManager serviceSelectorManager;
private final Announcer announcer;
private final DispatchManager dispatchManager;
private final QueryManager queryManager;
private final TaskManager taskManager;
private final GracefulShutdownHandler gracefulShutdownHandler;
private final ShutdownAction shutdownAction;
private final RequestBlocker requestBlocker;
private final boolean resourceManager;
private final boolean coordinator;
public static class TestShutdownAction
implements ShutdownAction
{
private final CountDownLatch shutdownCalled = new CountDownLatch(1);
@GuardedBy("this")
private boolean isShutdown;
@Override
public synchronized void onShutdown()
{
isShutdown = true;
shutdownCalled.countDown();
}
public void waitForShutdownComplete(long millis)
throws InterruptedException
{
shutdownCalled.await(millis, MILLISECONDS);
}
public synchronized boolean isShutdown()
{
return isShutdown;
}
}
public TestingPrestoServer()
throws Exception
{
this(ImmutableList.of());
}
public TestingPrestoServer(List<Module> additionalModules)
throws Exception
{
this(true, ImmutableMap.of(), null, null, new SqlParserOptions(), additionalModules);
}
public TestingPrestoServer(
boolean coordinator,
Map<String, String> properties,
String environment,
URI discoveryUri,
SqlParserOptions parserOptions,
List<Module> additionalModules)
throws Exception
{
this(coordinator, properties, environment, discoveryUri, parserOptions, additionalModules, Optional.empty());
}
public TestingPrestoServer(
boolean coordinator,
Map<String, String> properties,
String environment,
URI discoveryUri,
SqlParserOptions parserOptions,
List<Module> additionalModules,
Optional<Path> baseDataDir)
throws Exception
{
this(false, false, coordinator, properties, environment, discoveryUri, parserOptions, additionalModules, baseDataDir);
}
public TestingPrestoServer(
boolean resourceManager,
boolean resourceManagerEnabled,
boolean coordinator,
Map<String, String> properties,
String environment,
URI discoveryUri,
SqlParserOptions parserOptions,
List<Module> additionalModules,
Optional<Path> baseDataDir)
throws Exception
{
this.resourceManager = resourceManager;
this.coordinator = coordinator;
this.baseDataDir = baseDataDir.orElseGet(TestingPrestoServer::tempDirectory);
this.preserveData = baseDataDir.isPresent();
properties = new HashMap<>(properties);
String coordinatorPort = properties.remove("http-server.http.port");
if (coordinatorPort == null) {
coordinatorPort = "0";
}
Map<String, String> serverProperties = getServerProperties(resourceManagerEnabled, properties, environment, discoveryUri);
ImmutableList.Builder<Module> modules = ImmutableList.<Module>builder()
.add(new TestingNodeModule(Optional.ofNullable(environment)))
.add(new TestingHttpServerModule(parseInt(coordinator ? coordinatorPort : "0")))
.add(new JsonModule())
.add(installModuleIf(
FeaturesConfig.class,
FeaturesConfig::isJsonSerdeCodeGenerationEnabled,
binder -> jsonBinder(binder).addModuleBinding().to(AfterburnerModule.class)))
.add(new SmileModule())
.add(new JaxrsModule(true))
.add(new MBeanModule())
.add(new TestingJmxModule())
.add(new EventModule())
.add(new TraceTokenModule())
.add(new ServerSecurityModule())
.add(new ServerMainModule(parserOptions))
.add(new TestingWarningCollectorModule())
.add(new QueryPrerequisitesManagerModule())
.add(binder -> {
binder.bind(TestingAccessControlManager.class).in(Scopes.SINGLETON);
binder.bind(TestingEventListenerManager.class).in(Scopes.SINGLETON);
binder.bind(TestingTempStorageManager.class).in(Scopes.SINGLETON);
binder.bind(AccessControlManager.class).to(TestingAccessControlManager.class).in(Scopes.SINGLETON);
binder.bind(EventListenerManager.class).to(TestingEventListenerManager.class).in(Scopes.SINGLETON);
binder.bind(TempStorageManager.class).to(TestingTempStorageManager.class).in(Scopes.SINGLETON);
binder.bind(AccessControl.class).to(AccessControlManager.class).in(Scopes.SINGLETON);
binder.bind(ShutdownAction.class).to(TestShutdownAction.class).in(Scopes.SINGLETON);
binder.bind(GracefulShutdownHandler.class).in(Scopes.SINGLETON);
binder.bind(ProcedureTester.class).in(Scopes.SINGLETON);
binder.bind(RequestBlocker.class).in(Scopes.SINGLETON);
newSetBinder(binder, Filter.class, TheServlet.class).addBinding()
.to(RequestBlocker.class).in(Scopes.SINGLETON);
});
if (discoveryUri != null) {
requireNonNull(environment, "environment required when discoveryUri is present");
modules.add(new DiscoveryModule());
}
else {
modules.add(new TestingDiscoveryModule());
}
modules.addAll(additionalModules);
Bootstrap app = new Bootstrap(modules.build());
Map<String, String> optionalProperties = new HashMap<>();
if (environment != null) {
optionalProperties.put("node.environment", environment);
}
injector = app
.doNotInitializeLogging()
.setRequiredConfigurationProperties(serverProperties)
.setOptionalConfigurationProperties(optionalProperties)
.quiet()
.initialize();
injector.getInstance(Announcer.class).start();
lifeCycleManager = injector.getInstance(LifeCycleManager.class);
pluginManager = injector.getInstance(PluginManager.class);
connectorManager = injector.getInstance(ConnectorManager.class);
server = injector.getInstance(TestingHttpServer.class);
catalogManager = injector.getInstance(CatalogManager.class);
transactionManager = injector.getInstance(TransactionManager.class);
sqlParser = injector.getInstance(SqlParser.class);
metadata = injector.getInstance(Metadata.class);
accessControl = injector.getInstance(TestingAccessControlManager.class);
procedureTester = injector.getInstance(ProcedureTester.class);
splitManager = injector.getInstance(SplitManager.class);
pageSourceManager = injector.getInstance(PageSourceManager.class);
if (coordinator) {
dispatchManager = injector.getInstance(DispatchManager.class);
queryManager = injector.getInstance(QueryManager.class);
ResourceGroupManager<?> resourceGroupManager = injector.getInstance(ResourceGroupManager.class);
this.resourceGroupManager = resourceGroupManager instanceof InternalResourceGroupManager
? Optional.of((InternalResourceGroupManager<?>) resourceGroupManager)
: Optional.empty();
nodePartitioningManager = injector.getInstance(NodePartitioningManager.class);
planOptimizerManager = injector.getInstance(ConnectorPlanOptimizerManager.class);
clusterMemoryManager = injector.getInstance(ClusterMemoryManager.class);
statsCalculator = injector.getInstance(StatsCalculator.class);
eventListenerManager = ((TestingEventListenerManager) injector.getInstance(EventListenerManager.class));
}
else if (resourceManager) {
dispatchManager = null;
queryManager = injector.getInstance(QueryManager.class);
resourceGroupManager = Optional.empty();
nodePartitioningManager = injector.getInstance(NodePartitioningManager.class);
planOptimizerManager = injector.getInstance(ConnectorPlanOptimizerManager.class);
clusterMemoryManager = null;
statsCalculator = null;
eventListenerManager = ((TestingEventListenerManager) injector.getInstance(EventListenerManager.class));
}
else {
dispatchManager = null;
queryManager = null;
resourceGroupManager = Optional.empty();
nodePartitioningManager = null;
planOptimizerManager = null;
clusterMemoryManager = null;
statsCalculator = null;
eventListenerManager = null;
}
localMemoryManager = injector.getInstance(LocalMemoryManager.class);
nodeManager = injector.getInstance(InternalNodeManager.class);
serviceSelectorManager = injector.getInstance(ServiceSelectorManager.class);
gracefulShutdownHandler = injector.getInstance(GracefulShutdownHandler.class);
taskManager = injector.getInstance(TaskManager.class);
shutdownAction = injector.getInstance(ShutdownAction.class);
announcer = injector.getInstance(Announcer.class);
requestBlocker = injector.getInstance(RequestBlocker.class);
// Announce Thrift server address
DriftServer driftServer = injector.getInstance(DriftServer.class);
driftServer.start();
updateThriftServerAddressAnnouncement(announcer, driftServerPort(driftServer), nodeManager);
announcer.forceAnnounce();
refreshNodes();
}
private Map<String, String> getServerProperties(boolean resourceManagerEnabled, Map<String, String> properties, String environment, URI discoveryUri)
{
Map<String, String> serverProperties = new HashMap<>();
serverProperties.put("coordinator", String.valueOf(coordinator));
serverProperties.put("resource-manager", String.valueOf(resourceManager));
serverProperties.put("resource-manager-enabled", String.valueOf(resourceManagerEnabled));
serverProperties.put("presto.version", "testversion");
serverProperties.put("task.concurrency", "4");
serverProperties.put("task.max-worker-threads", "4");
serverProperties.put("exchange.client-threads", "4");
serverProperties.put("optimizer.ignore-stats-calculator-failures", "false");
if (coordinator) {
// enabling failure detector in tests can make them flakey
serverProperties.put("failure-detector.enabled", "false");
}
if (discoveryUri != null) {
requireNonNull(environment, "environment required when discoveryUri is present");
serverProperties.put("discovery.uri", discoveryUri.toString());
}
// Add these last so explicitly specified properties override the defaults
serverProperties.putAll(properties);
return ImmutableMap.copyOf(serverProperties);
}
@Override
public void close()
throws IOException
{
try {
if (lifeCycleManager != null) {
lifeCycleManager.stop();
}
}
catch (Exception e) {
throwIfUnchecked(e);
throw new RuntimeException(e);
}
finally {
if (isDirectory(baseDataDir) && !preserveData) {
deleteRecursively(baseDataDir, ALLOW_INSECURE);
}
}
}
public void installPlugin(Plugin plugin)
{
pluginManager.installPlugin(plugin);
}
public DispatchManager getDispatchManager()
{
return dispatchManager;
}
public QueryManager getQueryManager()
{
return queryManager;
}
public Plan getQueryPlan(QueryId queryId)
{
checkState(coordinator, "not a coordinator");
checkState(queryManager instanceof SqlQueryManager);
return ((SqlQueryManager) queryManager).getQueryPlan(queryId);
}
public void addFinalQueryInfoListener(QueryId queryId, StateChangeListener<QueryInfo> stateChangeListener)
{
checkState(coordinator, "not a coordinator");
checkState(queryManager instanceof SqlQueryManager);
((SqlQueryManager) queryManager).addFinalQueryInfoListener(queryId, stateChangeListener);
}
public ConnectorId createCatalog(String catalogName, String connectorName)
{
return createCatalog(catalogName, connectorName, ImmutableMap.of());
}
public ConnectorId createCatalog(String catalogName, String connectorName, Map<String, String> properties)
{
ConnectorId connectorId = connectorManager.createConnection(catalogName, connectorName, properties);
updateConnectorIdAnnouncement(announcer, connectorId, nodeManager);
return connectorId;
}
public Path getBaseDataDir()
{
return baseDataDir;
}
public URI getBaseUrl()
{
return server.getBaseUrl();
}
public URI resolve(String path)
{
return server.getBaseUrl().resolve(path);
}
public HostAndPort getAddress()
{
return HostAndPort.fromParts(getBaseUrl().getHost(), getBaseUrl().getPort());
}
public HostAndPort getHttpsAddress()
{
URI httpsUri = server.getHttpServerInfo().getHttpsUri();
return HostAndPort.fromParts(httpsUri.getHost(), httpsUri.getPort());
}
public CatalogManager getCatalogManager()
{
return catalogManager;
}
public TransactionManager getTransactionManager()
{
return transactionManager;
}
public SqlParser getSqlParser()
{
return sqlParser;
}
public Metadata getMetadata()
{
return metadata;
}
public StatsCalculator getStatsCalculator()
{
checkState(coordinator, "not a coordinator");
return statsCalculator;
}
public Optional<EventListener> getEventListener()
{
checkState(coordinator, "not a coordinator");
return eventListenerManager.getEventListener();
}
public TestingAccessControlManager getAccessControl()
{
return accessControl;
}
public ProcedureTester getProcedureTester()
{
return procedureTester;
}
public SplitManager getSplitManager()
{
return splitManager;
}
public PageSourceManager getPageSourceManager()
{
return pageSourceManager;
}
public Optional<InternalResourceGroupManager<?>> getResourceGroupManager()
{
return resourceGroupManager;
}
public NodePartitioningManager getNodePartitioningManager()
{
return nodePartitioningManager;
}
public ConnectorPlanOptimizerManager getPlanOptimizerManager()
{
return planOptimizerManager;
}
public LocalMemoryManager getLocalMemoryManager()
{
return localMemoryManager;
}
public ClusterMemoryManager getClusterMemoryManager()
{
checkState(coordinator, "not a coordinator");
checkState(clusterMemoryManager instanceof ClusterMemoryManager);
return (ClusterMemoryManager) clusterMemoryManager;
}
public GracefulShutdownHandler getGracefulShutdownHandler()
{
return gracefulShutdownHandler;
}
public TaskManager getTaskManager()
{
return taskManager;
}
public ShutdownAction getShutdownAction()
{
return shutdownAction;
}
public boolean isCoordinator()
{
return coordinator;
}
public final AllNodes refreshNodes()
{
serviceSelectorManager.forceRefresh();
nodeManager.refreshNodes();
return nodeManager.getAllNodes();
}
public Set<InternalNode> getActiveNodesWithConnector(ConnectorId connectorId)
{
return nodeManager.getActiveConnectorNodes(connectorId);
}
public <T> T getInstance(Key<T> key)
{
return injector.getInstance(key);
}
public void stopResponding()
{
requestBlocker.block();
}
public void startResponding()
{
requestBlocker.unblock();
}
private static void updateConnectorIdAnnouncement(Announcer announcer, ConnectorId connectorId, InternalNodeManager nodeManager)
{
//
// This code was copied from PrestoServer, and is a hack that should be removed when the connectorId property is removed
//
// get existing announcement
ServiceAnnouncement announcement = getPrestoAnnouncement(announcer.getServiceAnnouncements());
// update connectorIds property
Map<String, String> properties = new LinkedHashMap<>(announcement.getProperties());
String property = nullToEmpty(properties.get("connectorIds"));
Set<String> connectorIds = new LinkedHashSet<>(Splitter.on(',').trimResults().omitEmptyStrings().splitToList(property));
connectorIds.add(connectorId.toString());
properties.put("connectorIds", Joiner.on(',').join(connectorIds));
// update announcement
announcer.removeServiceAnnouncement(announcement.getId());
announcer.addServiceAnnouncement(serviceAnnouncement(announcement.getType()).addProperties(properties).build());
announcer.forceAnnounce();
nodeManager.refreshNodes();
}
// TODO: announcement does not work for coordinator
/**
 * Publishes {@code thriftPort} as the "thriftServerPort" property of the current
 * "presto" service announcement and forces a re-announce.
 */
private static void updateThriftServerAddressAnnouncement(Announcer announcer, int thriftPort, InternalNodeManager nodeManager)
{
    ServiceAnnouncement current = getPrestoAnnouncement(announcer.getServiceAnnouncements());
    // copy the existing properties, overriding only the thrift port
    Map<String, String> updatedProperties = new LinkedHashMap<>(current.getProperties());
    updatedProperties.put("thriftServerPort", String.valueOf(thriftPort));
    announcer.removeServiceAnnouncement(current.getId());
    announcer.addServiceAnnouncement(serviceAnnouncement(current.getType()).addProperties(updatedProperties).build());
    announcer.forceAnnounce();
    nodeManager.refreshNodes();
}
/**
 * Finds the "presto" service announcement among the given announcements,
 * throwing if none is present.
 */
private static ServiceAnnouncement getPrestoAnnouncement(Set<ServiceAnnouncement> announcements)
{
    return announcements.stream()
            .filter(candidate -> candidate.getType().equals("presto"))
            .findFirst()
            .orElseThrow(() -> new RuntimeException("Presto announcement not found: " + announcements));
}
/**
 * Creates a fresh "PrestoTest"-prefixed temporary directory, wrapping any
 * I/O failure in an {@code UncheckedIOException}.
 */
private static Path tempDirectory()
{
try {
return createTempDirectory("PrestoTest");
}
catch (IOException e) {
throw new UncheckedIOException(e);
}
}
/**
 * Servlet filter that can temporarily suspend request handling: while
 * {@code blocked} is set, request threads park in doFilter() until unblock()
 * wakes them; otherwise requests pass straight through.
 * NOTE(review): {@code monitor} is static while {@code blocked} is per-instance,
 * so the wait/notify hand-off is shared across all instances — confirm only a
 * single RequestBlocker is ever created.
 */
private static class RequestBlocker
implements Filter
{
private static final Object monitor = new Object();
private volatile boolean blocked;
@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
throws IOException, ServletException
{
synchronized (monitor) {
// standard guarded wait: re-check the flag after every wake-up
while (blocked) {
try {
monitor.wait();
}
catch (InterruptedException e) {
// restore the interrupt status before surfacing the failure
Thread.currentThread().interrupt();
throw new RuntimeException(e);
}
}
}
// proceed with the request once unblocked (or immediately if never blocked)
chain.doFilter(request, response);
}
/** Causes subsequent requests to wait in doFilter() until unblock() is called. */
public void block()
{
synchronized (monitor) {
blocked = true;
}
}
/** Releases all request threads currently parked in doFilter(). */
public void unblock()
{
synchronized (monitor) {
blocked = false;
monitor.notifyAll();
}
}
@Override
public void init(FilterConfig filterConfig) {}
@Override
public void destroy() {}
}
/** Returns the port the Drift (Thrift-over-Netty) server transport is bound to. */
private static int driftServerPort(DriftServer server)
{
return ((DriftNettyServerTransport) server.getServerTransport()).getPort();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.rows;
import java.io.IOException;
import java.util.*;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.partitions.PartitionStatisticsCollector;
import org.apache.cassandra.io.util.DataInputPlus;
import org.apache.cassandra.io.util.DataOutputPlus;
/**
* Stats used for the encoding of the rows and tombstones of a given source.
* <p>
* Those stats are used to optimize the on-wire and on-disk storage of rows. More precisely,
* the {@code minTimestamp}, {@code minLocalDeletionTime} and {@code minTTL} stats are used to
* delta-encode those information for the sake of vint encoding.
* <p>
 * Note that due to their use, those stats can be somewhat inaccurate (the more inaccurate
 * they are, the less effective the storage will be, but provided the stats are not completely wacky,
 * this shouldn't have too huge an impact on performance) and in fact they will not always be
* accurate for reasons explained in {@link SerializationHeader#make}.
*/
public class EncodingStats
{
// Default values for the timestamp, deletion time and ttl. We use this both for NO_STATS, but also to serialize
// an EncodingStats. Basically, we encode the diff of each value to these epochs, which gives values with better vint encoding.
private static final long TIMESTAMP_EPOCH;
private static final int DELETION_TIME_EPOCH;
private static final int TTL_EPOCH = 0;
static
{
// We want a fixed epoch that provides small values when subtracted from our timestamps and deletion times.
// So we somewhat arbitrarily use the date of the summit 2015, which should hopefully roughly correspond to the 3.0 release.
Calendar c = Calendar.getInstance(TimeZone.getTimeZone("GMT-0"), Locale.US);
c.set(Calendar.YEAR, 2015);
c.set(Calendar.MONTH, Calendar.SEPTEMBER);
c.set(Calendar.DAY_OF_MONTH, 22);
c.set(Calendar.HOUR_OF_DAY, 0);
c.set(Calendar.MINUTE, 0);
c.set(Calendar.SECOND, 0);
c.set(Calendar.MILLISECOND, 0);
TIMESTAMP_EPOCH = c.getTimeInMillis() * 1000; // timestamps should be in microseconds by convention
DELETION_TIME_EPOCH = (int)(c.getTimeInMillis() / 1000); // local deletion times are in seconds
}
// We should use this sparingly obviously
public static final EncodingStats NO_STATS = new EncodingStats(TIMESTAMP_EPOCH, DELETION_TIME_EPOCH, TTL_EPOCH);
public static final Serializer serializer = new Serializer();
public final long minTimestamp;
public final int minLocalDeletionTime;
public final int minTTL;
public EncodingStats(long minTimestamp,
int minLocalDeletionTime,
int minTTL)
{
// Note that the exact value of those don't impact correctness, just the efficiency of the encoding. So when we
// get a value for timestamp (resp. minLocalDeletionTime) that means 'no object had a timestamp' (resp. 'a local
// deletion time'), then what value we store for minTimestamp (resp. minLocalDeletionTime) doesn't matter, and
// it's thus more efficient to use our EPOCH numbers, since it will result in a guaranteed 1 byte encoding.
this.minTimestamp = minTimestamp == LivenessInfo.NO_TIMESTAMP ? TIMESTAMP_EPOCH : minTimestamp;
this.minLocalDeletionTime = minLocalDeletionTime == LivenessInfo.NO_EXPIRATION_TIME ? DELETION_TIME_EPOCH : minLocalDeletionTime;
this.minTTL = minTTL;
}
/**
 * Merge this stats with another one.
 * <p>
 * The comments of {@link SerializationHeader#make} applies here too, i.e. the result of
 * merging will be not totally accurate but we can live with that.
 */
public EncodingStats mergeWith(EncodingStats that)
{
// For each component an EPOCH value means "unset", so prefer the other side's value in that
// case; otherwise keep the minimum of the two.
long minTimestamp = this.minTimestamp == TIMESTAMP_EPOCH
? that.minTimestamp
: (that.minTimestamp == TIMESTAMP_EPOCH ? this.minTimestamp : Math.min(this.minTimestamp, that.minTimestamp));
int minDelTime = this.minLocalDeletionTime == DELETION_TIME_EPOCH
? that.minLocalDeletionTime
: (that.minLocalDeletionTime == DELETION_TIME_EPOCH ? this.minLocalDeletionTime : Math.min(this.minLocalDeletionTime, that.minLocalDeletionTime));
int minTTL = this.minTTL == TTL_EPOCH
? that.minTTL
: (that.minTTL == TTL_EPOCH ? this.minTTL : Math.min(this.minTTL, that.minTTL));
return new EncodingStats(minTimestamp, minDelTime, minTTL);
}
@Override
public boolean equals(Object o)
{
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
EncodingStats that = (EncodingStats) o;
return this.minLocalDeletionTime == that.minLocalDeletionTime
&& this.minTTL == that.minTTL
&& this.minTimestamp == that.minTimestamp;
}
@Override
public int hashCode()
{
return Objects.hash(minTimestamp, minLocalDeletionTime, minTTL);
}
@Override
public String toString()
{
return String.format("EncodingStats(ts=%d, ldt=%d, ttl=%d)", minTimestamp, minLocalDeletionTime, minTTL);
}
/**
 * Accumulates the minimum timestamp, local deletion time and TTL seen while
 * scanning a partition, producing the matching EncodingStats via {@link #get()}.
 */
public static class Collector implements PartitionStatisticsCollector
{
private boolean isTimestampSet;
private long minTimestamp = Long.MAX_VALUE;
private boolean isDelTimeSet;
private int minDeletionTime = Integer.MAX_VALUE;
private boolean isTTLSet;
private int minTTL = Integer.MAX_VALUE;
/** Folds a liveness info into the stats (no-op when empty). */
public void update(LivenessInfo info)
{
if (info.isEmpty())
return;
updateTimestamp(info.timestamp());
if (info.isExpiring())
{
updateTTL(info.ttl());
updateLocalDeletionTime(info.localExpirationTime());
}
}
/** Folds a cell into the stats; tombstones contribute their deletion time but no TTL. */
public void update(Cell cell)
{
updateTimestamp(cell.timestamp());
if (cell.isExpiring())
{
updateTTL(cell.ttl());
updateLocalDeletionTime(cell.localDeletionTime());
}
else if (cell.isTombstone())
{
updateLocalDeletionTime(cell.localDeletionTime());
}
}
/** Folds a (non-live) deletion time into the stats. */
public void update(DeletionTime deletionTime)
{
if (deletionTime.isLive())
return;
updateTimestamp(deletionTime.markedForDeleteAt());
updateLocalDeletionTime(deletionTime.localDeletionTime());
}
public void updateTimestamp(long timestamp)
{
isTimestampSet = true;
minTimestamp = Math.min(minTimestamp, timestamp);
}
public void updateLocalDeletionTime(int deletionTime)
{
isDelTimeSet = true;
minDeletionTime = Math.min(minDeletionTime, deletionTime);
}
public void updateTTL(int ttl)
{
isTTLSet = true;
minTTL = Math.min(minTTL, ttl);
}
public void updateColumnSetPerRow(long columnSetInRow)
{
// Not relevant for encoding stats; required by PartitionStatisticsCollector.
}
public void updateHasLegacyCounterShards(boolean hasLegacyCounterShards)
{
// We don't care about this but this comes with PartitionStatisticsCollector
}
/** The collected stats; components never seen fall back to their EPOCH defaults. */
public EncodingStats get()
{
return new EncodingStats(isTimestampSet ? minTimestamp : TIMESTAMP_EPOCH,
isDelTimeSet ? minDeletionTime : DELETION_TIME_EPOCH,
isTTLSet ? minTTL : TTL_EPOCH);
}
/** Convenience: collects stats over a static row, a row iterator and the partition's deletion info. */
public static EncodingStats collect(Row staticRow, Iterator<Row> rows, DeletionInfo deletionInfo)
{
Collector collector = new Collector();
deletionInfo.collectStats(collector);
if (!staticRow.isEmpty())
Rows.collectStats(staticRow, collector);
while (rows.hasNext())
Rows.collectStats(rows.next(), collector);
return collector.get();
}
}
/** (De)serializes an EncodingStats as three vints, delta-encoded against the EPOCH constants. */
public static class Serializer
{
public void serialize(EncodingStats stats, DataOutputPlus out) throws IOException
{
out.writeVInt(stats.minTimestamp - TIMESTAMP_EPOCH);
out.writeVInt(stats.minLocalDeletionTime - DELETION_TIME_EPOCH);
out.writeVInt(stats.minTTL - TTL_EPOCH);
}
/** Exact size in bytes of {@link #serialize}'s output. */
public int serializedSize(EncodingStats stats)
{
return TypeSizes.sizeofVInt(stats.minTimestamp - TIMESTAMP_EPOCH)
+ TypeSizes.sizeofVInt(stats.minLocalDeletionTime - DELETION_TIME_EPOCH)
+ TypeSizes.sizeofVInt(stats.minTTL - TTL_EPOCH);
}
public EncodingStats deserialize(DataInputPlus in) throws IOException
{
long minTimestamp = in.readVInt() + TIMESTAMP_EPOCH;
int minLocalDeletionTime = (int)in.readVInt() + DELETION_TIME_EPOCH;
int minTTL = (int)in.readVInt() + TTL_EPOCH;
return new EncodingStats(minTimestamp, minLocalDeletionTime, minTTL);
}
}
}
| |
package com.eveningoutpost.dexdrip.UtilityModels;
import android.content.Context;
import android.content.SharedPreferences;
import android.graphics.Color;
import android.preference.PreferenceManager;
import com.eveningoutpost.dexdrip.Models.BgReading;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.TimeZone;
import lecho.lib.hellocharts.model.Axis;
import lecho.lib.hellocharts.model.AxisValue;
import lecho.lib.hellocharts.model.Line;
import lecho.lib.hellocharts.model.LineChartData;
import lecho.lib.hellocharts.model.PointValue;
import lecho.lib.hellocharts.model.Viewport;
import lecho.lib.hellocharts.util.Utils;
import lecho.lib.hellocharts.view.Chart;
/**
 * Builds the HelloCharts {@link LineChartData} model (BG reading lines, threshold
 * lines, axes and viewport helpers) for the most recent 24 hours of readings.
 * Created by stephenblack on 11/15/14.
 */
public class BgGraphBuilder {
    // chart window: now + 10 minutes, back to 24 hours before that
    public double end_time = new Date().getTime() + (60000 * 10);
    public double start_time = end_time - (60000 * 60 * 24);
    public Context context;
    public SharedPreferences prefs;
    public double highMark;   // upper threshold, raw value from the "highValue" pref
    public double lowMark;    // lower threshold, raw value from the "lowValue" pref
    public double defaultMinY;
    public double defaultMaxY;
    public boolean doMgdl;    // true -> display mg/dL, false -> mmol/L
    private double endHour;   // start of the most recently completed hour; computed in xAxis()
    // one reading every 5 minutes over 24 hours
    private final int numValues = (60 / 5) * 24;
    private final List<BgReading> bgReadings = BgReading.latestForGraph(numValues, start_time);
    private List<PointValue> inRangeValues = new ArrayList<PointValue>();
    private List<PointValue> highValues = new ArrayList<PointValue>();
    private List<PointValue> lowValues = new ArrayList<PointValue>();
    public Viewport viewport;

    public BgGraphBuilder(Context context) {
        this.context = context;
        this.prefs = PreferenceManager.getDefaultSharedPreferences(context);
        this.highMark = Double.parseDouble(prefs.getString("highValue", "170"));
        this.lowMark = Double.parseDouble(prefs.getString("lowValue", "70"));
        this.doMgdl = (prefs.getString("units", "mgdl").compareTo("mgdl") == 0);
        defaultMinY = unitized(40);
        defaultMaxY = unitized(250);
    }

    /** Full chart model: all lines plus both axes. */
    public LineChartData lineData() {
        LineChartData lineData = new LineChartData(defaultLines());
        lineData.setAxisYLeft(yAxis());
        lineData.setAxisXBottom(xAxis());
        return lineData;
    }

    /** Scaled-down model for the preview (scroll) chart. */
    public LineChartData previewLineData() {
        LineChartData previewLineData = new LineChartData(lineData());
        previewLineData.setAxisYLeft(yAxis());
        previewLineData.setAxisXBottom(previewXAxis());
        // indexes 4..6 are the reading lines (in-range, low, high) — see defaultLines() order
        previewLineData.getLines().get(4).setPointRadius(2);
        previewLineData.getLines().get(5).setPointRadius(2);
        previewLineData.getLines().get(6).setPointRadius(2);
        return previewLineData;
    }

    /**
     * All chart lines. Order matters: previewLineData() addresses the reading
     * lines by index (4 = in-range, 5 = low, 6 = high).
     */
    public List<Line> defaultLines() {
        addBgReadingValues();
        List<Line> lines = new ArrayList<Line>();
        lines.add(minShowLine());
        lines.add(maxShowLine());
        lines.add(highLine());
        lines.add(lowLine());
        lines.add(inRangeValuesLine());
        lines.add(lowValuesLine());
        lines.add(highValuesLine());
        return lines;
    }

    /** Point-only line for readings above the high threshold. */
    public Line highValuesLine() {
        Line highValuesLine = new Line(highValues);
        highValuesLine.setColor(Utils.COLOR_ORANGE);
        highValuesLine.setHasLines(false);
        highValuesLine.setPointRadius(3);
        highValuesLine.setHasPoints(true);
        return highValuesLine;
    }

    /** Point-only line for readings below the low threshold. */
    public Line lowValuesLine() {
        Line lowValuesLine = new Line(lowValues);
        lowValuesLine.setColor(Color.parseColor("#C30909"));
        lowValuesLine.setHasLines(false);
        lowValuesLine.setPointRadius(3);
        lowValuesLine.setHasPoints(true);
        return lowValuesLine;
    }

    /** Point-only line for readings inside the target range. */
    public Line inRangeValuesLine() {
        Line inRangeValuesLine = new Line(inRangeValues);
        inRangeValuesLine.setColor(Utils.COLOR_BLUE);
        inRangeValuesLine.setHasLines(false);
        inRangeValuesLine.setPointRadius(3);
        inRangeValuesLine.setHasPoints(true);
        return inRangeValuesLine;
    }

    /**
     * (Re)buckets the readings into the high/in-range/low point lists, clamping
     * values to the [40, 400] display window.
     * FIX: clears the lists first so repeated calls (e.g. lineData() followed by
     * previewLineData()) no longer accumulate duplicate points in the field lists.
     */
    private void addBgReadingValues() {
        highValues.clear();
        inRangeValues.clear();
        lowValues.clear();
        for (BgReading bgReading : bgReadings) {
            if (bgReading.calculated_value >= 400) {
                highValues.add(new PointValue((float) bgReading.timestamp, (float) unitized(400)));
            } else if (unitized(bgReading.calculated_value) >= highMark) {
                highValues.add(new PointValue((float) bgReading.timestamp, (float) unitized(bgReading.calculated_value)));
            } else if (unitized(bgReading.calculated_value) >= lowMark) {
                inRangeValues.add(new PointValue((float) bgReading.timestamp, (float) unitized(bgReading.calculated_value)));
            } else if (bgReading.calculated_value >= 40) {
                lowValues.add(new PointValue((float) bgReading.timestamp, (float) unitized(bgReading.calculated_value)));
            } else {
                lowValues.add(new PointValue((float) bgReading.timestamp, (float) unitized(40)));
            }
        }
    }

    /** Horizontal marker at the high threshold. */
    public Line highLine() {
        List<PointValue> highLineValues = new ArrayList<PointValue>();
        highLineValues.add(new PointValue((float) start_time, (float) highMark));
        highLineValues.add(new PointValue((float) end_time, (float) highMark));
        Line highLine = new Line(highLineValues);
        highLine.setHasPoints(false);
        highLine.setStrokeWidth(1);
        highLine.setColor(Utils.COLOR_ORANGE);
        return highLine;
    }

    /** Filled horizontal marker at the low threshold. */
    public Line lowLine() {
        List<PointValue> lowLineValues = new ArrayList<PointValue>();
        lowLineValues.add(new PointValue((float) start_time, (float) lowMark));
        lowLineValues.add(new PointValue((float) end_time, (float) lowMark));
        Line lowLine = new Line(lowLineValues);
        lowLine.setHasPoints(false);
        lowLine.setAreaTransparency(50);
        lowLine.setColor(Color.parseColor("#C30909"));
        lowLine.setStrokeWidth(1);
        lowLine.setFilled(true);
        return lowLine;
    }

    /** Invisible line pinning the top of the default y-range. */
    public Line maxShowLine() {
        List<PointValue> maxShowValues = new ArrayList<PointValue>();
        maxShowValues.add(new PointValue((float) start_time, (float) defaultMaxY));
        maxShowValues.add(new PointValue((float) end_time, (float) defaultMaxY));
        Line maxShowLine = new Line(maxShowValues);
        maxShowLine.setHasLines(false);
        maxShowLine.setHasPoints(false);
        return maxShowLine;
    }

    /** Invisible line pinning the bottom of the default y-range. */
    public Line minShowLine() {
        List<PointValue> minShowValues = new ArrayList<PointValue>();
        minShowValues.add(new PointValue((float) start_time, (float) defaultMinY));
        minShowValues.add(new PointValue((float) end_time, (float) defaultMinY));
        Line minShowLine = new Line(minShowValues);
        minShowLine.setHasPoints(false);
        minShowLine.setHasLines(false);
        return minShowLine;
    }

    /////////AXIS RELATED//////////////
    /** Fixed y-axis labels: steps of 50 mg/dL, or steps of 2 mmol/L. */
    public Axis yAxis() {
        Axis yAxis = new Axis();
        yAxis.setAutoGenerated(false);
        List<AxisValue> axisValues = new ArrayList<AxisValue>();
        for (int j = 1; j <= 12; j += 1) {
            if (doMgdl) {
                axisValues.add(new AxisValue(j * 50));
            } else {
                axisValues.add(new AxisValue(j * 2));
            }
        }
        yAxis.setValues(axisValues);
        yAxis.setHasLines(true);
        yAxis.setMaxLabelChars(5);
        yAxis.setInside(true);
        return yAxis;
    }

    /**
     * Hourly x-axis labels for the last 25 hour marks. Also computes
     * {@code endHour} (start of the current hour), which previewXAxis() reuses.
     */
    public Axis xAxis() {
        Axis xAxis = new Axis();
        xAxis.setAutoGenerated(false);
        List<AxisValue> xAxisValues = new ArrayList<AxisValue>();
        GregorianCalendar now = new GregorianCalendar();
        GregorianCalendar today = new GregorianCalendar(now.get(Calendar.YEAR), now.get(Calendar.MONTH), now.get(Calendar.DAY_OF_MONTH));
        // NOTE(review): "h a" is locale-sensitive — confirm default-locale labels are intended
        SimpleDateFormat timeFormat = new SimpleDateFormat("h a");
        timeFormat.setTimeZone(TimeZone.getDefault());
        double start_hour = today.getTime().getTime();
        double timeNow = new Date().getTime();
        // find the start of the hour we are currently in
        // (replaces the old "l = 25" forced loop exit with a plain break)
        for (int hour = 0; hour <= 24; hour++) {
            double hourStart = start_hour + (60000 * 60 * hour);
            if (hourStart < timeNow && (start_hour + (60000 * 60 * (hour + 1))) >= timeNow) {
                endHour = hourStart;
                break;
            }
        }
        for (int hour = 0; hour <= 24; hour++) {
            double timestamp = endHour - (60000 * 60 * hour);
            xAxisValues.add(new AxisValue((long) (timestamp), (timeFormat.format(timestamp)).toCharArray()));
        }
        xAxis.setValues(xAxisValues);
        xAxis.setHasLines(true);
        return xAxis;
    }

    /** Hourly labels for the preview chart, reusing the endHour computed by xAxis(). */
    public Axis previewXAxis() {
        List<AxisValue> previewXaxisValues = new ArrayList<AxisValue>();
        SimpleDateFormat timeFormat = new SimpleDateFormat("h a");
        timeFormat.setTimeZone(TimeZone.getDefault());
        for (int hour = 0; hour <= 24; hour++) {
            double timestamp = endHour - (60000 * 60 * hour);
            previewXaxisValues.add(new AxisValue((long) (timestamp), (timeFormat.format(timestamp)).toCharArray()));
        }
        Axis previewXaxis = new Axis();
        previewXaxis.setValues(previewXaxisValues);
        previewXaxis.setHasLines(true);
        previewXaxis.setTextSize(5);
        return previewXaxis;
    }

    /////////VIEWPORT RELATED//////////////
    /** Narrows the preview viewport and centers it on "now". */
    public Viewport advanceViewport(Chart chart, Chart previewChart) {
        viewport = new Viewport(previewChart.getMaximumViewport());
        viewport.inset((float) (86400000 / 2.5), 0);
        double distance_to_move = (new Date().getTime()) - viewport.left - (((viewport.right - viewport.left) / 2));
        viewport.offset((float) distance_to_move, 0);
        return viewport;
    }

    /** Converts a raw mg/dL value into the user's display unit. */
    public double unitized(double value) {
        if (doMgdl) {
            return value;
        } else {
            return mmolConvert(value);
        }
    }

    /**
     * Formats a raw mg/dL value for display: "HIGH" above 400, "LOW" below 40,
     * otherwise 0 decimals for mg/dL or 1 decimal for mmol/L.
     */
    public String unitized_string(double value) {
        // pattern "#" already formats with zero fraction digits; the old extra
        // setMaximumFractionDigits(0) call was redundant
        DecimalFormat df = new DecimalFormat("#");
        if (value >= 400) {
            return "HIGH";
        } else if (value >= 40) {
            if (doMgdl) {
                df.setMaximumFractionDigits(0);
                return df.format(value);
            } else {
                df.setMaximumFractionDigits(1);
                return df.format(mmolConvert(value));
            }
        } else {
            return "LOW";
        }
    }

    /** mg/dL -> mmol/L conversion (conventional factor of 18). */
    public double mmolConvert(double mgdl) {
        return mgdl / 18;
    }
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.utils.db;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import javax.management.StandardMBean;
import org.apache.log4j.Logger;
import com.cloud.utils.concurrency.NamedThreadFactory;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.utils.mgmt.JmxUtil;
/**
 * ConnectionConcierge keeps stand-alone database connections alive. It exists
 * because someone must hold such a database connection to keep it from being
 * garbage collected while it is still in use.
 */
public class ConnectionConcierge {
    static final Logger s_logger = Logger.getLogger(ConnectionConcierge.class);
    static final ConnectionConciergeManager s_mgr = new ConnectionConciergeManager();

    Connection _conn;
    String _name;
    boolean _keepAlive;
    boolean _autoCommit;
    int _isolationLevel;
    int _holdability;

    /**
     * @param name base name for this concierge; a unique numeric suffix is appended
     * @param conn connection to manage; its autocommit/isolation/holdability are
     *             captured here and re-applied on every reset
     * @param keepAlive whether the background task should periodically ping this connection
     * @throws CloudRuntimeException if the connection settings cannot be read
     */
    public ConnectionConcierge(String name, Connection conn, boolean keepAlive) {
        _name = name + s_mgr.getNextId();
        _keepAlive = keepAlive;
        try {
            _autoCommit = conn.getAutoCommit();
            _isolationLevel = conn.getTransactionIsolation();
            _holdability = conn.getHoldability();
        } catch (SQLException e) {
            throw new CloudRuntimeException("Unable to get information from the connection object", e);
        }
        reset(conn);
    }

    /**
     * Replaces the managed connection: releases the previous one (best effort),
     * re-applies the captured connection settings to the new one, and
     * (re-)registers with the keep-alive manager.
     */
    public void reset(Connection conn) {
        try {
            release();
        } catch (Throwable th) {
            s_logger.error("Unable to release a connection", th);
        }
        _conn = conn;
        try {
            _conn.setAutoCommit(_autoCommit);
            _conn.setHoldability(_holdability);
            _conn.setTransactionIsolation(_isolationLevel);
        } catch (SQLException e) {
            // FIX: previous message was a copy-paste of the release error;
            // this failure is about configuring the new connection
            s_logger.error("Unable to configure the connection", e);
        }
        s_mgr.register(_name, this);
        s_logger.debug("Registering a database connection for " + _name);
    }

    /** Returns the managed connection (null after release()). */
    public final Connection conn() {
        return _conn;
    }

    /**
     * Unregisters from the keep-alive manager and closes the underlying connection.
     * @throws CloudRuntimeException if closing the connection fails
     */
    public void release() {
        s_mgr.unregister(_name);
        try {
            if (_conn != null) {
                _conn.close();
            }
            _conn = null;
        } catch (SQLException e) {
            throw new CloudRuntimeException("Problem in closing a connection", e);
        }
    }

    @Override
    protected void finalize() throws Exception {
        // last-resort cleanup if the owner never called release()
        if (_conn != null) {
            release();
        }
    }

    /** Whether the background task should periodically ping this connection. */
    public boolean keepAlive() {
        return _keepAlive;
    }

    protected static class ConnectionConciergeManager extends StandardMBean implements ConnectionConciergeMBean {
        ScheduledExecutorService _executor = Executors.newScheduledThreadPool(1, new NamedThreadFactory("ConnectionKeeper"));
        final ConcurrentHashMap<String, ConnectionConcierge> _conns = new ConcurrentHashMap<String, ConnectionConcierge>();
        final AtomicInteger _idGenerator = new AtomicInteger();

        ConnectionConciergeManager() {
            super(ConnectionConciergeMBean.class, false);
            resetKeepAliveTask(20);
            try {
                JmxUtil.registerMBean("DB Connections", "DB Connections", this);
            } catch (Exception e) {
                s_logger.error("Unable to register mbean", e);
            }
        }

        /** Next unique suffix used to build concierge names. */
        public Integer getNextId() {
            return _idGenerator.incrementAndGet();
        }

        public void register(String name, ConnectionConcierge concierge) {
            _conns.put(name, concierge);
        }

        public void unregister(String name) {
            _conns.remove(name);
        }

        /**
         * Pings the connection with "SELECT 1".
         *
         * @return null when the connection is usable (or absent), otherwise a
         *         description of the failure
         */
        protected String testValidity(String name, Connection conn) {
            if (conn == null) {
                return null;
            }
            // FIX: try-with-resources replaces the old manual close that silently
            // swallowed SQLException, and also closes the implicit ResultSet
            try (PreparedStatement pstmt = conn.prepareStatement("SELECT 1")) {
                pstmt.executeQuery();
                return null;
            } catch (Throwable th) {
                s_logger.error("Unable to keep the db connection for " + name, th);
                return th.toString();
            }
        }

        @Override
        public List<String> testValidityOfConnections() {
            ArrayList<String> results = new ArrayList<String>(_conns.size());
            for (Map.Entry<String, ConnectionConcierge> entry : _conns.entrySet()) {
                String result = testValidity(entry.getKey(), entry.getValue().conn());
                results.add(entry.getKey() + "=" + (result == null ? "OK" : result));
            }
            return results;
        }

        @Override
        public String resetConnection(String name) {
            ConnectionConcierge concierge = _conns.get(name);
            if (concierge == null) {
                return "Not Found";
            }
            Connection conn = Transaction.getStandaloneConnection();
            if (conn == null) {
                return "Unable to get another db connection"; // FIX: typo "anotehr"
            }
            concierge.reset(conn);
            return "Done";
        }

        @Override
        public String resetKeepAliveTask(int seconds) {
            // replace (not reuse) the executor so a stuck ping can't delay the new schedule
            if (_executor != null) {
                try {
                    _executor.shutdown();
                } catch (Exception e) {
                    s_logger.error("Unable to shutdown executor", e);
                }
            }
            _executor = Executors.newScheduledThreadPool(1, new NamedThreadFactory("ConnectionConcierge"));
            _executor.scheduleAtFixedRate(new Runnable() {
                @Override
                public void run() {
                    s_logger.trace("connection concierge keep alive task");
                    for (Map.Entry<String, ConnectionConcierge> entry : _conns.entrySet()) {
                        ConnectionConcierge concierge = entry.getValue();
                        if (concierge.keepAlive()) {
                            testValidity(entry.getKey(), entry.getValue().conn());
                        }
                    }
                }
            }, 0, seconds, TimeUnit.SECONDS);
            return "As you wish.";
        }

        @Override
        public List<String> getConnectionsNotPooled() {
            return new ArrayList<String>(_conns.keySet());
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.task;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.ExecutorService;
import org.apache.samza.SamzaException;
import org.apache.samza.context.Context;
import org.apache.samza.operators.OperatorSpecGraph;
import org.apache.samza.operators.impl.InputOperatorImpl;
import org.apache.samza.operators.impl.OperatorImplGraph;
import org.apache.samza.system.EndOfStreamMessage;
import org.apache.samza.system.IncomingMessageEnvelope;
import org.apache.samza.system.MessageType;
import org.apache.samza.system.SystemStream;
import org.apache.samza.system.WatermarkMessage;
import org.apache.samza.util.Clock;
import org.apache.samza.util.SystemClock;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A {@link StreamTask} implementation that brings all the operator API implementation components together and
* feeds the input messages into the user-defined transformation chains in {@link OperatorSpecGraph}.
*/
public class StreamOperatorTask implements AsyncStreamTask, InitableTask, WindowableTask, ClosableTask {
private static final Logger LOG = LoggerFactory.getLogger(StreamOperatorTask.class);
private final OperatorSpecGraph specGraph;
private final Clock clock;
/*
* Thread pool used by the task to schedule processing of incoming messages. If job.container.thread.pool.size is
* not configured, this will be null. We don't want to create an executor service within StreamOperatorTask due to
* following reasons
* 1. It is harder to reason about the lifecycle of the executor service
* 2. We end up with thread pool proliferation. Especially for jobs with high number of tasks.
*/
private ExecutorService taskThreadPool;
private OperatorImplGraph operatorImplGraph;
/**
* Constructs an adaptor task to run the user-implemented {@link OperatorSpecGraph}.
* @param specGraph the serialized version of user-implemented {@link OperatorSpecGraph}
* that includes the logical DAG
* @param clock the {@link Clock} to use for time-keeping
*/
public StreamOperatorTask(OperatorSpecGraph specGraph, Clock clock) {
this.specGraph = specGraph.clone();
this.clock = clock;
}
public StreamOperatorTask(OperatorSpecGraph specGraph) {
this(specGraph, SystemClock.instance());
}
/**
* Initializes this task during startup.
* <p>
* Implementation: Initializes the runtime {@link OperatorImplGraph} according to user-defined {@link OperatorSpecGraph}.
* Users set the input and output streams and the task-wide context manager using
* {@link org.apache.samza.application.descriptors.StreamApplicationDescriptor} APIs, and the logical transforms
* using the {@link org.apache.samza.operators.MessageStream} APIs. After the
* {@link org.apache.samza.application.descriptors.StreamApplicationDescriptorImpl} is initialized once by the
* application, it then creates an immutable {@link OperatorSpecGraph} accordingly, which is passed in to this
* class to create the {@link OperatorImplGraph} corresponding to the logical DAG.
*
* @param context allows initializing and accessing contextual data of this StreamTask
* @throws Exception in case of initialization errors
*/
@Override
public final void init(Context context) throws Exception {
// create the operator impl DAG corresponding to the logical operator spec DAG
this.operatorImplGraph = new OperatorImplGraph(specGraph, context, clock);
}
/**
* Passes the incoming message envelopes along to the {@link InputOperatorImpl} node
* for the input {@link SystemStream}. It is non-blocking and dispatches the message to the container thread
* pool. The thread pool size is configured through job.container.thread.pool.size. In the absence of the config,
* the task executes the DAG on the run loop thread.
* <p>
* From then on, each {@link org.apache.samza.operators.impl.OperatorImpl} propagates its transformed output to
* its chained {@link org.apache.samza.operators.impl.OperatorImpl}s itself.
*
* @param ime incoming message envelope to process
* @param collector the collector to send messages with
* @param coordinator the coordinator to request commits or shutdown
* @param callback the task callback handle
*/
@Override
public final void processAsync(IncomingMessageEnvelope ime, MessageCollector collector, TaskCoordinator coordinator,
TaskCallback callback) {
Runnable processRunnable = () -> {
try {
SystemStream systemStream = ime.getSystemStreamPartition().getSystemStream();
InputOperatorImpl inputOpImpl = operatorImplGraph.getInputOperator(systemStream);
if (inputOpImpl != null) {
CompletionStage<Void> processFuture;
MessageType messageType = MessageType.of(ime.getMessage());
switch (messageType) {
case USER_MESSAGE:
processFuture = inputOpImpl.onMessageAsync(ime, collector, coordinator);
break;
case END_OF_STREAM:
EndOfStreamMessage eosMessage = (EndOfStreamMessage) ime.getMessage();
processFuture =
inputOpImpl.aggregateEndOfStream(eosMessage, ime.getSystemStreamPartition(), collector, coordinator);
break;
case WATERMARK:
WatermarkMessage watermarkMessage = (WatermarkMessage) ime.getMessage();
processFuture = inputOpImpl.aggregateWatermark(watermarkMessage, ime.getSystemStreamPartition(), collector,
coordinator);
break;
default:
processFuture = failedFuture(new SamzaException("Unknown message type " + messageType + " encountered."));
break;
}
processFuture.whenComplete((val, ex) -> {
if (ex != null) {
callback.failure(ex);
} else {
callback.complete();
}
});
} else {
// If InputOperator is not found in the operator graph for a given SystemStream, throw an exception else the
// job will timeout due to async task callback timeout (TaskCallbackTimeoutException)
final String errMessage = String.format("InputOperator not found in OperatorGraph for %s. The available input"
+ " operators are: %s. Please check SystemStream configuration for the `SystemConsumer` and/or task.inputs"
+ " task configuration.", systemStream, operatorImplGraph.getAllInputOperators());
LOG.error(errMessage);
callback.failure(new SamzaException(errMessage));
}
} catch (Exception e) {
LOG.error("Failed to process the incoming message due to ", e);
callback.failure(e);
}
};
if (taskThreadPool != null) {
LOG.debug("Processing message using thread pool.");
taskThreadPool.submit(processRunnable);
} else {
LOG.debug("Processing message on the run loop thread.");
processRunnable.run();
}
}
@Override
public final void window(MessageCollector collector, TaskCoordinator coordinator) {
  // Fire the timer callback on every input operator in the graph, collecting one
  // future per operator.
  CompletableFuture[] timerFutures = operatorImplGraph.getAllInputOperators()
      .stream()
      .map(inputOperator -> inputOperator.onTimer(collector, coordinator))
      .toArray(CompletableFuture[]::new);
  // Block until every per-operator timer callback has completed, so that window
  // processing is finished when this method returns.
  CompletableFuture.allOf(timerFutures).join();
}
@Override
public void close() throws Exception {
  // Release operator resources; the graph may be null if init was never run.
  final OperatorImplGraph graph = this.operatorImplGraph;
  if (graph != null) {
    graph.close();
  }
}
/**
 * Package private setter used by TaskFactoryUtil to initialize the thread pool on which
 * incoming messages are processed. When left {@code null}, messages are processed on the
 * run loop thread instead (see the process runnable dispatch above).
 */
void setTaskThreadPool(ExecutorService taskThreadPool) {
  this.taskThreadPool = taskThreadPool;
}
/**
 * Package private setter for the operator graph, used by TestStreamOperatorTask tests to
 * inject a mock/stub graph.
 */
@VisibleForTesting
void setOperatorImplGraph(OperatorImplGraph operatorImplGraph) {
  this.operatorImplGraph = operatorImplGraph;
}
/* package private for testing: returns the operator graph this task dispatches into */
OperatorImplGraph getOperatorImplGraph() {
  return this.operatorImplGraph;
}
/**
 * Returns a {@link CompletableFuture} that has already completed exceptionally with the
 * given cause. Used to surface errors through the same asynchronous completion path as
 * successful message processing.
 *
 * @param ex the failure cause; must not be {@code null}
 * @return a future already completed exceptionally with {@code ex}
 * @throws NullPointerException if {@code ex} is {@code null}
 */
private static CompletableFuture<Void> failedFuture(Throwable ex) {
  // Fully-qualified java.util.Objects is used instead of Guava's Preconditions so this
  // helper depends only on the standard library (and needs no new import).
  java.util.Objects.requireNonNull(ex, "cause of failed future must not be null");
  CompletableFuture<Void> failedFuture = new CompletableFuture<>();
  failedFuture.completeExceptionally(ex);
  return failedFuture;
}
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skyframe;
import com.google.common.base.MoreObjects;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.actions.Action;
import com.google.devtools.build.lib.actions.ActionAnalysisMetadata;
import com.google.devtools.build.lib.actions.ActionAnalysisMetadata.MiddlemanType;
import com.google.devtools.build.lib.actions.ActionLookupData;
import com.google.devtools.build.lib.actions.ActionLookupValue;
import com.google.devtools.build.lib.actions.ActionLookupValue.ActionLookupKey;
import com.google.devtools.build.lib.actions.ActionTemplate;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.Artifact.DerivedArtifact;
import com.google.devtools.build.lib.actions.Artifact.TreeFileArtifact;
import com.google.devtools.build.lib.actions.ArtifactOwner;
import com.google.devtools.build.lib.actions.FileArtifactValue;
import com.google.devtools.build.lib.actions.FileValue;
import com.google.devtools.build.lib.actions.FilesetTraversalParams.DirectTraversalRoot;
import com.google.devtools.build.lib.actions.FilesetTraversalParams.PackageBoundaryMode;
import com.google.devtools.build.lib.actions.MissingInputFileException;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.skyframe.RecursiveFilesystemTraversalFunction.RecursiveFilesystemTraversalException;
import com.google.devtools.build.lib.skyframe.RecursiveFilesystemTraversalValue.ResolvedFile;
import com.google.devtools.build.lib.skyframe.RecursiveFilesystemTraversalValue.TraversalRequest;
import com.google.devtools.build.lib.util.Fingerprint;
import com.google.devtools.build.lib.util.Pair;
import com.google.devtools.build.lib.vfs.RootedPath;
import com.google.devtools.build.skyframe.SkyFunction;
import com.google.devtools.build.skyframe.SkyFunctionException;
import com.google.devtools.build.skyframe.SkyFunctionException.Transience;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.SkyValue;
import java.io.IOException;
import java.util.Comparator;
import java.util.Map;
import java.util.function.Supplier;
import javax.annotation.Nullable;
/**
* A builder of values for {@link Artifact} keys when the key is not a simple generated artifact. To
* save memory, ordinary generated artifacts (non-middleman, non-tree) have their metadata accessed
* directly from the corresponding {@link ActionExecutionValue}. This SkyFunction is therefore only
* usable for source, middleman, and tree artifacts.
*/
class ArtifactFunction implements SkyFunction {

  // When true (checked lazily on each tree-artifact request), the output directory of a
  // tree artifact is created on disk before its template action is expanded.
  private final Supplier<Boolean> mkdirForTreeArtifacts;

  /**
   * Value signaling that a source artifact is missing or unreadable; carries the exception
   * describing the failure so callers can report it.
   */
  public static final class MissingFileArtifactValue implements SkyValue {
    private final MissingInputFileException exception;

    private MissingFileArtifactValue(MissingInputFileException e) {
      this.exception = e;
    }

    /** Returns the exception describing the missing input file. */
    public MissingInputFileException getException() {
      return exception;
    }
  }

  public ArtifactFunction(Supplier<Boolean> mkdirForTreeArtifacts) {
    this.mkdirForTreeArtifacts = mkdirForTreeArtifacts;
  }

  /**
   * Computes the value for a source, middleman, or tree artifact key (ordinary generated
   * artifacts never reach this function; see the class comment).
   *
   * <p>Returns {@code null} whenever a Skyframe dependency requested via {@code env} is not
   * yet available, per the {@link SkyFunction} contract.
   */
  @Override
  public SkyValue compute(SkyKey skyKey, Environment env)
      throws ArtifactFunctionException, InterruptedException {
    Artifact artifact = (Artifact) skyKey;
    if (artifact.isSourceArtifact()) {
      try {
        return createSourceValue(artifact, env);
      } catch (IOException e) {
        // I/O failures while reading a source are treated as transient errors.
        throw new ArtifactFunctionException(e, Transience.TRANSIENT);
      }
    }
    Artifact.DerivedArtifact derivedArtifact = (DerivedArtifact) artifact;
    ArtifactDependencies artifactDependencies =
        ArtifactDependencies.discoverDependencies(derivedArtifact, env);
    if (artifactDependencies == null) {
      // The generating action's lookup value is not yet available.
      return null;
    }
    // If the action is an ActionTemplate, we need to expand the ActionTemplate into concrete
    // actions, execute those actions in parallel and then aggregate the action execution results.
    if (artifactDependencies.isTemplateActionForTreeArtifact()) {
      if (mkdirForTreeArtifacts.get()) {
        mkdirForTreeArtifact(artifact, env);
      }
      return createTreeArtifactValueFromActionKey(artifactDependencies, env);
    }
    ActionLookupData generatingActionKey = derivedArtifact.getGeneratingActionKey();
    ActionExecutionValue actionValue = (ActionExecutionValue) env.getValue(generatingActionKey);
    if (actionValue == null) {
      return null;
    }
    if (artifact.isTreeArtifact()) {
      // We got a request for the whole tree artifact. We can just return the associated
      // TreeArtifactValue.
      return Preconditions.checkNotNull(actionValue.getTreeArtifactValue(artifact), artifact);
    }
    // At this point the artifact must be a middleman: source, tree, and template-expanded
    // artifacts were all handled above.
    Preconditions.checkState(artifact.isMiddlemanArtifact(), artifact);
    Action action =
        Preconditions.checkNotNull(
            artifactDependencies.actionLookupValue.getAction(generatingActionKey.getActionIndex()),
            "Null middleman action? %s",
            artifactDependencies);
    FileArtifactValue individualMetadata =
        Preconditions.checkNotNull(
            actionValue.getArtifactValue(artifact), "%s %s", artifact, actionValue);
    if (isAggregatingValue(action)) {
      return createAggregatingValue(artifact, action, individualMetadata, env);
    }
    return individualMetadata;
  }

  /**
   * Creates the on-disk output directory (and parents) for a tree artifact, reporting an
   * error event and rethrowing as a transient exception on I/O failure.
   */
  private static void mkdirForTreeArtifact(Artifact artifact, Environment env)
      throws ArtifactFunctionException {
    try {
      artifact.getPath().createDirectoryAndParents();
    } catch (IOException e) {
      env.getListener()
          .handle(
              Event.error(
                  String.format(
                      "Failed to create output directory for TreeArtifact %s: %s",
                      artifact, e.getMessage())));
      throw new ArtifactFunctionException(e, Transience.TRANSIENT);
    }
  }

  /**
   * Builds a {@link TreeArtifactValue} for a template-generated tree artifact by collecting
   * the values of all {@link TreeFileArtifact}s produced by the expanded actions. Returns
   * {@code null} while the expansion or any expanded action's execution value is missing.
   */
  private static TreeArtifactValue createTreeArtifactValueFromActionKey(
      ArtifactDependencies artifactDependencies, Environment env) throws InterruptedException {
    // Request the list of expanded actions from the ActionTemplate.
    ActionTemplateExpansion actionTemplateExpansion =
        artifactDependencies.getActionTemplateExpansion(env);
    if (actionTemplateExpansion == null) {
      // The expanded actions are not yet available.
      return null;
    }
    ActionTemplateExpansionValue expansionValue = actionTemplateExpansion.getValue();
    ImmutableList<ActionLookupData> expandedActionExecutionKeys =
        actionTemplateExpansion.getExpandedActionExecutionKeys();
    Map<SkyKey, SkyValue> expandedActionValueMap = env.getValues(expandedActionExecutionKeys);
    if (env.valuesMissing()) {
      // The execution values of the expanded actions are not yet all available.
      return null;
    }
    // Aggregate the ArtifactValues for individual TreeFileArtifacts into a TreeArtifactValue for
    // the parent TreeArtifact.
    ImmutableMap.Builder<TreeFileArtifact, FileArtifactValue> map = ImmutableMap.builder();
    for (ActionLookupData actionKey : expandedActionExecutionKeys) {
      ActionExecutionValue actionExecutionValue =
          (ActionExecutionValue)
              Preconditions.checkNotNull(
                  expandedActionValueMap.get(actionKey),
                  "Missing tree value: %s %s %s",
                  artifactDependencies,
                  expansionValue,
                  expandedActionValueMap);
      // Keep only the outputs that are children of the parent tree artifact.
      Iterable<TreeFileArtifact> treeFileArtifacts =
          Iterables.transform(
              Iterables.filter(
                  actionExecutionValue.getAllFileValues().keySet(),
                  artifact -> {
                    Preconditions.checkState(
                        artifact.hasParent(),
                        "No parent: %s %s %s",
                        artifact,
                        actionExecutionValue,
                        artifactDependencies);
                    return artifact.getParent().equals(artifactDependencies.artifact);
                  }),
              artifact -> (TreeFileArtifact) artifact);
      Preconditions.checkState(
          !Iterables.isEmpty(treeFileArtifacts),
          "Action denoted by %s does not output TreeFileArtifact from %s",
          actionKey,
          artifactDependencies);
      for (TreeFileArtifact treeFileArtifact : treeFileArtifacts) {
        FileArtifactValue value =
            ActionExecutionValue.createSimpleFileArtifactValue(
                treeFileArtifact, actionExecutionValue);
        map.put(treeFileArtifact, value);
      }
    }
    // Return the aggregated TreeArtifactValue.
    return TreeArtifactValue.create(map.build());
  }

  /**
   * Computes the value of a source artifact: a {@link MissingFileArtifactValue} when the
   * file is absent or unreadable, a directory hash when source-directory tracking is enabled
   * and the path is a directory, or an ordinary {@link FileArtifactValue} otherwise.
   * Returns {@code null} when a dependency is not yet available.
   */
  private static SkyValue createSourceValue(Artifact artifact, Environment env)
      throws IOException, InterruptedException {
    RootedPath path = RootedPath.toRootedPath(artifact.getRoot().getRoot(), artifact.getPath());
    SkyKey fileSkyKey = FileValue.key(path);
    FileValue fileValue;
    try {
      fileValue = (FileValue) env.getValueOrThrow(fileSkyKey, IOException.class);
    } catch (IOException e) {
      return makeMissingInputFileValue(artifact, e);
    }
    if (fileValue == null) {
      return null;
    }
    if (!fileValue.exists()) {
      return makeMissingInputFileValue(artifact, null);
    }
    // For directory artifacts that are not Filesets, we initiate a directory traversal here, and
    // compute a hash from the directory structure.
    if (fileValue.isDirectory() && TrackSourceDirectoriesFlag.trackSourceDirectories()) {
      // We rely on the guarantees of RecursiveFilesystemTraversalFunction for correctness.
      //
      // This approach may have unexpected interactions with --package_path. In particular, the exec
      // root is setup from the loading / analysis phase, and it is now too late to change it;
      // therefore, this may traverse a different set of files depending on which targets are built
      // at the same time and what the package-path layout is (this may be moot if there is only one
      // entry). Or this may return a set of files that's inconsistent with those actually available
      // to the action (for local execution).
      //
      // In the future, we need to make this result the source of truth for the files available to
      // the action so that we at least have consistency.
      TraversalRequest request = TraversalRequest.create(
          DirectTraversalRoot.forRootedPath(path),
          /*isRootGenerated=*/ false,
          PackageBoundaryMode.CROSS,
          /*strictOutputFiles=*/ true,
          /*skipTestingForSubpackage=*/ true,
          /*errorInfo=*/ null);
      RecursiveFilesystemTraversalValue value;
      try {
        value =
            (RecursiveFilesystemTraversalValue) env.getValueOrThrow(
                request, RecursiveFilesystemTraversalException.class);
      } catch (RecursiveFilesystemTraversalException e) {
        throw new IOException(e);
      }
      if (value == null) {
        return null;
      }
      // Fingerprint the name and content digest of every file found by the traversal.
      Fingerprint fp = new Fingerprint();
      for (ResolvedFile file : value.getTransitiveFiles().toList()) {
        fp.addString(file.getNameInSymlinkTree().getPathString());
        fp.addBytes(file.getMetadata().getDigest());
      }
      return FileArtifactValue.createForDirectoryWithHash(fp.digestAndReset());
    }
    try {
      return FileArtifactValue.createForSourceArtifact(artifact, fileValue);
    } catch (IOException e) {
      return makeMissingInputFileValue(artifact, e);
    }
  }

  /** Wraps a missing-input failure (optionally with its cause's message) into a SkyValue. */
  private static SkyValue makeMissingInputFileValue(Artifact artifact, Exception failure) {
    String extraMsg = (failure == null) ? "" : (":" + failure.getMessage());
    MissingInputFileException ex =
        new MissingInputFileException(constructErrorMessage(artifact) + extraMsg, null);
    return new MissingFileArtifactValue(ex);
  }

  /**
   * Builds an {@link AggregatingArtifactValue} (or {@link RunfilesArtifactValue} for
   * non-aggregating middleman types) holding the sorted file and directory inputs of the
   * middleman action. Returns {@code null} when any input value is not yet available.
   */
  @Nullable
  private static AggregatingArtifactValue createAggregatingValue(
      Artifact artifact,
      ActionAnalysisMetadata action,
      FileArtifactValue value,
      SkyFunction.Environment env)
      throws InterruptedException {
    ImmutableList.Builder<Pair<Artifact, FileArtifactValue>> fileInputsBuilder =
        ImmutableList.builder();
    ImmutableList.Builder<Pair<Artifact, TreeArtifactValue>> directoryInputsBuilder =
        ImmutableList.builder();
    // Avoid iterating over nested set twice.
    Iterable<Artifact> inputs = action.getInputs().toList();
    Map<SkyKey, SkyValue> values = env.getValues(Artifact.keys(inputs));
    if (env.valuesMissing()) {
      return null;
    }
    for (Artifact input : inputs) {
      SkyValue inputValue = Preconditions.checkNotNull(values.get(Artifact.key(input)), input);
      if (inputValue instanceof FileArtifactValue) {
        fileInputsBuilder.add(Pair.of(input, (FileArtifactValue) inputValue));
      } else if (inputValue instanceof ActionExecutionValue) {
        fileInputsBuilder.add(
            Pair.of(
                input,
                ActionExecutionValue.createSimpleFileArtifactValue(
                    (DerivedArtifact) input, (ActionExecutionValue) inputValue)));
      } else if (inputValue instanceof TreeArtifactValue) {
        directoryInputsBuilder.add(Pair.of(input, (TreeArtifactValue) inputValue));
      } else {
        // We do not recurse in aggregating middleman artifacts.
        Preconditions.checkState(
            !(inputValue instanceof AggregatingArtifactValue),
            "%s %s %s",
            artifact,
            action,
            inputValue);
      }
    }
    // Sort inputs by exec path so the resulting value is deterministic.
    ImmutableList<Pair<Artifact, FileArtifactValue>> fileInputs =
        ImmutableList.sortedCopyOf(
            Comparator.comparing(pair -> pair.getFirst().getExecPathString()),
            fileInputsBuilder.build());
    ImmutableList<Pair<Artifact, TreeArtifactValue>> directoryInputs =
        ImmutableList.sortedCopyOf(
            Comparator.comparing(pair -> pair.getFirst().getExecPathString()),
            directoryInputsBuilder.build());
    return (action.getActionType() == MiddlemanType.AGGREGATING_MIDDLEMAN)
        ? new AggregatingArtifactValue(fileInputs, directoryInputs, value)
        : new RunfilesArtifactValue(fileInputs, directoryInputs, value);
  }

  /**
   * Returns whether this value needs to contain the data of all its inputs. Currently only tests to
   * see if the action is an aggregating or runfiles middleman action. However, may include Fileset
   * artifacts in the future.
   */
  private static boolean isAggregatingValue(ActionAnalysisMetadata action) {
    switch (action.getActionType()) {
      case AGGREGATING_MIDDLEMAN:
      case RUNFILES_MIDDLEMAN:
        return true;
      default:
        return false;
    }
  }

  /** Returns the printed label of the artifact's owner, used to tag evaluations. */
  @Override
  public String extractTag(SkyKey skyKey) {
    return Label.print(((Artifact) skyKey).getOwner());
  }

  /** Returns the artifact's owner, which must be an {@link ActionLookupKey}. */
  static ActionLookupKey getActionLookupKey(Artifact artifact) {
    ArtifactOwner artifactOwner = artifact.getArtifactOwner();
    Preconditions.checkState(
        artifactOwner instanceof ActionLookupKey, "%s %s", artifact, artifactOwner);
    return (ActionLookupKey) artifactOwner;
  }

  /**
   * Looks up the {@link ActionLookupValue} for the given key. A {@code null} result (value
   * not yet present) is only tolerated for the coverage report key; any other owner is
   * expected to already have been evaluated.
   */
  @Nullable
  static ActionLookupValue getActionLookupValue(
      ActionLookupKey actionLookupKey, SkyFunction.Environment env) throws InterruptedException {
    ActionLookupValue value = (ActionLookupValue) env.getValue(actionLookupKey);
    if (value == null) {
      Preconditions.checkState(
          actionLookupKey == CoverageReportValue.COVERAGE_REPORT_KEY,
          "Not-yet-present artifact owner: %s",
          actionLookupKey);
      return null;
    }
    return value;
  }

  /** Exception thrown by this SkyFunction, always wrapping an {@link IOException}. */
  static final class ArtifactFunctionException extends SkyFunctionException {
    ArtifactFunctionException(IOException e, Transience transience) {
      super(e, transience);
    }
  }

  /** Builds the "missing input file" message, including the owner when one is known. */
  private static String constructErrorMessage(Artifact artifact) {
    if (artifact.getOwner() == null) {
      return String.format("missing input file '%s'", artifact.getExecPathString());
    } else {
      return String.format(
          "missing input file '%s', owner: '%s'",
          artifact.getExecPathString(), artifact.getOwner());
    }
  }

  /** Describes dependencies of derived artifacts. */
  // TODO(b/19539699): extend this to comprehensively support all special artifact types (e.g.
  // middleman, etc).
  static class ArtifactDependencies {
    private final DerivedArtifact artifact;
    private final ActionLookupValue actionLookupValue;

    private ArtifactDependencies(DerivedArtifact artifact, ActionLookupValue actionLookupValue) {
      this.artifact = artifact;
      this.actionLookupValue = actionLookupValue;
    }

    /**
     * Constructs an {@link ArtifactDependencies} for the provided {@code derivedArtifact}. Returns
     * {@code null} if any dependencies are not yet ready.
     */
    @Nullable
    static ArtifactDependencies discoverDependencies(
        Artifact.DerivedArtifact derivedArtifact, SkyFunction.Environment env)
        throws InterruptedException {
      ActionLookupData generatingActionKey = derivedArtifact.getGeneratingActionKey();
      ActionLookupValue actionLookupValue =
          ArtifactFunction.getActionLookupValue(generatingActionKey.getActionLookupKey(), env);
      if (actionLookupValue == null) {
        return null;
      }
      return new ArtifactDependencies(derivedArtifact, actionLookupValue);
    }

    /** Returns true if this is a tree artifact whose generating action is an ActionTemplate. */
    boolean isTemplateActionForTreeArtifact() {
      return artifact.isTreeArtifact()
          && actionLookupValue.getActions().get(artifact.getGeneratingActionKey().getActionIndex())
              instanceof ActionTemplate;
    }

    /**
     * Returns action template expansion information or {@code null} if that information is
     * unavailable.
     *
     * <p>Must not be called if {@code !isTemplateActionForTreeArtifact()}.
     */
    @Nullable
    ActionTemplateExpansion getActionTemplateExpansion(SkyFunction.Environment env)
        throws InterruptedException {
      Preconditions.checkState(
          isTemplateActionForTreeArtifact(), "Action is unexpectedly non-template: %s", this);
      ActionTemplateExpansionValue.ActionTemplateExpansionKey key =
          ActionTemplateExpansionValue.key(
              artifact.getArtifactOwner(), artifact.getGeneratingActionKey().getActionIndex());
      ActionTemplateExpansionValue value = (ActionTemplateExpansionValue) env.getValue(key);
      if (value == null) {
        return null;
      }
      return new ActionTemplateExpansion(key, value);
    }

    @Override
    public String toString() {
      return MoreObjects.toStringHelper(this)
          .add("artifact", artifact)
          .add("generatingActionKey", artifact.getGeneratingActionKey())
          .add("actionLookupValue", actionLookupValue)
          .toString();
    }
  }

  /** Pairs an action-template expansion key with its already-computed value. */
  static class ActionTemplateExpansion {
    private final ActionTemplateExpansionValue.ActionTemplateExpansionKey key;
    private final ActionTemplateExpansionValue value;

    private ActionTemplateExpansion(
        ActionTemplateExpansionValue.ActionTemplateExpansionKey key,
        ActionTemplateExpansionValue value) {
      this.key = key;
      this.value = value;
    }

    ActionTemplateExpansionValue.ActionTemplateExpansionKey getKey() {
      return key;
    }

    ActionTemplateExpansionValue getValue() {
      return value;
    }

    /** Returns the generating-action keys of the primary outputs of all expanded actions. */
    ImmutableList<ActionLookupData> getExpandedActionExecutionKeys() {
      int numActions = value.getNumActions();
      ImmutableList.Builder<ActionLookupData> expandedActionExecutionKeys =
          ImmutableList.builderWithExpectedSize(numActions);
      for (ActionAnalysisMetadata action : value.getActions()) {
        expandedActionExecutionKeys.add(
            ((DerivedArtifact) action.getPrimaryOutput()).getGeneratingActionKey());
      }
      return expandedActionExecutionKeys.build();
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sis.openoffice;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.logging.LogRecord;
import com.sun.star.lang.Locale;
import com.sun.star.lang.XLocalizable;
import com.sun.star.lang.XServiceInfo;
import com.sun.star.lang.XServiceName;
import com.sun.star.uno.XComponentContext;
import com.sun.star.lib.uno.helper.WeakBase;
import org.apache.sis.util.Classes;
import org.apache.sis.util.Exceptions;
import org.apache.sis.util.logging.Logging;
import org.apache.sis.util.resources.Vocabulary;
/**
* Base class for methods to export as formulas in the Apache OpenOffice spread sheet.
*
* @author Martin Desruisseaux (IRD, Geomatys)
* @version 0.8
* @since 0.8
* @module
*/
public abstract class CalcAddins extends WeakBase implements XServiceName, XServiceInfo, XLocalizable {
    /**
     * Gives indirect access to the service manager of the office process.
     * For example {@code com.sun.star.sdb.DatabaseContext} holds databases registered with OpenOffice.
     */
    protected final XComponentContext context;

    /**
     * The locale set through the {@code com.sun.star.lang.XLocalizable} interface,
     * or {@code null} if not yet defined.
     */
    private Locale locale;

    /**
     * Lazily computed Java SDK counterpart of {@link #locale}.
     * Invalidated every time a new OpenOffice locale is set.
     */
    private transient java.util.Locale javaLocale;

    /**
     * The logger for warnings, created the first time {@link #getLogger()} is invoked.
     */
    private transient Logger logger;

    /**
     * Creates a new provider of add-in formulas for Calc.
     *
     * @param context the value to assign to the {@link #context} field.
     */
    protected CalcAddins(final XComponentContext context) {
        this.context = context;
    }

    /**
     * The service name that can be used to create such an object by a factory.
     *
     * @return the service name.
     */
    @Override
    public abstract String getServiceName();

    /**
     * Returns the names of all services supported by this implementation.
     * This add-in declares exactly one service: the one given by {@link #getServiceName()}.
     *
     * @return sequence of service names that are supported.
     */
    @Override
    public final String[] getSupportedServiceNames() {
        final String[] names = {
            getServiceName()
        };
        return names;
    }

    /**
     * Tells whether the given service is implemented by this object.
     *
     * @param name name of service to be tested.
     * @return {@code true} if the service is supported, {@code false} otherwise.
     */
    @Override
    public final boolean supportsService(final String name) {
        final String service = getServiceName();
        return name.equals(service);
    }

    /**
     * Sets the locale to be used by this object, discarding any cached Java locale.
     *
     * @param locale the new locale.
     */
    @Override
    public final synchronized void setLocale(final Locale locale) {
        javaLocale = null;              // Invalidate the cached Java SDK locale.
        this.locale = locale;
    }

    /**
     * Returns the current locale used by this instance, creating a default one if needed.
     *
     * @return the current locale.
     */
    @Override
    public final synchronized Locale getLocale() {
        Locale current = locale;
        if (current == null) {
            current = new Locale();
            locale = current;
        }
        return current;
    }

    /**
     * Returns the locale as an object from the Java standard SDK,
     * computing and caching it on first use.
     *
     * @return the current locale.
     */
    protected final synchronized java.util.Locale getJavaLocale() {
        if (javaLocale == null) {
            if (locale == null) {
                javaLocale = java.util.Locale.getDefault();
            } else {
                javaLocale = new java.util.Locale(nonNull(locale.Language),
                                                  nonNull(locale.Country),
                                                  nonNull(locale.Variant));
            }
        }
        return javaLocale;
    }

    /**
     * Returns the given string, or an empty string if the argument is {@code null}.
     */
    private static String nonNull(final String text) {
        return (text != null) ? text : "";
    }

    /**
     * The string to return when a formula does not have any value to report.
     *
     * @return the string with a message for missing values.
     */
    final String noResultString() {
        final Vocabulary resources = Vocabulary.getResources(getJavaLocale());
        return resources.getString(Vocabulary.Keys.Unknown);
    }

    /**
     * Returns the minimal length of the specified arrays. In the special case where one array
     * has a length of 1, we assume that this single element will be repeated for all elements
     * in the other array.
     */
    static int getMinimalLength(final Object[] array1, final Object[] array2) {
        if (array1 == null || array2 == null) {
            return 0;
        }
        final int n1 = array1.length;
        final int n2 = array2.length;
        if (n1 == 1) return n2;         // Single element repeated for all of array2.
        if (n2 == 1) return n1;         // Single element repeated for all of array1.
        return Math.min(n1, n2);
    }

    /**
     * Returns the localized message of the given exception, or a default string if no
     * message is available. This method never returns a null value.
     *
     * @param exception the exception for which to get the localized message.
     * @return an error message to report to the user.
     */
    protected final String getLocalizedMessage(final Throwable exception) {
        String message = Exceptions.getLocalizedMessage(exception, getJavaLocale());
        if (message == null) {
            message = Classes.getShortClassName(exception);
        }
        return message;
    }

    /**
     * Reports an exception that occurred in a method which can not return a {@link String}.
     * The stack trace is logged at the {@link Level#WARNING} level.
     *
     * @param method the method from which an exception occurred.
     * @param exception the exception.
     */
    final void reportException(final String method, final Exception exception) {
        final Logger target = getLogger();
        final LogRecord warning = new LogRecord(Level.WARNING, getLocalizedMessage(exception));
        warning.setSourceClassName(getClass().getName());
        warning.setSourceMethodName(method);
        warning.setLoggerName(target.getName());
        warning.setThrown(exception);
        target.log(warning);
    }

    /**
     * Returns the logger to use for logging warnings, creating it on first use.
     *
     * @return the logger to use.
     */
    protected final synchronized Logger getLogger() {
        Logger lg = logger;
        if (lg == null) {
            lg = Logging.getLogger(Registration.LOGGER);
            logger = lg;
        }
        return lg;
    }
}
| |
/*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.batik.apps.svgbrowser;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.Frame;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.Shape;
import java.awt.event.ActionEvent;
import java.awt.geom.AffineTransform;
import java.awt.geom.Rectangle2D;
import java.text.AttributedCharacterIterator;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.ResourceBundle;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.BorderFactory;
import javax.swing.ButtonGroup;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JRadioButton;
import javax.swing.JTextField;
import org.apache.batik.gvt.GVTTreeWalker;
import org.apache.batik.gvt.GraphicsNode;
import org.apache.batik.gvt.TextNode;
import org.apache.batik.gvt.text.Mark;
import org.apache.batik.swing.JSVGCanvas;
import org.apache.batik.util.resources.ResourceManager;
import org.apache.batik.util.gui.ExtendedGridBagConstraints;
import org.apache.batik.util.gui.resource.ActionMap;
import org.apache.batik.util.gui.resource.ButtonFactory;
import org.apache.batik.util.gui.resource.MissingListenerException;
/**
* This class represents a Dialog that lets the user searching for text inside
* an SVG document.
*
* @author <a href="mailto:tkormann@apache.org">Thierry Kormann</a>
* @version $Id$
*/
public class FindDialog extends JDialog implements ActionMap {
/**
* The resource file name
*/
protected static final String RESOURCES =
"org.apache.batik.apps.svgbrowser.resources.FindDialog";
// action names
public static final String FIND_ACTION = "FindButtonAction";
public static final String CLEAR_ACTION = "ClearButtonAction";
public static final String CLOSE_ACTION = "CloseButtonAction";
/**
* The resource bundle
*/
protected static ResourceBundle bundle;
/**
* The resource manager
*/
protected static ResourceManager resources;
static {
bundle = ResourceBundle.getBundle(RESOURCES, Locale.getDefault());
resources = new ResourceManager(bundle);
}
/** The button factory */
protected ButtonFactory buttonFactory;
/** The GVT root into which text is searched. */
protected GraphicsNode gvtRoot;
/** The GVTTreeWalker used to scan the GVT Tree. */
protected GVTTreeWalker walker;
/** The current index in the TextNode's string. */
protected int currentIndex;
/** The TextField that owns the text to search. */
protected JTextField search;
/** The next button. */
protected JButton findButton;
/** The next button. */
protected JButton clearButton;
/** The cancel button. */
protected JButton closeButton;
/** The case sensitive button. */
protected JCheckBox caseSensitive;
/** The canvas. */
protected JSVGCanvas svgCanvas;
/** The highlight button. */
protected JRadioButton highlightButton;
/** The highlight and center button. */
protected JRadioButton highlightCenterButton;
/** The highlight center and zoom button. */
protected JRadioButton highlightCenterZoomButton;
/**
 * Constructs a new <code>FindDialog</code> with no owner frame.
 *
 * @param svgCanvas the canvas whose document is searched
 */
public FindDialog(JSVGCanvas svgCanvas) {
    this(null, svgCanvas);
}
/**
 * Constructs a new <code>FindDialog</code>, wiring the button actions and
 * assembling the find, result-display and button panels.
 *
 * @param owner     the owner frame of this dialog, may be null
 * @param svgCanvas the canvas whose document is searched
 */
public FindDialog(Frame owner, JSVGCanvas svgCanvas) {
    super(owner, resources.getString("Dialog.title"));
    this.svgCanvas = svgCanvas;
    buttonFactory = new ButtonFactory(bundle, this);
    // Register the actions looked up by the buttons created through the factory.
    // NOTE(review): `listeners` is not declared in this portion of the file —
    // presumably the Map backing the ActionMap implementation; verify elsewhere.
    listeners.put(FIND_ACTION,
                  new FindButtonAction());
    listeners.put(CLEAR_ACTION,
                  new ClearButtonAction());
    listeners.put(CLOSE_ACTION,
                  new CloseButtonAction());
    JPanel p = new JPanel(new BorderLayout());
    p.setBorder(BorderFactory.createEmptyBorder(4, 4, 4, 4));
    p.add(createFindPanel(), BorderLayout.CENTER);
    p.add(createShowResultPanel(), BorderLayout.SOUTH);
    getContentPane().add(p, BorderLayout.CENTER);
    getContentPane().add(createButtonsPanel(), BorderLayout.SOUTH);
}
/**
 * Creates the Find panel: a labelled text field for the search string plus a
 * case-sensitivity check box, laid out on a GridBagLayout.
 * Note: the same constraints object is mutated between adds, so statement
 * order is significant.
 */
protected JPanel createFindPanel() {
    JPanel panel = new JPanel(new GridBagLayout());
    panel.setBorder(BorderFactory.createTitledBorder
                    (BorderFactory.createEtchedBorder(),
                     resources.getString("Panel.title")));
    ExtendedGridBagConstraints gbc = new ExtendedGridBagConstraints();
    gbc.insets = new Insets(2, 2, 2, 2);
    // "Find" label in the first cell: fixed size, pushed to the east edge.
    gbc.anchor = ExtendedGridBagConstraints.EAST;
    gbc.fill = ExtendedGridBagConstraints.NONE;
    gbc.setWeight(0, 0);
    gbc.setGridBounds(0, 0, 1, 1);
    panel.add(new JLabel(resources.getString("FindLabel.text")), gbc);
    // The search text field takes all remaining horizontal space.
    gbc.fill = ExtendedGridBagConstraints.HORIZONTAL;
    gbc.setWeight(1.0, 0);
    gbc.setGridBounds(1, 0, 2, 1);
    panel.add(search = new JTextField(20), gbc);
    // Case-sensitivity check box on the row below the text field.
    gbc.fill = ExtendedGridBagConstraints.NONE;
    gbc.anchor = ExtendedGridBagConstraints.WEST;
    gbc.setWeight(0, 0);
    gbc.setGridBounds(1, 1, 1, 1);
    caseSensitive = buttonFactory.createJCheckBox("CaseSensitiveCheckBox");
    panel.add(caseSensitive, gbc);
    return panel;
}
/**
 * Creates the panel with the three mutually exclusive radio buttons that choose
 * how a match is presented: highlight only, highlight and center, or highlight,
 * center and zoom. "Highlight" is selected by default.
 */
protected JPanel createShowResultPanel() {
    JPanel panel = new JPanel(new GridBagLayout());
    panel.setBorder(BorderFactory.createTitledBorder
                    (BorderFactory.createEtchedBorder(),
                     resources.getString("ShowResultPanel.title")));
    ExtendedGridBagConstraints gbc = new ExtendedGridBagConstraints();
    gbc.insets = new Insets(2, 2, 2, 2);
    gbc.anchor = ExtendedGridBagConstraints.WEST;
    gbc.fill = ExtendedGridBagConstraints.NONE;
    gbc.setWeight(0, 0);
    // A ButtonGroup makes the three radio buttons mutually exclusive.
    ButtonGroup grp = new ButtonGroup();
    highlightButton = buttonFactory.createJRadioButton("Highlight");
    highlightButton.setSelected(true);
    grp.add(highlightButton);
    gbc.setGridBounds(0, 0, 1, 1);
    panel.add(highlightButton, gbc);
    highlightCenterButton =
        buttonFactory.createJRadioButton("HighlightAndCenter");
    grp.add(highlightCenterButton);
    gbc.setGridBounds(0, 1, 1, 1);
    panel.add(highlightCenterButton, gbc);
    highlightCenterZoomButton =
        buttonFactory.createJRadioButton("HighlightCenterAndZoom");
    grp.add(highlightCenterZoomButton);
    gbc.setGridBounds(0, 2, 1, 1);
    panel.add(highlightCenterZoomButton, gbc);
    return panel;
}
/**
 * Creates the bottom panel holding the Find, Clear and Close buttons,
 * right-aligned in a flow layout.
 */
protected JPanel createButtonsPanel() {
    JPanel panel = new JPanel(new FlowLayout(FlowLayout.RIGHT));
    findButton  = buttonFactory.createJButton("FindButton");
    clearButton = buttonFactory.createJButton("ClearButton");
    closeButton = buttonFactory.createJButton("CloseButton");
    panel.add(findButton);
    panel.add(clearButton);
    panel.add(closeButton);
    return panel;
}
/**
 * Sets the graphics node into which text is searched and resets the
 * tree walker to start a fresh traversal from that root.
 *
 * @param gvtRoot the GVT root node, or null to clear the search target
 */
public void setGraphicsNode(GraphicsNode gvtRoot) {
    this.gvtRoot = gvtRoot;
    this.walker = (gvtRoot != null) ? new GVTTreeWalker(gvtRoot) : null;
}
/**
 * Returns the next GraphicsNode whose text contains the specified string,
 * or null if the end of the document is reached.
 *
 * As a side effect, {@code currentIndex} is advanced past the previous
 * match so repeated calls walk through successive occurrences, and
 * {@code walker} is moved forward through the GVT tree.
 *
 * @param text the text to match
 */
protected GraphicsNode getNext(String text) {
    // Lazily (re)create the walker after a reset (e.g. "clear" was pressed).
    // NOTE(review): if both walker and gvtRoot are null the next line throws
    // an NPE -- presumably callers only invoke this once a document is loaded;
    // confirm against the dialog's lifecycle.
    if (walker == null && gvtRoot != null) {
        walker = new GVTTreeWalker(gvtRoot);
    }
    GraphicsNode gn = walker.getCurrentGraphicsNode();
    // First try the current node again, starting just past the previous
    // match (offset by the pattern length so the same hit is not returned).
    int index = match(gn, text, currentIndex + text.length());
    if (index >= 0) {
        currentIndex = index;
    } else {
        // No further match here: scan forward through the tree until a
        // node matches or the traversal is exhausted (gn becomes null).
        currentIndex = 0;
        gn = walker.nextGraphicsNode();
        while (gn != null &&
               ((currentIndex = match(gn, text, currentIndex)) < 0)) {
            currentIndex = 0;
            gn = walker.nextGraphicsNode();
        }
    }
    return gn;
}
/**
 * Returns the index inside the specified node's text of the first
 * occurrence of <code>text</code> at or after <code>index</code>, or -1
 * when the node is not a visible TextNode, the pattern is empty, or the
 * pattern does not occur. Honors the case-sensitivity checkbox.
 *
 * @param node the graphics node to check
 * @param text the pattern to look for
 * @param index the position from which the search starts
 */
protected int match(GraphicsNode node, String text, int index) {
    boolean searchable = node instanceof TextNode
        && node.isVisible()
        && text != null
        && text.length() > 0;
    if (!searchable) {
        return -1;
    }
    String content = ((TextNode) node).getText();
    String pattern = text;
    if (!caseSensitive.isSelected()) {
        // Case-insensitive search: normalize both sides to lower case.
        content = content.toLowerCase();
        pattern = pattern.toLowerCase();
    }
    return content.indexOf(pattern, index);
}
/**
 * Shows the current selected <code>TextNode</code>: selects the matched
 * substring on the canvas and, depending on the chosen display mode,
 * recenters and/or zooms the view onto the highlighted region.
 */
protected void showSelectedGraphicsNode() {
    GraphicsNode gn = walker.getCurrentGraphicsNode();
    if (!(gn instanceof TextNode)) {
        return;
    }
    TextNode textNode = (TextNode)gn;
    // mark the selection of the substring found
    String text = textNode.getText();
    String pattern = search.getText();
    if (!caseSensitive.isSelected()) {
        // Same normalization as match(), so indexOf finds the same hit.
        text = text.toLowerCase();
        pattern = pattern.toLowerCase();
    }
    // Re-locate the match; currentIndex was set by getNext(), so this
    // finds the occurrence starting at (or after) that position.
    int end = text.indexOf(pattern, currentIndex);
    // Walk the attributed character iterator to the start of the match...
    AttributedCharacterIterator aci =
        textNode.getAttributedCharacterIterator();
    aci.first();
    for (int i=0; i < end; ++i) {
        aci.next();
    }
    Mark startMark = textNode.getMarkerForChar(aci.getIndex(), true);
    // ...then to its last character, to obtain the selection end marker.
    for (int i = 0; i < pattern.length()-1; ++i) {
        aci.next();
    }
    Mark endMark = textNode.getMarkerForChar(aci.getIndex(), false);
    svgCanvas.select(startMark, endMark);
    // zoom on the TextNode if needed
    if (highlightButton.isSelected()) {
        // "Highlight only" mode: no view transform change.
        return;
    }
    // get the highlight shape in GVT root (global) coordinate system
    Shape s = textNode.getHighlightShape();
    AffineTransform at;
    if (highlightCenterZoomButton.isSelected()) {
        // Zoom mode scales relative to the initial (fit-to-window) view.
        at = svgCanvas.getInitialTransform();
    } else {
        at = svgCanvas.getRenderingTransform();
    }
    // get the bounds of the highlight shape in the canvas coordinate system
    Rectangle2D gnb = at.createTransformedShape(s).getBounds();
    Dimension canvasSize = svgCanvas.getSize();
    // translate the highlight region to (0, 0) in the canvas coordinate
    // system (move its center onto the origin)
    AffineTransform Tx = AffineTransform.getTranslateInstance
        (-gnb.getX()-gnb.getWidth()/2,
         -gnb.getY()-gnb.getHeight()/2);
    if (highlightCenterZoomButton.isSelected()) {
        // zoom on the highlight shape such as the shape takes x% of the
        // canvas size
        double sx = canvasSize.width/gnb.getWidth();
        double sy = canvasSize.height/gnb.getHeight();
        // 1/8 of the limiting dimension; only ever zoom in, never out.
        double scale = Math.min(sx, sy) / 8;
        if (scale > 1) {
            Tx.preConcatenate
                (AffineTransform.getScaleInstance(scale, scale));
        }
    }
    // Move the (now origin-centered, possibly scaled) region to the
    // middle of the canvas.
    Tx.preConcatenate(AffineTransform.getTranslateInstance
                      (canvasSize.width/2, canvasSize.height/2));
    // take into account the initial transform
    AffineTransform newRT = new AffineTransform(at);
    newRT.preConcatenate(Tx);
    // change the rendering transform
    svgCanvas.setRenderingTransform(newRT);
}
// ActionMap implementation
/**
* The map that contains the listeners
*/
protected Map listeners = new HashMap(10);
/**
 * Looks up the action registered under the given key.
 *
 * @param key the key mapped with the action to get
 * @return the action, or null if no action is registered for the key
 * @throws MissingListenerException declared for ActionMap compatibility
 */
public Action getAction(String key) throws MissingListenerException {
    Object action = listeners.get(key);
    return (Action) action;
}
//////////////////////////////////////////////////////////////////////////
// Action implementation
//////////////////////////////////////////////////////////////////////////
/**
 * Action bound to the 'find' button: searches for the next occurrence of
 * the text typed into the search field and shows it, or informs the user
 * when the end of the document has been reached.
 */
protected class FindButtonAction extends AbstractAction {
    public void actionPerformed(ActionEvent e) {
        String text = search.getText();
        if (text == null || text.length() == 0) {
            // Nothing to search for.
            return;
        }
        GraphicsNode gn = getNext(text);
        if (gn == null) {
            // End of document reached: reset the traversal so the next
            // 'find' restarts from the top, and notify the user.
            walker = null;
            JOptionPane.showMessageDialog(FindDialog.this,
                                          resources.getString("End.text"),
                                          resources.getString("End.title"),
                                          JOptionPane.INFORMATION_MESSAGE);
        } else {
            showSelectedGraphicsNode();
        }
    }
}
/**
 * Action bound to the 'clear' button: empties the search field and
 * resets the tree traversal so the next search starts from the root.
 */
protected class ClearButtonAction extends AbstractAction {
    public void actionPerformed(ActionEvent e) {
        search.setText(null);
        walker = null;
    }
}
/**
 * Action bound to the 'close' button: disposes of the enclosing dialog.
 */
protected class CloseButtonAction extends AbstractAction {
    public void actionPerformed(ActionEvent e) {
        dispose();
    }
}
}
| |
package au.org.aurin.wif.controller;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.net.URL;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletResponse;
import org.eclipse.birt.core.framework.Platform;
import org.eclipse.birt.report.engine.api.EXCELRenderOption;
import org.eclipse.birt.report.engine.api.EngineConfig;
import org.eclipse.birt.report.engine.api.EngineConstants;
import org.eclipse.birt.report.engine.api.HTMLRenderOption;
import org.eclipse.birt.report.engine.api.IPDFRenderOption;
import org.eclipse.birt.report.engine.api.IReportEngine;
import org.eclipse.birt.report.engine.api.IReportEngineFactory;
import org.eclipse.birt.report.engine.api.IReportRunnable;
import org.eclipse.birt.report.engine.api.IRunAndRenderTask;
import org.eclipse.birt.report.engine.api.PDFRenderOption;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.FileSystemXmlApplicationContext;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import au.org.aurin.wif.exception.config.ParsingException;
import au.org.aurin.wif.exception.config.WifInvalidConfigException;
import au.org.aurin.wif.exception.validate.IncompleteDemandScenarioException;
import au.org.aurin.wif.exception.validate.WifInvalidInputException;
import au.org.aurin.wif.io.GeodataFinder;
import au.org.aurin.wif.model.Projection;
import au.org.aurin.wif.model.WifProject;
import au.org.aurin.wif.model.demand.AreaRequirement;
import au.org.aurin.wif.model.demand.DemandConfig;
import au.org.aurin.wif.model.demand.DemandScenario;
import au.org.aurin.wif.model.reports.BirtReport;
import au.org.aurin.wif.model.reports.suitability.CategoryItem;
import au.org.aurin.wif.model.reports.suitability.SuitabilityAnalysisItem;
import au.org.aurin.wif.model.reports.suitability.SuitabilityAnalysisReport;
import au.org.aurin.wif.model.suitability.SuitabilityScenario;
import au.org.aurin.wif.repo.demand.DemandConfigDao;
import au.org.aurin.wif.svc.demand.DemandScenarioService;
import au.org.aurin.wif.svc.report.ReportService;
import au.org.aurin.wif.svc.suitability.SuitabilityScenarioService;
@Controller
@RequestMapping(OWIURLs.PROJECT_SVC_URI)
public class BirtDemandControllerNew {
/**
* The Class BirtDemandControllerNew for generating demand reports using Birt.
*/
/** The suitability scenario service. */
@Resource
private SuitabilityScenarioService suitabilityScenarioService;
/** The demand scenario service. */
@Resource
private DemandScenarioService demandScenarioService;
/** The report service. */
@Autowired
private ReportService reportService;
@Autowired
private SuitabilityAnalysisReport SuitabilityAnalysisReport;
private @Autowired
ApplicationContext servletContext;
private @Autowired
BirtReport BirtReport;
/** The geodata finder. */
@Autowired
private GeodataFinder geodataFinder;
@Autowired
private DemandConfigDao demandConfigDao;
/** The Constant LOGGER. */
private static final Logger LOGGER = LoggerFactory
.getLogger(BirtDemandControllerNew.class);
/**
 * Generates a demand scenario HTML report.
 *
 * Serializes the scenario's area requirements into a temporary Spring XML
 * bean definition, loads it as an application context, hands that context
 * to the BIRT engine, and renders the bundled {@code /demand.rptdesign}
 * template to HTML. The embeddable BIRT output is wrapped in an XHTML
 * envelope and returned as a string; all temporary files are deleted.
 *
 * @param locale the request locale (currently unused)
 * @param model the Spring MVC model (currently unused)
 * @param id the demand scenario id
 * @return the rendered report as an XHTML string (empty body on failure)
 * @throws WifInvalidInputException if the scenario lookup fails
 * @throws WifInvalidConfigException if the project configuration is invalid
 * @throws ParsingException if the scenario outcome cannot be parsed
 * @throws IOException if a temporary report file cannot be written
 * @throws IncompleteDemandScenarioException if the scenario is incomplete
 */
@RequestMapping(method = RequestMethod.GET, value = "/{projectId}/demandScenarios/{id}/htmlOutcome")
public @ResponseBody
String htmlDemandOutcome(final Locale locale, final Model model,
    @PathVariable("id") final String id) throws WifInvalidInputException,
    WifInvalidConfigException, ParsingException, IOException,
    IncompleteDemandScenarioException {
  LOGGER.info("Welcome Birt Report!");
  LOGGER
      .info("*******>> getBirt Html Report for Demand Scenario id ={}", id);
  final DemandScenario demandScenario = demandScenarioService
      .getDemandScenario(id);
  final WifProject project = demandScenario.getWifProject();
  BirtReport = new BirtReport();
  BirtReport.setProjectName(project.getLabel());
  BirtReport.setScenarioName(demandScenario.getLabel());
  final String demandConfigId = project.getDemandConfigId();
  final DemandConfig demandConfig = demandConfigDao
      .findDemandConfigById(demandConfigId);
  // NOTE(review): this loop has an empty body; it is retained only because
  // it dereferences demandConfig, preserving the original failure mode when
  // the demand configuration is missing. Consider an explicit null check.
  for (final Projection projection : demandConfig.getProjections()) {
  }
  // Each area requirement becomes a <list> of three <value> entries:
  // land-use label, required area, projection label.
  StringBuilder str = new StringBuilder("<property name='data'><list>");
  final List<AreaRequirement> areaRequirements = demandScenarioService
      .getOutcome(id);
  for (final AreaRequirement areaRequirement : areaRequirements) {
    str.append("<list>");
    str.append("<value>");
    str.append(areaRequirement.getAllocationLULabel());
    str.append("</value>");
    str.append("<value>");
    str.append(areaRequirement.getRequiredArea());
    str.append("</value>");
    str.append("<value>");
    str.append(areaRequirement.getProjectionLabel());
    str.append("</value>");
    str.append("</list>");
  }
  str = str.append("</list>");
  str = str.append("</property>");
  // Write a throw-away Spring bean definition file carrying the report
  // data; BIRT reads the bean back through the application context below.
  final String tempDir = System.getProperty("java.io.tmpdir");
  final File filexml = new File(tempDir + "/demand.xml");
  if (!filexml.exists()) {
    filexml.createNewFile();
  }
  final FileWriter fwxml = new FileWriter(tempDir + "/demand.xml");
  final BufferedWriter bufferWritterxml = new BufferedWriter(fwxml);
  String strxml = "<?xml version='1.0' encoding='UTF-8'?>\n"
      + "<beans xmlns='http://www.springframework.org/schema/beans'\n"
      + "xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' xmlns:tx='http://www.springframework.org/schema/tx'\n"
      + "xmlns:context='http://www.springframework.org/schema/context'\n"
      + "xmlns:task='http://www.springframework.org/schema/task'\n"
      + "xsi:schemaLocation='\n"
      + "http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.0.xsd\n"
      + "http://www.springframework.org/schema/task http://www.springframework.org/schema/task/spring-task-3.0.xsd\n"
      + "http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx-3.0.xsd\n"
      + "http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd'>\n";
  bufferWritterxml.write(strxml);
  strxml = "<bean id='BirtReport' class='au.org.aurin.wif.model.reports.BirtReport'>"
      + "<property name='scenarioName' value='"
      + BirtReport.getScenarioName()
      + "' />"
      + "<property name='projectName' value='"
      + BirtReport.getProjectName()
      + "' />" + str + "</bean>\n</beans>";
  bufferWritterxml.write(strxml);
  bufferWritterxml.close();
  fwxml.close();
  final ApplicationContext context = new FileSystemXmlApplicationContext("/"
      + filexml.getPath());
  final StringBuilder mystrnew = new StringBuilder();
  String mystr = "";
  IReportEngine birtEngine = null;
  FileInputStream fis = null;
  try {
    final EngineConfig config = new EngineConfig();
    // Force instantiation of the data bean; the context is exposed to the
    // report design through the BIRT application context entry below.
    context.getBean("BirtReport");
    config.getAppContext().put(
        EngineConstants.APPCONTEXT_BIRT_VIEWER_HTTPSERVET_REQUEST, context);
    // NOTE(review): Platform.startup is invoked on every request and never
    // shut down -- confirm whether a single engine should be shared.
    Platform.startup(config);
    final IReportEngineFactory factory = (IReportEngineFactory) Platform
        .createFactoryObject(IReportEngineFactory.EXTENSION_REPORT_ENGINE_FACTORY);
    birtEngine = factory.createReportEngine(config);
    final URL peopleresource = getClass().getResource("/demand.rptdesign");
    final IReportRunnable runnable = birtEngine
        .openReportDesign(peopleresource.getFile());
    final IRunAndRenderTask runAndRenderTask = birtEngine
        .createRunAndRenderTask(runnable);
    final HTMLRenderOption htmlOptions = new HTMLRenderOption();
    htmlOptions.setOutputFormat("html");
    final File file = new File(tempDir + "/demand.html");
    if (!file.exists()) {
      file.createNewFile();
    }
    htmlOptions.setOutputFileName(tempDir + "/demand.html");
    // Embeddable output omits <html>/<body>; we add our own XHTML
    // envelope so the returned fragment is self-contained.
    htmlOptions.setEmbeddable(true);
    String mystrStart = "<!DOCTYPE html PUBLIC '-//W3C//DTD XHTML 1.0 Transitional//EN' 'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd'>";
    mystrStart = mystrStart + "<html xmlns='http://www.w3.org/1999/xhtml'>";
    mystrStart = mystrStart + "<head>";
    mystrStart = mystrStart
        + "<meta http-equiv='Content-Type' content='text/html; charset=UTF-8' /></head>";
    final String mystrEnd = "</body></html>";
    mystr = mystrStart + mystr;
    runAndRenderTask.setRenderOption(htmlOptions);
    runAndRenderTask.run();
    runAndRenderTask.close();
    final FileWriter fw = new FileWriter(tempDir + "/demand2.html");
    final BufferedWriter bufferWritter = new BufferedWriter(fw);
    bufferWritter.write(mystrStart);
    fis = new FileInputStream(file);
    // Accumulate the envelope plus the generated markup byte by byte.
    mystrnew.append(mystr);
    int content;
    while ((content = fis.read()) != -1) {
      bufferWritter.write((char) content);
      mystrnew.append((char) content);
    }
    bufferWritter.write(mystrEnd);
    mystr = mystr + mystrEnd;
    mystrnew.append(mystrEnd);
    bufferWritter.close();
    // Delete all temporary artefacts; the report lives in mystrnew now.
    file.delete();
    final File file1 = new File(tempDir + "/demand2.html");
    file1.delete();
    filexml.delete();
    LOGGER.info(
        "*******>> Completed Birt Demand Report for demand Scenario id ={}", id);
  } catch (final Exception e) {
    LOGGER.debug("getBirt Demand Report error : ={}", e.toString());
  } finally {
    // Fix: close the report stream (previously leaked) and only destroy
    // the engine when it was actually created (previously NPE'd when
    // Platform/engine startup failed before the assignment).
    if (fis != null) {
      try {
        fis.close();
      } catch (final IOException ignored) {
        // best-effort cleanup of a temporary-file stream
      }
    }
    if (birtEngine != null) {
      birtEngine.destroy();
    }
  }
  return mystrnew.toString();
}
/**
 * Generates a demand scenario PDF report.
 *
 * Serializes the scenario's area requirements into a temporary Spring XML
 * bean definition, loads it as an application context, and has the BIRT
 * engine render the bundled {@code /demand.rptdesign} template to a
 * temporary PDF file, which is returned as bytes with attachment headers
 * set on the response.
 *
 * @param response the servlet response (Content-Disposition/type/length are set)
 * @param id the demand scenario id
 * @return the PDF bytes, or null if rendering failed
 * @throws WifInvalidInputException if the scenario lookup fails
 * @throws WifInvalidConfigException if the project configuration is invalid
 * @throws ParsingException if the scenario outcome cannot be parsed
 * @throws IOException if a temporary report file cannot be written
 * @throws IncompleteDemandScenarioException if the scenario is incomplete
 */
@RequestMapping(method = RequestMethod.GET, value = "/{projectId}/demandScenarios/{id}/pdfOutcome")
@ResponseBody
public byte[] getPDFDemandOutcome(final HttpServletResponse response,
    @PathVariable("id") final String id) throws WifInvalidInputException,
    WifInvalidConfigException, ParsingException, IOException, IncompleteDemandScenarioException {
  byte[] bytem = null;
  LOGGER.info(
      "*******>> getBirt Demand Report pdf for demand Scenario id ={}", id);
  final DemandScenario demandScenario = demandScenarioService
      .getDemandScenario(id);
  final WifProject project = demandScenario.getWifProject();
  BirtReport = new BirtReport();
  BirtReport.setProjectName(project.getLabel());
  BirtReport.setScenarioName(demandScenario.getLabel());
  final String demandConfigId = project.getDemandConfigId();
  final DemandConfig demandConfig = demandConfigDao
      .findDemandConfigById(demandConfigId);
  // NOTE(review): empty loop retained only because it dereferences
  // demandConfig, preserving the original failure mode on missing config.
  for (final Projection projection : demandConfig.getProjections()) {
  }
  // Each area requirement becomes a <list> of three <value> entries:
  // land-use label, required area, projection label.
  StringBuilder str = new StringBuilder("<property name='data'><list>");
  final List<AreaRequirement> areaRequirements = demandScenarioService
      .getOutcome(id);
  for (final AreaRequirement areaRequirement : areaRequirements) {
    str.append("<list>");
    str.append("<value>");
    str.append(areaRequirement.getAllocationLULabel());
    str.append("</value>");
    str.append("<value>");
    str.append(areaRequirement.getRequiredArea());
    str.append("</value>");
    str.append("<value>");
    str.append(areaRequirement.getProjectionLabel());
    str.append("</value>");
    str.append("</list>");
  }
  str = str.append("</list>");
  str = str.append("</property>");
  // Write a throw-away Spring bean definition file carrying the report data.
  final String tempDir = System.getProperty("java.io.tmpdir");
  final File filexml = new File(tempDir + "/demand.xml");
  if (!filexml.exists()) {
    filexml.createNewFile();
  }
  final FileWriter fwxml = new FileWriter(tempDir + "/demand.xml");
  final BufferedWriter bufferWritterxml = new BufferedWriter(fwxml);
  String strxml = "<?xml version='1.0' encoding='UTF-8'?>\n"
      + "<beans xmlns='http://www.springframework.org/schema/beans'\n"
      + "xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' xmlns:tx='http://www.springframework.org/schema/tx'\n"
      + "xmlns:context='http://www.springframework.org/schema/context'\n"
      + "xmlns:task='http://www.springframework.org/schema/task'\n"
      + "xsi:schemaLocation='\n"
      + "http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.0.xsd\n"
      + "http://www.springframework.org/schema/task http://www.springframework.org/schema/task/spring-task-3.0.xsd\n"
      + "http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx-3.0.xsd\n"
      + "http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd'>\n";
  bufferWritterxml.write(strxml);
  strxml = "<bean id='BirtReport' class='au.org.aurin.wif.model.reports.BirtReport'>"
      + "<property name='scenarioName' value='"
      + BirtReport.getScenarioName()
      + "' />"
      + "<property name='projectName' value='"
      + BirtReport.getProjectName()
      + "' />" + str + "</bean>\n</beans>";
  bufferWritterxml.write(strxml);
  bufferWritterxml.close();
  fwxml.close();
  final ApplicationContext context = new FileSystemXmlApplicationContext("/"
      + filexml.getPath());
  IReportEngine birtEngine = null;
  try {
    final EngineConfig config = new EngineConfig();
    // Force instantiation of the data bean; the context is exposed to the
    // report design through the BIRT application context entry below.
    context.getBean("BirtReport");
    config.getAppContext().put(
        EngineConstants.APPCONTEXT_BIRT_VIEWER_HTTPSERVET_REQUEST, context);
    Platform.startup(config);
    final IReportEngineFactory factory = (IReportEngineFactory) Platform
        .createFactoryObject(IReportEngineFactory.EXTENSION_REPORT_ENGINE_FACTORY);
    birtEngine = factory.createReportEngine(config);
    final URL peopleresource = getClass().getResource("/demand.rptdesign");
    final IReportRunnable runnable = birtEngine
        .openReportDesign(peopleresource.getFile());
    final IRunAndRenderTask runAndRenderTask = birtEngine
        .createRunAndRenderTask(runnable);
    final File file = new File(tempDir + "/pdemand.pdf");
    if (!file.exists()) {
      file.createNewFile();
    }
    final PDFRenderOption pdfOptions = new PDFRenderOption();
    pdfOptions.setOutputFormat("pdf");
    pdfOptions.setOutputFileName(tempDir + "/pdemand.pdf");
    pdfOptions.setOption(IPDFRenderOption.PAGE_OVERFLOW,
        IPDFRenderOption.FIT_TO_PAGE);
    runAndRenderTask.setRenderOption(pdfOptions);
    runAndRenderTask.run();
    runAndRenderTask.close();
    filexml.delete();
    // Slurp the rendered file and advertise it as a PDF attachment.
    bytem = org.springframework.util.FileCopyUtils.copyToByteArray(file);
    response.setHeader("Content-Disposition", "attachment; filename=\""
        + file.getName() + "\"");
    response.setContentLength(bytem.length);
    response.setContentType("application/pdf");
    file.delete();
    LOGGER
        .info(
            "*******>> Completed Birt Demand Report pdf for demand Scenario id ={}",
            id);
  } catch (final Exception e) {
    LOGGER.debug("getBirtDemand pdf Report error : ={}", e.toString());
  } finally {
    // Fix: only destroy the engine when it was actually created
    // (previously NPE'd when engine startup failed before assignment).
    if (birtEngine != null) {
      birtEngine.destroy();
    }
  }
  return bytem;
}
/**
 * Generates a demand scenario XLS report.
 *
 * Mirrors the PDF variant: serializes the scenario's area requirements
 * into a temporary Spring XML bean definition, loads it as an application
 * context, and has the BIRT engine render {@code /demand.rptdesign} with
 * output format "xls" to a temporary file returned as bytes.
 *
 * @param response the servlet response (Content-Disposition/type/length are set)
 * @param id the demand scenario id
 * @return the XLS bytes, or null if rendering failed
 * @throws WifInvalidInputException if the scenario lookup fails
 * @throws WifInvalidConfigException if the project configuration is invalid
 * @throws ParsingException if the scenario outcome cannot be parsed
 * @throws IOException if a temporary report file cannot be written
 * @throws IncompleteDemandScenarioException if the scenario is incomplete
 */
@RequestMapping(method = RequestMethod.GET, value = "/{projectId}/demandScenarios/{id}/xlsOutcome")
@ResponseBody
public byte[] getXLSDemandOutcome(final HttpServletResponse response,
    @PathVariable("id") final String id) throws WifInvalidInputException,
    WifInvalidConfigException, ParsingException, IOException, IncompleteDemandScenarioException {
  byte[] bytem = null;
  LOGGER.info(
      "*******>> getBirt Demand Report xls for demand Scenario id ={}", id);
  final DemandScenario demandScenario = demandScenarioService
      .getDemandScenario(id);
  final WifProject project = demandScenario.getWifProject();
  BirtReport = new BirtReport();
  BirtReport.setProjectName(project.getLabel());
  BirtReport.setScenarioName(demandScenario.getLabel());
  final String demandConfigId = project.getDemandConfigId();
  final DemandConfig demandConfig = demandConfigDao
      .findDemandConfigById(demandConfigId);
  // NOTE(review): empty loop retained only because it dereferences
  // demandConfig, preserving the original failure mode on missing config.
  for (final Projection projection : demandConfig.getProjections()) {
  }
  // Each area requirement becomes a <list> of three <value> entries:
  // land-use label, required area, projection label.
  StringBuilder str = new StringBuilder("<property name='data'><list>");
  final List<AreaRequirement> areaRequirements = demandScenarioService
      .getOutcome(id);
  for (final AreaRequirement areaRequirement : areaRequirements) {
    str.append("<list>");
    str.append("<value>");
    str.append(areaRequirement.getAllocationLULabel());
    str.append("</value>");
    str.append("<value>");
    str.append(areaRequirement.getRequiredArea());
    str.append("</value>");
    str.append("<value>");
    str.append(areaRequirement.getProjectionLabel());
    str.append("</value>");
    str.append("</list>");
  }
  str = str.append("</list>");
  str = str.append("</property>");
  // Write a throw-away Spring bean definition file carrying the report data.
  final String tempDir = System.getProperty("java.io.tmpdir");
  final File filexml = new File(tempDir + "/demand.xml");
  if (!filexml.exists()) {
    filexml.createNewFile();
  }
  final FileWriter fwxml = new FileWriter(tempDir + "/demand.xml");
  final BufferedWriter bufferWritterxml = new BufferedWriter(fwxml);
  String strxml = "<?xml version='1.0' encoding='UTF-8'?>\n"
      + "<beans xmlns='http://www.springframework.org/schema/beans'\n"
      + "xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' xmlns:tx='http://www.springframework.org/schema/tx'\n"
      + "xmlns:context='http://www.springframework.org/schema/context'\n"
      + "xmlns:task='http://www.springframework.org/schema/task'\n"
      + "xsi:schemaLocation='\n"
      + "http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.0.xsd\n"
      + "http://www.springframework.org/schema/task http://www.springframework.org/schema/task/spring-task-3.0.xsd\n"
      + "http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx-3.0.xsd\n"
      + "http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd'>\n";
  bufferWritterxml.write(strxml);
  strxml = "<bean id='BirtReport' class='au.org.aurin.wif.model.reports.BirtReport'>"
      + "<property name='scenarioName' value='"
      + BirtReport.getScenarioName()
      + "' />"
      + "<property name='projectName' value='"
      + BirtReport.getProjectName()
      + "' />" + str + "</bean>\n</beans>";
  bufferWritterxml.write(strxml);
  bufferWritterxml.close();
  fwxml.close();
  final ApplicationContext context = new FileSystemXmlApplicationContext("/"
      + filexml.getPath());
  IReportEngine birtEngine = null;
  try {
    final EngineConfig config = new EngineConfig();
    // Force instantiation of the data bean; the context is exposed to the
    // report design through the BIRT application context entry below.
    context.getBean("BirtReport");
    config.getAppContext().put(
        EngineConstants.APPCONTEXT_BIRT_VIEWER_HTTPSERVET_REQUEST, context);
    Platform.startup(config);
    final IReportEngineFactory factory = (IReportEngineFactory) Platform
        .createFactoryObject(IReportEngineFactory.EXTENSION_REPORT_ENGINE_FACTORY);
    birtEngine = factory.createReportEngine(config);
    final URL peopleresource = getClass().getResource("/demand.rptdesign");
    final IReportRunnable runnable = birtEngine
        .openReportDesign(peopleresource.getFile());
    final IRunAndRenderTask runAndRenderTask = birtEngine
        .createRunAndRenderTask(runnable);
    final File file = new File(tempDir + "/xdemand.xls");
    if (!file.exists()) {
      file.createNewFile();
    }
    // NOTE(review): a PDFRenderOption is used to carry the "xls" output
    // format even though EXCELRenderOption is imported by this file --
    // emitter selection is driven by the format string, but consider
    // switching to EXCELRenderOption for Excel-specific settings.
    final PDFRenderOption pdfOptions = new PDFRenderOption();
    pdfOptions.setOutputFormat("xls");
    pdfOptions.setOutputFileName(tempDir + "/xdemand.xls");
    pdfOptions.setOption(IPDFRenderOption.PAGE_OVERFLOW,
        IPDFRenderOption.FIT_TO_PAGE);
    runAndRenderTask.setRenderOption(pdfOptions);
    runAndRenderTask.run();
    runAndRenderTask.close();
    filexml.delete();
    // Slurp the rendered file and advertise it as an XLS attachment.
    bytem = org.springframework.util.FileCopyUtils.copyToByteArray(file);
    response.setHeader("Content-Disposition", "attachment; filename=\""
        + file.getName() + "\"");
    response.setContentLength(bytem.length);
    response.setContentType("application/xls");
    file.delete();
    LOGGER
        .info(
            "*******>> Completed Birt Demand Report xls for demand Scenario id ={}",
            id);
  } catch (final Exception e) {
    LOGGER.debug("getBirtDemand xls Report error : ={}", e.toString());
  } finally {
    // Fix: only destroy the engine when it was actually created
    // (previously NPE'd when engine startup failed before assignment).
    if (birtEngine != null) {
      birtEngine.destroy();
    }
  }
  return bytem;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.restlet;
import java.io.IOException;
import java.net.CookieStore;
import java.net.HttpCookie;
import java.net.URI;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.camel.AsyncCallback;
import org.apache.camel.CamelExchangeException;
import org.apache.camel.Exchange;
import org.apache.camel.impl.DefaultAsyncProducer;
import org.apache.camel.util.URISupport;
import org.restlet.Client;
import org.restlet.Context;
import org.restlet.Request;
import org.restlet.Response;
import org.restlet.Uniform;
import org.restlet.data.Cookie;
import org.restlet.data.CookieSetting;
import org.restlet.util.Series;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A Camel producer that acts as a client to Restlet server.
*
* @version
*/
public class RestletProducer extends DefaultAsyncProducer {
private static final Logger LOG = LoggerFactory.getLogger(RestletProducer.class);
private static final Pattern PATTERN = Pattern.compile("\\{([\\w\\.]*)\\}");
private Client client;
private boolean throwException;
public RestletProducer(RestletEndpoint endpoint) throws Exception {
    super(endpoint);
    // Cache the endpoint's failure policy so each response can decide
    // whether a >207 status is turned into an exception on the exchange.
    this.throwException = endpoint.isThrowExceptionOnFailure();
    client = new Client(endpoint.getProtocol());
    client.setContext(new Context());
    // NOTE(review): both the read timeout and the connect timeout are fed
    // from getSocketTimeout(); if the endpoint exposes a distinct connect
    // timeout it is not honoured here -- confirm this is intended.
    client.getContext().getParameters().add("socketTimeout", String.valueOf(endpoint.getSocketTimeout()));
    client.getContext().getParameters().add("socketConnectTimeoutMs", String.valueOf(endpoint.getSocketTimeout()));
    RestletComponent component = (RestletComponent) endpoint.getComponent();
    // Connection-pool limits are optional; only positive values are applied.
    if (component.getMaxConnectionsPerHost() != null && component.getMaxConnectionsPerHost() > 0) {
        client.getContext().getParameters().add("maxConnectionsPerHost", String.valueOf(component.getMaxConnectionsPerHost()));
    }
    if (component.getMaxTotalConnections() != null && component.getMaxTotalConnections() > 0) {
        client.getContext().getParameters().add("maxTotalConnections", String.valueOf(component.getMaxTotalConnections()));
    }
}
@Override
public void doStart() throws Exception {
    // Bring the producer's base state up first, then start the underlying
    // Restlet client (reverse order of doStop).
    super.doStart();
    client.start();
}
@Override
public void doStop() throws Exception {
    // Stop the Restlet client before tearing down the producer's base
    // state (reverse order of doStart).
    client.stop();
    super.doStop();
}
/**
 * Processes the exchange synchronously: builds the Restlet request from
 * the exchange, attaches stored cookies, sends it through the client and
 * maps the response (or an exception for failure statuses) back onto the
 * exchange.
 */
@Override
public void process(Exchange exchange) throws Exception {
    RestletEndpoint endpoint = (RestletEndpoint) getEndpoint();
    RestletBinding binding = endpoint.getRestletBinding();
    String resourceUri = buildUri(endpoint, exchange);
    URI uri = new URI(resourceUri);
    Request request = new Request(endpoint.getRestletMethod(), resourceUri);
    binding.populateRestletRequestFromExchange(request, exchange);
    loadCookies(exchange, uri, request);
    LOG.debug("Sending request synchronously: {} for exchangeId: {}", request, exchange.getExchangeId());
    Response response = client.handle(request);
    LOG.debug("Received response synchronously: {} for exchangeId: {}", response, exchange.getExchangeId());
    if (response == null) {
        return;
    }
    Integer statusCode = response.getStatus().getCode();
    storeCookies(exchange, uri, response);
    if (statusCode > 207 && throwException) {
        // Failure status and the endpoint wants exceptions: surface it.
        exchange.setException(populateRestletProducerException(exchange, response, statusCode));
    } else {
        binding.populateExchangeFromRestletResponse(exchange, response);
    }
}
/**
 * Copies every cookie setting from the Restlet response into the
 * endpoint's cookie store for this exchange, keyed by the request URI.
 * No-op when the endpoint has no cookie handler configured.
 */
private void storeCookies(Exchange exchange, URI uri, Response response) {
    RestletEndpoint endpoint = (RestletEndpoint) getEndpoint();
    if (endpoint.getCookieHandler() == null) {
        return;
    }
    CookieStore cookieJar = endpoint.getCookieHandler().getCookieStore(exchange);
    for (CookieSetting setting : response.getCookieSettings()) {
        // Translate the Restlet cookie setting into a java.net.HttpCookie.
        HttpCookie cookie = new HttpCookie(setting.getName(), setting.getValue());
        cookie.setComment(setting.getComment());
        cookie.setDomain(setting.getDomain());
        cookie.setMaxAge(setting.getMaxAge());
        cookie.setPath(setting.getPath());
        cookie.setSecure(setting.isSecure());
        cookie.setVersion(setting.getVersion());
        cookieJar.add(uri, cookie);
    }
}
/**
 * Loads the cookies stored for this exchange/URI from the endpoint's
 * cookie handler, parses each cookie header value and attaches the
 * resulting cookies to the outgoing Restlet request. No-op when the
 * endpoint has no cookie handler configured.
 *
 * @throws IOException if the cookie handler fails to load the cookies
 */
private void loadCookies(Exchange exchange, URI uri, Request request) throws IOException {
    RestletEndpoint endpoint = (RestletEndpoint) getEndpoint();
    if (endpoint.getCookieHandler() == null) {
        return;
    }
    Series<Cookie> cookies = request.getCookies();
    Map<String, List<String>> cookieHeaders = endpoint.getCookieHandler().loadCookies(exchange, uri);
    // Iterate entries directly instead of keySet()+get() (avoids a second
    // map lookup per header).
    for (Map.Entry<String, List<String>> cookieHeader : cookieHeaders.entrySet()) {
        for (String cookieStr : cookieHeader.getValue()) {
            // A single header value may encode several cookies.
            for (HttpCookie cookie : HttpCookie.parse(cookieStr)) {
                cookies.add(new Cookie(cookie.getVersion(), cookie.getName(), cookie.getValue(), cookie.getPath(), cookie.getDomain()));
            }
        }
    }
}
/**
 * Processes the exchange using Camel's asynchronous routing contract.
 * Falls back to the synchronous {@code process(Exchange)} when the endpoint is
 * configured synchronous; otherwise hands the request to the Restlet client
 * with a callback and returns {@code false} to continue routing asynchronously.
 *
 * @return true when the exchange was completed synchronously, false when the
 *         response will arrive on the Restlet callback thread
 */
@Override
public boolean process(final Exchange exchange, final AsyncCallback callback) {
RestletEndpoint endpoint = (RestletEndpoint) getEndpoint();
// force processing synchronously using different api
if (endpoint.isSynchronous()) {
try {
process(exchange);
} catch (Throwable e) {
exchange.setException(e);
}
// done(true) signals synchronous completion to the async engine
callback.done(true);
return true;
}
LOG.trace("Processing asynchronously");
final RestletBinding binding = endpoint.getRestletBinding();
Request request;
try {
String resourceUri = buildUri(endpoint, exchange);
URI uri = new URI(resourceUri);
request = new Request(endpoint.getRestletMethod(), resourceUri);
binding.populateRestletRequestFromExchange(request, exchange);
loadCookies(exchange, uri, request);
} catch (Throwable e) {
// break out in case of exception
exchange.setException(e);
callback.done(true);
return true;
}
// process the request asynchronously
LOG.debug("Sending request asynchronously: {} for exchangeId: {}", request, exchange.getExchangeId());
client.handle(request, new Uniform() {
@Override
public void handle(Request request, Response response) {
LOG.debug("Received response asynchronously: {} for exchangeId: {}", response, exchange.getExchangeId());
try {
if (response != null) {
// rebuild the uri so cookies are stored against the same key used when loading them
String resourceUri = buildUri(endpoint, exchange);
URI uri = new URI(resourceUri);
Integer respCode = response.getStatus().getCode();
storeCookies(exchange, uri, response);
// status above 207 is treated as a failure when throwException is enabled
if (respCode > 207 && throwException) {
exchange.setException(populateRestletProducerException(exchange, response, respCode));
} else {
binding.populateExchangeFromRestletResponse(exchange, response);
}
}
} catch (Throwable e) {
exchange.setException(e);
} finally {
// done(false) because completion happens on the Restlet callback thread
callback.done(false);
}
}
});
// we continue routing async
return false;
}
/**
 * Builds the resource uri for the request: starts from an override header or
 * the endpoint's protocol/host/port/uriPattern, substitutes {key} placeholders
 * with mandatory header values, and appends endpoint and header query strings.
 *
 * @throws CamelExchangeException if a {key} placeholder has no matching header
 */
private static String buildUri(RestletEndpoint endpoint, Exchange exchange) throws Exception {
    // rest producer may provide an override url to be used which we should discard if using (hence the remove)
    String uri = (String) exchange.getIn().removeHeader(Exchange.REST_HTTP_URI);
    if (uri == null) {
        uri = endpoint.getProtocol() + "://" + endpoint.getHost() + ":" + endpoint.getPort() + endpoint.getUriPattern();
    }
    // substitute { } placeholders in uri and use mandatory headers
    LOG.trace("Substituting '{value}' placeholders in uri: {}", uri);
    Matcher matcher = PATTERN.matcher(uri);
    while (matcher.find()) {
        String key = matcher.group(1);
        String header = exchange.getIn().getHeader(key, String.class);
        // header should be mandatory
        if (header == null) {
            throw new CamelExchangeException("Header with key: " + key + " not found in Exchange", exchange);
        }
        if (LOG.isTraceEnabled()) {
            LOG.trace("Replacing: {} with header value: {}", matcher.group(0), header);
        }
        // quoteReplacement: header values containing '$' or '\' must be taken
        // literally, not interpreted as regex group references (would throw or
        // substitute the wrong text otherwise)
        uri = matcher.replaceFirst(Matcher.quoteReplacement(header));
        // we replaced uri so reset and go again
        matcher.reset(uri);
    }
    // include any query parameters if needed
    if (endpoint.getQueryParameters() != null) {
        uri = URISupport.appendParametersToURI(uri, endpoint.getQueryParameters());
    }
    // rest producer may provide an override query string to be used which we should discard if using (hence the remove)
    String query = (String) exchange.getIn().removeHeader(Exchange.REST_HTTP_QUERY);
    if (query == null) {
        query = exchange.getIn().getHeader(Exchange.HTTP_QUERY, String.class);
    }
    if (query != null) {
        LOG.trace("Adding query: {} to uri: {}", query, uri);
        uri = addQueryToUri(uri, query);
    }
    LOG.trace("Using uri: {}", uri);
    return uri;
}
/**
 * Appends a query string to a uri. When the uri has no query part the query is
 * added after a '?'; otherwise the new query is inserted first and the existing
 * query part is kept after an '&amp;'. A null or empty uri is returned as-is.
 */
protected static String addQueryToUri(String uri, String query) {
    if (uri == null || uri.length() == 0) {
        return uri;
    }
    int index = uri.indexOf('?');
    if (index < 0) {
        // no existing query part: just append one
        return uri + "?" + query;
    }
    StringBuilder answer = new StringBuilder()
        .append(uri.substring(0, index))
        .append("?")
        .append(query);
    String remaining = uri.substring(index + 1);
    if (remaining.length() > 0) {
        answer.append("&").append(remaining);
    }
    return answer.toString();
}
/**
 * Builds a {@link RestletOperationException} describing a failed response:
 * request uri, status code/text, parsed headers, the response body text (or the
 * response's string form when there is no entity), and — for 3xx redirection
 * responses only — the redirect host.
 */
protected RestletOperationException populateRestletProducerException(Exchange exchange, Response response, int responseCode) {
    String uri = response.getRequest().getResourceRef().toString();
    String statusText = response.getStatus().getDescription();
    Map<String, String> headers = parseResponseHeaders(response, exchange);

    String body;
    if (response.getEntity() != null) {
        try {
            body = response.getEntity().getText();
        } catch (Exception ex) {
            body = ex.toString();
        }
    } else {
        body = response.toString();
    }

    // redirect location is only available for 3xx redirection responses
    String redirectLocation = null;
    if (responseCode >= 300 && responseCode < 400 && response.getStatus().isRedirection()) {
        redirectLocation = response.getLocationRef().getHostIdentifier();
    }
    return new RestletOperationException(uri, responseCode, statusText, redirectLocation, headers, body);
}
/**
 * Extracts the attributes of a Restlet {@link Response} as a string map.
 * Returns an empty map for any other argument type.
 */
protected Map<String, String> parseResponseHeaders(Object response, Exchange camelExchange) {
    Map<String, String> answer = new HashMap<>();
    if (response instanceof Response) {
        for (Map.Entry<String, Object> entry : ((Response) response).getAttributes().entrySet()) {
            String key = entry.getKey();
            Object value = entry.getValue();
            LOG.trace("Parse external header {}={}", key, value);
            // String.valueOf guards against a NullPointerException when an
            // attribute carries a null value (value.toString() would throw)
            answer.put(key, String.valueOf(value));
        }
    }
    return answer;
}
}
| |
/**
* $URL$
* $Id$
*
* Copyright (c) 2006-2009 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sakaiproject.sitestats.tool.wicket.providers;
import java.text.Collator;
import java.text.ParseException;
import java.text.RuleBasedCollator;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.wicket.injection.web.InjectorHolder;
import org.apache.wicket.markup.repeater.data.IDataProvider;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.ResourceModel;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.site.api.SitePage;
import org.sakaiproject.site.api.ToolConfiguration;
import org.sakaiproject.sitestats.api.StatsManager;
import org.sakaiproject.sitestats.api.event.ToolInfo;
import org.sakaiproject.sitestats.api.report.ReportDef;
import org.sakaiproject.sitestats.api.report.ReportManager;
import org.sakaiproject.sitestats.tool.facade.Locator;
import org.sakaiproject.sitestats.tool.wicket.models.ReportDefModel;
/**
 * Wicket data provider exposing SiteStats report definitions for a site,
 * optionally restricted to reports whose referenced tools/features are
 * available in the site. Results are cached per request and cleared on
 * {@link #detach()}.
 */
public class ReportDefsProvider implements IDataProvider {
    private static final long serialVersionUID = 1L;
    /** Only reports defined in this site. */
    public final static int MODE_MYREPORTS = 0;
    /** Only predefined (site-less) reports. */
    public final static int MODE_PREDEFINED_REPORTS = 1;
    /** Both this site's reports and predefined reports. */
    public final static int MODE_MY_AND_PREDEFINED_REPORTS = 2;

    private String siteId;
    private int mode;
    private boolean filterWithToolsInSite;
    private boolean includeHidden;
    // cached list; reset to null on detach() so each request reloads
    private List<ReportDef> data;

    private static Log log = LogFactory.getLog(ReportDefsProvider.class);

    public ReportDefsProvider(String siteId, int mode, boolean filterWithToolsInSite, boolean includeHidden) {
        InjectorHolder.getInjector().inject(this);
        this.siteId = siteId;
        this.mode = mode;
        this.filterWithToolsInSite = filterWithToolsInSite;
        this.includeHidden = includeHidden;
    }

    /**
     * NOTE(review): the first/count paging window is ignored and the full list
     * is always returned — consistent with size(), but callers relying on the
     * IDataProvider paging contract would receive the whole data set; confirm
     * this is intended before changing.
     */
    public Iterator iterator(int first, int count) {
        return getReportDefs().iterator();
    }

    /** Loads (and caches) the report definitions for the configured mode. */
    public List<ReportDef> getReportDefs() {
        if (data == null) {
            switch (mode) {
                case MODE_MYREPORTS:
                    data = Locator.getFacade().getReportManager().getReportDefinitions(siteId, false, includeHidden);
                    break;
                case MODE_PREDEFINED_REPORTS:
                    data = Locator.getFacade().getReportManager().getReportDefinitions(null, false, includeHidden);
                    break;
                case MODE_MY_AND_PREDEFINED_REPORTS:
                    data = Locator.getFacade().getReportManager().getReportDefinitions(siteId, true, includeHidden);
                    break;
                default:
                    // unknown mode: fail soft with an empty list instead of a
                    // NullPointerException in the post-processing below
                    data = new ArrayList<ReportDef>();
                    break;
            }
            if (filterWithToolsInSite) {
                data = filterWithToolsInSite(data);
            }
            data = fixReportParamsSiteIdForPredefinedReports(data);
            Collections.sort(data, getReportDefComparator());
        }
        return data;
    }

    public IModel model(Object object) {
        return new ReportDefModel((ReportDef) object);
    }

    public int size() {
        return getReportDefs().size();
    }

    public void detach() {
        // drop the cache so the next request reloads fresh data
        data = null;
    }

    /**
     * Predefined reports carry no siteId; point their report params at the
     * current site so they execute in this site's context. All reports are
     * returned unchanged otherwise.
     */
    private List<ReportDef> fixReportParamsSiteIdForPredefinedReports(List<ReportDef> list) {
        List<ReportDef> fixed = new ArrayList<ReportDef>(list.size());
        for (ReportDef rd : list) {
            if (rd.getSiteId() == null) {
                // fix siteId for predefined reports
                rd.getReportParams().setSiteId(siteId);
            }
            fixed.add(rd);
        }
        return fixed;
    }

    /**
     * Keeps only reports whose required tools/features exist in the site.
     * On any failure (e.g. site not found) the unfiltered list is returned,
     * preserving the original best-effort behavior.
     */
    private List<ReportDef> filterWithToolsInSite(List<ReportDef> list) {
        List<ReportDef> filtered = new ArrayList<ReportDef>();
        if (list != null) {
            try {
                Site site = Locator.getFacade().getSiteService().getSite(siteId);
                for (ReportDef rd : list) {
                    if (canIncludeReport(rd, site)) {
                        filtered.add(rd);
                    }
                }
            } catch (Exception e) {
                filtered = list;
            }
        }
        return filtered;
    }

    /**
     * Decides whether a report is applicable to the site: visit/presence
     * reports require the matching feature to be enabled, resource reports
     * require the Resources tool, and event reports require at least one of
     * the selected tools (or a tool producing a selected event) in the site.
     */
    private boolean canIncludeReport(ReportDef reportDef, Site site) {
        List<ToolConfiguration> siteTools = new ArrayList<ToolConfiguration>();
        for (Iterator<SitePage> iPages = site.getPages().iterator(); iPages.hasNext();) {
            SitePage page = iPages.next();
            siteTools.addAll(page.getTools());
        }
        if (ReportManager.WHAT_VISITS.equals(reportDef.getReportParams().getWhat())) {
            // keep visit based reports if site visits are enabled
            if (Locator.getFacade().getStatsManager().isEnableSiteVisits()) {
                return true;
            }
        } else if (ReportManager.WHAT_RESOURCES.equals(reportDef.getReportParams().getWhat())) {
            // keep resource based reports if Resources tool is present
            for (ToolConfiguration tc : siteTools) {
                if (tc.getToolId().equals(StatsManager.RESOURCES_TOOLID)) {
                    return true;
                }
            }
        } else if (ReportManager.WHAT_PRESENCES.equals(reportDef.getReportParams().getWhat())) {
            // keep presence based reports if site presences are enabled
            if (Locator.getFacade().getStatsManager().isEnableSitePresences()) {
                return true;
            }
        } else {
            // at least one tool from the selection must be present
            if (ReportManager.WHAT_EVENTS_BYEVENTS.equals(reportDef.getReportParams().getWhatEventSelType())) {
                // the event->tool map is loop-invariant: fetch it once instead
                // of once per tool configuration
                Map<String, ToolInfo> map = Locator.getFacade().getEventRegistryService().getEventIdToolMap();
                for (ToolConfiguration tc : siteTools) {
                    for (String eventId : reportDef.getReportParams().getWhatEventIds()) {
                        ToolInfo toolInfo = map.get(eventId);
                        // null-guard: unknown event ids must not abort filtering
                        if (toolInfo != null && tc.getToolId().equals(toolInfo.getToolId())) {
                            return true;
                        }
                    }
                }
            } else if (ReportManager.WHAT_EVENTS_BYTOOL.equals(reportDef.getReportParams().getWhatEventSelType())) {
                if (reportDef.getReportParams().getWhatToolIds().contains(ReportManager.WHAT_EVENTS_ALLTOOLS)) {
                    return true;
                } else {
                    for (ToolConfiguration tc : siteTools) {
                        if (reportDef.getReportParams().getWhatToolIds().contains(tc.getToolId())) {
                            return true;
                        }
                    }
                }
            }
        }
        return false;
    }

    /** Comparator ordering report definitions by their (possibly localized) title. */
    public final Comparator<ReportDef> getReportDefComparator() {
        return new Comparator<ReportDef>() {
            private transient Collator collator = Collator.getInstance();
            {
                try {
                    // adjust the collation rules so ' ' sorts before '_'
                    collator = new RuleBasedCollator(((RuleBasedCollator) Collator.getInstance()).getRules().replaceAll("<'\u005f'", "<' '<'\u005f'"));
                } catch (ParseException e) {
                    // include the exception so the failure cause is logged,
                    // not just the message; default Collator is kept as fallback
                    log.error("Unable to create RuleBasedCollator", e);
                }
            }

            public int compare(ReportDef o1, ReportDef o2) {
                return collator.compare(titleOf(o1), titleOf(o2));
            }

            // resolve the display title, localizing when the report asks for it
            private String titleOf(ReportDef rd) {
                if (rd.isTitleLocalized()) {
                    return (String) new ResourceModel(rd.getTitleBundleKey()).getObject();
                }
                return rd.getTitle();
            }
        };
    }
}
| |
package test.e2e.stories;
import com.saucelabs.saucerest.SauceREST;
import org.jbehave.core.configuration.Configuration;
import org.jbehave.core.embedder.EmbedderControls;
import org.jbehave.core.embedder.StoryControls;
import org.jbehave.core.failures.FailingUponPendingStep;
import org.jbehave.core.failures.PendingStepStrategy;
import org.jbehave.core.io.CodeLocations;
import org.jbehave.core.io.LoadFromClasspath;
import org.jbehave.core.io.StoryFinder;
import org.jbehave.core.junit.JUnitStories;
import org.jbehave.core.reporters.CrossReference;
import org.jbehave.core.reporters.Format;
import org.jbehave.core.reporters.StoryReporterBuilder;
import org.jbehave.core.steps.InjectableStepsFactory;
import org.jbehave.core.steps.InstanceStepsFactory;
import org.jbehave.web.selenium.SeleniumConfiguration;
import org.jbehave.web.selenium.SeleniumContext;
import org.jbehave.web.selenium.SeleniumContextOutput;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriverService;
import org.openqa.selenium.remote.CapabilityType;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.remote.RemoteWebDriver;
import test.e2e.steps.DishesSelectionSteps;
import test.e2e.steps.OrderSteps;
import utils.DataLoader;
import utils.StoryPathConverter;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.LinkedList;
import java.util.List;
import static org.jbehave.core.reporters.Format.CONSOLE;
import static org.jbehave.web.selenium.WebDriverHtmlOutput.WEB_DRIVER_HTML;
@RunWith(Parameterized.class)
@RunWith(Parameterized.class)
public class BaseStory extends JUnitStories {
    private static final String SELENIUM_VERSION = DataLoader.getWebDriverVersion();
    private static final File CURRENT_PATH = new File("");
    private static final File PROJECT_PATH = new File(CURRENT_PATH.getAbsolutePath());
    private static final boolean RUN_LOCALLY = DataLoader.isLocalRun();
    private static final String USERNAME = System.getenv("SAUCE_USERNAME");
    private static final String ACCESS_KEY = System.getenv("SAUCE_ACCESS_KEY");
    private static final String URL =
        "https://" + USERNAME + ":" + ACCESS_KEY + "@ondemand.saucelabs.com:443/wd/hub";
    private static final SauceREST sauceClient = new SauceREST(USERNAME, ACCESS_KEY);
    private static final List<String> storiesNames =
        StoryPathConverter.convertStringToListOfStoryPaths(DataLoader.getStoriesToRun());
    private static WebDriver driver;
    // not final: the Windows executable suffix is appended in the static block
    private static String CHROME_DRIVER_PATH = "/utils/" + SELENIUM_VERSION + "/chromedriver";
    private static String buildTag;

    static {
        if (System.getProperty("os.name").startsWith("Windows")) {
            CHROME_DRIVER_PATH += ".exe";
        }
    }

    /** Current test name (the anonymous subclass that merely delegated to super was removed). */
    @Rule
    public TestName name = new TestName();

    private String sessionId;

    /** Reports pass/fail status back to SauceLabs after each test. */
    @Rule
    public TestRule watcher =
        new TestWatcher() {
            @Override
            protected void failed(Throwable throwable, Description description) {
                // sessionId is only set for SauceLabs runs; skip reporting for
                // local runs instead of calling the REST client with null
                if (sessionId != null) {
                    sauceClient.jobFailed(sessionId);
                    System.out.println(String.format("https://saucelabs.com/tests/%s", sessionId));
                }
            }

            @Override
            protected void succeeded(Description description) {
                if (sessionId != null) {
                    sauceClient.jobPassed(sessionId);
                }
            }
        };

    private String browser;
    private String os;
    private String version;
    private String deviceName;
    private String deviceOrientation;
    private PendingStepStrategy pendingStepStrategy = new FailingUponPendingStep();
    private CrossReference crossReference = new CrossReference().withJsonOnly();
    private SeleniumContext seleniumContext = new SeleniumContext();
    private Format[] formats =
        new Format[] {new SeleniumContextOutput(seleniumContext), CONSOLE, WEB_DRIVER_HTML};
    private StoryReporterBuilder reporterBuilder =
        new StoryReporterBuilder()
            .withCodeLocation(CodeLocations.codeLocationFromClass(BaseStory.class))
            .withFailureTrace(true)
            .withFailureTraceCompression(true)
            .withDefaultFormats()
            .withFormats(formats)
            .withCrossReference(crossReference);

    /**
     * Constructs a new instance of the test. Constructor requires string parameters which
     * represent the operating system, version and browser to be used when launching a Sauce VM. The
     * order of the parameters should be the same as that of the elements within the {@link
     * #browsersStrings()} method.
     */
    public BaseStory(String os, String version, String browser, String device, String orientation) {
        // the redundant second configuredEmbedder().embedderControls() call
        // (result unused) was removed
        EmbedderControls embedderControls = configuredEmbedder().embedderControls();
        embedderControls.doIgnoreFailureInView(true);
        this.os = os;
        this.version = version;
        this.browser = browser;
        this.deviceName = device;
        this.deviceOrientation = orientation;
    }

    /**
     * Representing the browser combinations the test should be run against. The values in the String
     * array are used as part of the invocation of the test constructor.
     *
     * @return a LinkedList containing String arrays,
     */
    @Parameterized.Parameters
    public static LinkedList browsersStrings() {
        LinkedList<String[]> browsers = new LinkedList<>();
        if (!RUN_LOCALLY) {
            browsers.add(new String[] {"Windows 10", "49.0", "firefox", null, null});
            browsers.add(new String[] {"OS X 10.11", "10.0", "safari", null, null});
            browsers.add(new String[] {"OS X 10.10", "54.0", "chrome", null, null});
        } else {
            // a single all-null combination: local runs use the local ChromeDriver
            browsers.add(new String[] {null, null, null, null, null});
        }
        return browsers;
    }

    /** Perform setup of local ChromeDriver in case of running Test Suite locally. */
    @BeforeClass
    public static void createAndStartService() {
        buildTag = System.getenv("BUILD_TAG");
        if (RUN_LOCALLY) {
            startLocalChromeDriver();
        }
    }

    // starts a ChromeDriver service on a free port and connects a RemoteWebDriver to it
    private static void startLocalChromeDriver() {
        ChromeDriverService service;
        try {
            service =
                new ChromeDriverService.Builder()
                    .usingDriverExecutable(new File(PROJECT_PATH + CHROME_DRIVER_PATH))
                    .usingAnyFreePort()
                    .build();
            service.start();
        } catch (IOException ex) {
            throw new RuntimeException(ex);
        }
        driver = new RemoteWebDriver(service.getUrl(), DesiredCapabilities.chrome());
    }

    public static WebDriver getDriver() {
        return driver;
    }

    // creates a RemoteWebDriver pointed at SauceLabs with the parameterized capabilities
    private void startSauceLabsWebDriver() {
        DesiredCapabilities capabilities = new DesiredCapabilities();
        capabilities.setCapability(CapabilityType.BROWSER_NAME, browser);
        capabilities.setCapability(CapabilityType.VERSION, version);
        capabilities.setCapability("deviceName", deviceName);
        capabilities.setCapability("device-orientation", deviceOrientation);
        capabilities.setCapability(CapabilityType.PLATFORM, os);
        capabilities.setCapability("name", os + ": " + browser + " " + DataLoader.getStoriesToRun());
        // Getting the build name.
        // Using the Jenkins ENV var. You can use your own.
        // If it is not set test will run without a build id.
        if (buildTag != null) {
            capabilities.setCapability("build", buildTag);
        }
        try {
            driver = new RemoteWebDriver(new URL(URL), capabilities);
        } catch (MalformedURLException ex) {
            ex.printStackTrace();
        }
        this.sessionId = (((RemoteWebDriver) driver).getSessionId()).toString();
        driver.manage().window().maximize();
    }

    /** Initial environment SetUp in case of running test suite on SauceLabs. */
    @Before
    public final void setUp() {
        if (!RUN_LOCALLY) {
            startSauceLabsWebDriver();
        }
    }

    /** Perform clean of target/jbehave directory before test run (best effort). */
    @Before
    public final void clean() throws IOException {
        File dir = new File("target/jbehave");
        // listFiles() returns null when the directory does not exist; the old
        // code relied on an empty catch to swallow the resulting NPE
        File[] files = dir.listFiles();
        if (files != null) {
            for (File file : files) {
                if (!file.isDirectory()) {
                    file.delete();
                }
            }
        }
    }

    /** Web browser should be closed after run of every story. */
    @After
    public void afterStory() throws Exception {
        // NOTE(review): in local mode the driver is created once per class but
        // quit after every story — confirm stories-per-class is 1 locally
        if (driver != null) {
            driver.quit();
        }
    }

    @Override
    public final List<String> storyPaths() {
        String codeLocation = "src/test/resources/";
        return new StoryFinder().findPaths(codeLocation, storiesNames, null);
    }

    @Override
    public final Configuration configuration() {
        return new SeleniumConfiguration()
            .useSeleniumContext(seleniumContext)
            .usePendingStepStrategy(pendingStepStrategy)
            .useStoryControls(new StoryControls().doResetStateBeforeScenario(false))
            .useStoryLoader(new LoadFromClasspath(BaseStory.class))
            .useStoryReporterBuilder(reporterBuilder);
    }

    @Override
    public InjectableStepsFactory stepsFactory() {
        return new InstanceStepsFactory(configuration(), new DishesSelectionSteps(), new OrderSteps());
    }

    /**
     * In case you need the value of the SauceLabs job id, this method will
     *
     * @return the value of the Sauce Job id.
     */
    public String getSessionId() {
        return sessionId;
    }
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.s3.model;
import com.amazonaws.AmazonWebServiceRequest;
import com.amazonaws.event.ProgressListener;
import java.io.File;
import java.io.InputStream;
import java.io.Serializable;
import java.util.Date;
/**
* Abstract base class for a put object or put object like request.
*/
public abstract class AbstractPutObjectRequest extends AmazonWebServiceRequest implements
Cloneable, SSECustomerKeyProvider, SSEAwsKeyManagementParamsProvider, S3DataSource, Serializable {
/**
* The name of an existing bucket, to which this request will upload a new
* object. You must have {@link Permission#Write} permission granted to you
* in order to upload new objects to a bucket.
*/
private String bucketName;
/**
* The key under which to store the new object.
*/
private String key;
/**
* The file containing the data to be uploaded to Amazon S3. You must either
* specify a file or an InputStream containing the data to be uploaded to
* Amazon S3.
*/
private File file;
/**
* The InputStream containing the data to be uploaded to Amazon S3. You must
* either specify a file or an InputStream containing the data to be
* uploaded to Amazon S3.
*/
private transient InputStream inputStream;
/**
* Optional metadata instructing Amazon S3 how to handle the uploaded data
* (e.g. custom user metadata, hooks for specifying content type, etc.). If
* you are uploading from an InputStream, you <bold>should always</bold>
* specify metadata with the content size set, otherwise the contents of the
* InputStream will have to be buffered in memory before they can be sent to
* Amazon S3, which can have very negative performance impacts.
*/
private ObjectMetadata metadata;
/**
* An optional pre-configured access control policy to use for the new
* object. Ignored in favor of accessControlList, if present.
*/
private CannedAccessControlList cannedAcl;
/**
* An optional access control list to apply to the new object. If specified,
* cannedAcl will be ignored.
*/
private AccessControlList accessControlList;
/**
* The optional Amazon S3 storage class to use when storing the new object.
* If not specified, the default, standard storage class will be used.
* <p>
* For more information on Amazon S3 storage classes and available values,
* see the {@link StorageClass} enumeration.
*/
private String storageClass;
/** The optional redirect location about an object */
private String redirectLocation;
/**
* The optional customer-provided server-side encryption key to use to
* encrypt the uploaded object.
*/
private SSECustomerKey sseCustomerKey;
/**
* The optional AWS Key Management system parameters to be used to encrypt
* the the object on the server side.
*/
private SSEAwsKeyManagementParams sseAwsKeyManagementParams;
private ObjectTagging tagging;
private String objectLockMode;
private Date objectLockRetainUntilDate;
private String objectLockLegalHoldStatus;
/**
* Constructs a new
* {@link AbstractPutObjectRequest} object to upload a file to the
* specified bucket and key. After constructing the request,
* users may optionally specify object metadata or a canned ACL as well.
*
* @param bucketName
* The name of an existing bucket to which the new object will be
* uploaded.
* @param key
* The key under which to store the new object.
* @param file
* The path of the file to upload to Amazon S3.
*/
public AbstractPutObjectRequest(String bucketName, String key, File file) {
// file is the sole data source here; the inputStream field stays null
// (a request must provide either a file or a stream, not both)
this.bucketName = bucketName;
this.key = key;
this.file = file;
}
/**
* Constructs a new {@link AbstractPutObjectRequest} object with redirect location.
* After constructing the request, users may optionally specify object
* metadata or a canned ACL as well.
*
* @param bucketName
* The name of an existing bucket to which the new object will be
* uploaded.
* @param key
* The key under which to store the new object.
* @param redirectLocation
* The redirect location of this new object.
*/
public AbstractPutObjectRequest(String bucketName, String key,
String redirectLocation) {
// this variant sets no content source (file and inputStream stay null);
// only the website redirect location is configured
this.bucketName = bucketName;
this.key = key;
this.redirectLocation = redirectLocation;
}
/**
* Constructs a new
* {@link AbstractPutObjectRequest} object to upload a stream of data to
* the specified bucket and key. After constructing the request,
* users may optionally specify object metadata or a canned ACL as well.
* <p>
* Content length for the data stream <b>must</b> be
* specified in the object metadata parameter; Amazon S3 requires it
* be passed in before the data is uploaded. Failure to specify a content
* length will cause the entire contents of the input stream to be buffered
* locally in memory so that the content length can be calculated, which can
* result in negative performance problems.
* </p>
*
* @param bucketName
* The name of an existing bucket to which the new object will be
* uploaded.
* @param key
* The key under which to store the new object.
* @param input
* The stream of data to upload to Amazon S3.
* @param metadata
* The object metadata. At minimum this specifies the
* content length for the stream of data being uploaded.
*/
protected AbstractPutObjectRequest(String bucketName, String key,
InputStream input, ObjectMetadata metadata) {
// protected: stream-based construction is exposed by concrete subclasses.
// Per the class javadoc, metadata should carry the content length or the
// stream will be buffered in memory before upload.
this.bucketName = bucketName;
this.key = key;
this.inputStream = input;
this.metadata = metadata;
}
/**
 * Gets the name of the existing bucket where this request will
 * upload a new object to.
 * In order to upload the object,
 * users must have {@link Permission#Write} permission granted.
 *
 * @return The name of an existing bucket where this request will
 *         upload a new object to.
 *
 * @see AbstractPutObjectRequest#setBucketName(String)
 * @see AbstractPutObjectRequest#withBucketName(String)
 */
public String getBucketName() {
return bucketName;
}
/**
 * Sets the name of an existing bucket where this request will
 * upload a new object to. In order to upload the object,
 * users must have {@link Permission#Write} permission granted.
 *
 * @param bucketName
 *            The name of an existing bucket where this request will
 *            upload a new object to.
 *
 * @see AbstractPutObjectRequest#getBucketName()
 * @see AbstractPutObjectRequest#withBucketName(String)
 */
public void setBucketName(String bucketName) {
this.bucketName = bucketName;
}
/**
 * Fluent variant of {@link #setBucketName(String)}: sets the bucket name and
 * returns this request, enabling additional method calls to be chained
 * together.
 * <p>
 * In order to upload the object,
 * users must have {@link Permission#Write} permission granted.
 *
 * @param bucketName
 *            The name of an existing bucket where this request will
 *            upload a new object to.
 *
 * @return This {@link AbstractPutObjectRequest}, enabling additional method calls to be
 *         chained together.
 *
 * @see AbstractPutObjectRequest#getBucketName()
 * @see AbstractPutObjectRequest#setBucketName(String)
 */
public <T extends AbstractPutObjectRequest> T withBucketName(
String bucketName) {
setBucketName(bucketName);
// unchecked cast is safe: the method always returns 'this', and T is
// constrained to the caller's own subtype in the fluent-builder pattern
@SuppressWarnings("unchecked") T t = (T)this;
return t;
}
/**
 * Gets the key under which to store the new object.
 *
 * @return The key under which to store the new object.
 *
 * @see AbstractPutObjectRequest#setKey(String)
 * @see AbstractPutObjectRequest#withKey(String)
 */
public String getKey() {
return key;
}
/**
 * Sets the key under which to store the new object.
 *
 * @param key
 *            The key under which to store the new object.
 *
 * @see AbstractPutObjectRequest#getKey()
 * @see AbstractPutObjectRequest#withKey(String)
 */
public void setKey(String key) {
this.key = key;
}
/**
 * Fluent variant of {@link #setKey(String)}: sets the key and returns this
 * request, enabling additional method calls to be chained together.
 *
 * @param key
 *            The key under which to store the new object.
 *
 * @return This {@link AbstractPutObjectRequest}, enabling additional method calls to be
 *         chained together.
 *
 * @see AbstractPutObjectRequest#getKey()
 * @see AbstractPutObjectRequest#setKey(String)
 */
public <T extends AbstractPutObjectRequest> T withKey(String key) {
setKey(key);
// unchecked cast is safe: the method always returns 'this'
@SuppressWarnings("unchecked") T t = (T)this;
return t;
}
/**
 * Gets the optional Amazon S3 storage class to use when storing the new
 * object. If not specified, the default standard storage class is
 * used when storing the object.
 * <p>
 * For more information on available Amazon S3 storage classes, see the
 * {@link StorageClass} enumeration.
 * </p>
 *
 * @return The Amazon S3 storage class to use when storing the newly copied
 *         object.
 *
 * @see AbstractPutObjectRequest#setStorageClass(String)
 * @see AbstractPutObjectRequest#setStorageClass(StorageClass)
 * @see AbstractPutObjectRequest#withStorageClass(StorageClass)
 * @see AbstractPutObjectRequest#withStorageClass(String)
 */
public String getStorageClass() {
return storageClass;
}
/**
 * Sets the optional Amazon S3 storage class to use when storing the new
 * object. If not specified, the default standard storage class will be
 * used when storing the new object.
 * <p>
 * The value is stored as a raw string; no validation against the
 * {@link StorageClass} enumeration is performed here.
 * </p>
 *
 * @param storageClass
 *            The storage class to use when storing the new object.
 *
 * @see #getStorageClass()
 * @see #setStorageClass(String)
 * @see #withStorageClass(StorageClass)
 * @see #withStorageClass(String)
 */
public void setStorageClass(String storageClass) {
this.storageClass = storageClass;
}
/**
 * Fluent variant of {@link #setStorageClass(String)}: sets the storage class
 * and returns this {@link AbstractPutObjectRequest}, enabling additional method
 * calls to be chained together. If not specified, the default standard
 * storage class will be used when storing the object.
 * <p>
 * For more information on Amazon S3 storage classes and available values,
 * see the {@link StorageClass} enumeration.
 * </p>
 *
 * @param storageClass
 *            The storage class to use when storing the new object.
 *
 * @return This {@link AbstractPutObjectRequest}, enabling additional method calls to be
 *         chained together.
 *
 * @see AbstractPutObjectRequest#getStorageClass()
 * @see AbstractPutObjectRequest#setStorageClass(StorageClass)
 * @see AbstractPutObjectRequest#setStorageClass(String)
 * @see AbstractPutObjectRequest#withStorageClass(StorageClass)
 */
public <T extends AbstractPutObjectRequest> T withStorageClass(
String storageClass) {
setStorageClass(storageClass);
// unchecked cast is safe: the method always returns 'this'
@SuppressWarnings("unchecked") T t = (T)this;
return t;
}
/**
* Sets the optional Amazon S3 storage class to use when storing the new
* object. If not specified, the default standard storage class will be
* used when storing the object.
* <p>
* For more information on Amazon S3 storage classes and available values,
* see the {@link StorageClass} enumeration.
* </p>
*
* @param storageClass
* The storage class to use when storing the new object.
*
* @see #getStorageClass()
* @see #setStorageClass(String)
*/
public void setStorageClass(StorageClass storageClass) {
this.storageClass = storageClass.toString();
}
/**
* Sets the optional Amazon S3 storage class to use when storing the new
* object. Returns this {@link AbstractPutObjectRequest}, enabling additional method
* calls to be chained together. If not specified, the default standard
* storage class will be used when storing the object.
* <p>
* For more information on Amazon S3 storage classes and available values,
* see the {@link StorageClass} enumeration.
* </p>
*
* @param storageClass
* The storage class to use when storing the new object.
*
* @return This {@link AbstractPutObjectRequest}, enabling additional method calls to be
* chained together.
*
* @see AbstractPutObjectRequest#getStorageClass()
* @see AbstractPutObjectRequest#setStorageClass(StorageClass)
* @see AbstractPutObjectRequest#setStorageClass(String)
* @see AbstractPutObjectRequest#withStorageClass(String)
*/
public <T extends AbstractPutObjectRequest> T withStorageClass(
StorageClass storageClass) {
setStorageClass(storageClass);
@SuppressWarnings("unchecked") T t = (T)this;
return t;
}
/**
* Gets the path and name of the file
* containing the data to be uploaded to Amazon S3.
* Either specify a file or an input stream containing the data to be
* uploaded to Amazon S3; both cannot be specified.
*
* @return The path and name of the file
* containing the data to be uploaded to Amazon S3.
*
* @see AbstractPutObjectRequest#setFile(File)
* @see AbstractPutObjectRequest#withFile(File)
* @see AbstractPutObjectRequest#setInputStream(InputStream)
* @see AbstractPutObjectRequest#withInputStream(InputStream)
*/
@Override
public File getFile() {
return file;
}
/**
* Sets the path and name of the file
* containing the data to be uploaded to Amazon S3.
* Either specify a file or an input stream containing the data to be
* uploaded to Amazon S3; both cannot be specified.
*
* @param file
* The path and name of the
* file containing the data to be uploaded to Amazon S3.
*
* @see AbstractPutObjectRequest#getFile()
* @see AbstractPutObjectRequest#withFile(File)
* @see AbstractPutObjectRequest#getInputStream()
* @see AbstractPutObjectRequest#withInputStream(InputStream)
*/
@Override
public void setFile(File file) {
this.file = file;
}
/**
* Sets the file containing the data to be uploaded to Amazon S3.
* Returns this {@link AbstractPutObjectRequest}, enabling additional method
* calls to be chained together.
* <p>
* Either specify a file or an input stream containing the data to
* be uploaded to Amazon S3; both cannot be specified.
*
* @param file
* The file containing the data to be uploaded to Amazon S3.
*
* @return This {@link AbstractPutObjectRequest}, enabling additional method
* calls to be chained together.
*
* @see AbstractPutObjectRequest#getFile()
* @see AbstractPutObjectRequest#setFile(File)
* @see AbstractPutObjectRequest#getInputStream()
* @see AbstractPutObjectRequest#setInputStream(InputStream)
*/
public <T extends AbstractPutObjectRequest> T withFile(File file) {
setFile(file);
@SuppressWarnings("unchecked") T t = (T)this;
return t;
}
/**
* Gets the optional metadata instructing Amazon S3 how to handle the
* uploaded data (e.g. custom user metadata, hooks for specifying content
* type, etc.).
* <p>
* If uploading from an input stream,
* <b>always</b> specify metadata with the content size set. Otherwise the
* contents of the input stream have to be buffered in memory before
* being sent to Amazon S3. This can cause very negative performance
* impacts.
* </p>
*
* @return The optional metadata instructing Amazon S3 how to handle the
* uploaded data (e.g. custom user metadata, hooks for specifying
* content type, etc.).
*
* @see AbstractPutObjectRequest#setMetadata(ObjectMetadata)
* @see AbstractPutObjectRequest#withMetadata(ObjectMetadata)
*/
public ObjectMetadata getMetadata() {
return metadata;
}
/**
* Sets the optional metadata instructing Amazon S3 how to handle the
* uploaded data (e.g. custom user metadata, hooks for specifying content
* type, etc.).
* <p>
* If uploading from an input stream,
* <b>always</b> specify metadata with the content size set. Otherwise the
* contents of the input stream have to be buffered in memory before
* being sent to Amazon S3. This can cause very negative performance
* impacts.
* </p>
*
* @param metadata
* The optional metadata instructing Amazon S3 how to handle the
* uploaded data (e.g. custom user metadata, hooks for specifying
* content type, etc.).
*
* @see AbstractPutObjectRequest#getMetadata()
* @see AbstractPutObjectRequest#withMetadata(ObjectMetadata)
*/
public void setMetadata(ObjectMetadata metadata) {
this.metadata = metadata;
}
/**
* Sets the optional metadata instructing Amazon S3 how to handle the
* uploaded data (e.g. custom user metadata, hooks for specifying content
* type, etc.). Returns this {@link AbstractPutObjectRequest}, enabling additional method
* calls to be chained together.
* <p>
* If uploading from an input stream,
* <b>always</b> specify metadata with the content size set. Otherwise the
* contents of the input stream have to be buffered in memory before
* being sent to Amazon S3. This can cause very negative performance
* impacts.
* </p>
*
* @param metadata
* The optional metadata instructing Amazon S3 how to handle the
* uploaded data (e.g. custom user metadata, hooks for specifying
* content type, etc.).
*
* @return This {@link AbstractPutObjectRequest}, enabling additional method
* calls to be chained together.
*
* @see AbstractPutObjectRequest#getMetadata()
* @see AbstractPutObjectRequest#setMetadata(ObjectMetadata)
*/
public <T extends AbstractPutObjectRequest> T withMetadata(
ObjectMetadata metadata) {
setMetadata(metadata);
@SuppressWarnings("unchecked") T t = (T)this;
return t;
}
/**
* Gets the optional pre-configured access control policy to use for the
* new object.
*
* @return The optional pre-configured access control policy to use for the
* new object.
*
* @see AbstractPutObjectRequest#setCannedAcl(CannedAccessControlList)
* @see AbstractPutObjectRequest#withCannedAcl(CannedAccessControlList)
*/
public CannedAccessControlList getCannedAcl() {
return cannedAcl;
}
/**
* Sets the optional pre-configured access control policy to use for the new
* object.
*
* @param cannedAcl
* The optional pre-configured access control policy to use for
* the new object.
*
* @see AbstractPutObjectRequest#getCannedAcl()
* @see AbstractPutObjectRequest#withCannedAcl(CannedAccessControlList)
*/
public void setCannedAcl(CannedAccessControlList cannedAcl) {
this.cannedAcl = cannedAcl;
}
/**
* Sets the optional pre-configured access control policy to use for the new
* object. Returns this {@link AbstractPutObjectRequest}, enabling additional method
* calls to be chained together.
*
* @param cannedAcl
* The optional pre-configured access control policy to use for
* the new object.
*
* @return This {@link AbstractPutObjectRequest}, enabling additional method
* calls to be chained together.
*
* @see AbstractPutObjectRequest#getCannedAcl()
* @see AbstractPutObjectRequest#setCannedAcl(CannedAccessControlList)
*/
public <T extends AbstractPutObjectRequest> T withCannedAcl(
CannedAccessControlList cannedAcl) {
setCannedAcl(cannedAcl);
@SuppressWarnings("unchecked") T t = (T) this;
return t;
}
/**
* Returns the optional access control list for the new object. If
* specified, cannedAcl will be ignored.
*/
public AccessControlList getAccessControlList() {
return accessControlList;
}
/**
* Sets the optional access control list for the new object. If specified,
* cannedAcl will be ignored.
*
* @param accessControlList
* The access control list for the new object.
*/
public void setAccessControlList(AccessControlList accessControlList) {
this.accessControlList = accessControlList;
}
/**
* Sets the optional access control list for the new object. If specified,
* cannedAcl will be ignored. Returns this {@link AbstractPutObjectRequest},
* enabling additional method calls to be chained together.
*
* @param accessControlList
* The access control list for the new object.
*/
public <T extends AbstractPutObjectRequest> T withAccessControlList(
AccessControlList accessControlList) {
setAccessControlList(accessControlList);
@SuppressWarnings("unchecked") T t = (T)this;
return t;
}
/**
* Gets the input stream containing the data to be uploaded to Amazon S3.
* The user of this request
* must either specify a file or an input stream containing the data to be
* uploaded to Amazon S3; both cannot be specified.
*
* @return The input stream containing the data to be uploaded to Amazon S3.
* Either specify a file or an input stream containing the
* data to be uploaded to Amazon S3, not both.
*
* @see AbstractPutObjectRequest#setInputStream(InputStream)
* @see AbstractPutObjectRequest#withInputStream(InputStream)
* @see AbstractPutObjectRequest#setFile(File)
* @see AbstractPutObjectRequest#withFile(File)
*/
@Override
public InputStream getInputStream() {
return inputStream;
}
/**
* Sets the input stream containing the data to be uploaded to Amazon S3.
* Either specify a file or an input stream containing the data to be
* uploaded to Amazon S3; both cannot be specified.
*
* @param inputStream
* The input stream containing the data to be uploaded to Amazon
* S3. Either specify a file or an input stream containing the
* data to be uploaded to Amazon S3, not both.
*
* @see AbstractPutObjectRequest#getInputStream()
* @see AbstractPutObjectRequest#withInputStream(InputStream)
* @see AbstractPutObjectRequest#getFile()
* @see AbstractPutObjectRequest#withFile(File)
*/
@Override
public void setInputStream(InputStream inputStream) {
this.inputStream = inputStream;
}
/**
* Sets the input stream containing the data to be uploaded to Amazon S3.
* Returns this {@link AbstractPutObjectRequest}, enabling additional method
* calls to be chained together.
* <p>
* Either specify a file or an input stream containing the data to be
* uploaded to Amazon S3; both cannot be specified.
* </p>
*
* @param inputStream
* The InputStream containing the data to be uploaded to Amazon
* S3.
*
* @return This PutObjectRequest, so that additional method calls can be
* chained together.
*
* @see AbstractPutObjectRequest#getInputStream()
* @see AbstractPutObjectRequest#setInputStream(InputStream)
* @see AbstractPutObjectRequest#getFile()
* @see AbstractPutObjectRequest#setFile(File)
*/
public <T extends AbstractPutObjectRequest> T withInputStream(
InputStream inputStream) {
setInputStream(inputStream);
@SuppressWarnings("unchecked") T t = (T)this;
return t;
}
/**
* Sets the optional redirect location for the new object.
*
* @param redirectLocation
* The redirect location for the new object.
*/
public void setRedirectLocation(String redirectLocation) {
this.redirectLocation = redirectLocation;
}
/**
* Gets the optional redirect location for the new object.
*/
public String getRedirectLocation() {
return this.redirectLocation;
}
/**
* Sets the optional redirect location for the new object.Returns this
* {@link AbstractPutObjectRequest}, enabling additional method calls to be chained
* together.
* @param redirectLocation
* The redirect location for the new object.
*/
public <T extends AbstractPutObjectRequest> T withRedirectLocation(
String redirectLocation) {
this.redirectLocation = redirectLocation;
@SuppressWarnings("unchecked") T t = (T)this;
return t;
}
    /**
     * Returns the optional customer-provided server-side encryption key used
     * to encrypt the uploaded object, or null if none has been set.
     */
    @Override
    public SSECustomerKey getSSECustomerKey() {
        return sseCustomerKey;
    }
    /**
     * Sets the optional customer-provided server-side encryption key to use to
     * encrypt the uploaded object.
     *
     * @param sseKey
     *            The optional customer-provided server-side encryption key to
     *            use to encrypt the uploaded object.
     *
     * @throws IllegalArgumentException
     *             if AWS KMS server-side encryption parameters have already
     *             been set on this request (the two options are mutually
     *             exclusive).
     */
    public void setSSECustomerKey(SSECustomerKey sseKey) {
        // A customer-provided key and AWS KMS parameters are mutually
        // exclusive; reject attempts to configure both.
        if (sseKey != null && this.sseAwsKeyManagementParams != null) {
            throw new IllegalArgumentException(
                "Either SSECustomerKey or SSEAwsKeyManagementParams must not be set at the same time.");
        }
        this.sseCustomerKey = sseKey;
    }
    /**
     * Sets the optional customer-provided server-side encryption key to use to
     * encrypt the uploaded object, and returns the updated request object so
     * that additional method calls can be chained together.
     *
     * @param sseKey
     *            The optional customer-provided server-side encryption key to
     *            use to encrypt the uploaded object.
     *
     * @return This updated request object so that additional method calls can
     *         be chained together.
     */
    public <T extends AbstractPutObjectRequest> T withSSECustomerKey(
            SSECustomerKey sseKey) {
        setSSECustomerKey(sseKey);
        @SuppressWarnings("unchecked") T t = (T)this;
        return t;
    }
public ObjectTagging getTagging() {
return tagging;
}
public void setTagging(ObjectTagging tagging) {
this.tagging = tagging;
}
public <T extends PutObjectRequest> T withTagging(ObjectTagging tagSet) {
setTagging(tagSet);
T t = (T)this;
return t;
}
/**
* The Object Lock mode that you want to apply to this object.
*/
public String getObjectLockMode() {
return objectLockMode;
}
/**
* The Object Lock mode that you want to apply to this object.
*/
public <T extends PutObjectRequest> T withObjectLockMode(String objectLockMode) {
this.objectLockMode = objectLockMode;
return (T) this;
}
/**
* The Object Lock mode that you want to apply to this object.
*/
public <T extends PutObjectRequest> T withObjectLockMode(ObjectLockMode objectLockMode) {
return withObjectLockMode(objectLockMode.toString());
}
/**
* The Object Lock mode that you want to apply to this object.
*/
public void setObjectLockMode(String objectLockMode) {
withObjectLockMode(objectLockMode);
}
/**
* The Object Lock mode that you want to apply to this object.
*/
public void setObjectLockMode(ObjectLockMode objectLockMode) {
setObjectLockMode(objectLockMode.toString());
}
/**
* The date and time when you want this object's Object Lock to expire.
*/
public Date getObjectLockRetainUntilDate() {
return objectLockRetainUntilDate;
}
/**
* The date and time when you want this object's Object Lock to expire.
*/
public <T extends PutObjectRequest> T withObjectLockRetainUntilDate(Date objectLockRetainUntilDate) {
this.objectLockRetainUntilDate = objectLockRetainUntilDate;
return (T) this;
}
/**
* The date and time when you want this object's Object Lock to expire.
*/
public void setObjectLockRetainUntilDate(Date objectLockRetainUntilDate) {
withObjectLockRetainUntilDate(objectLockRetainUntilDate);
}
/**
* The Legal Hold status that you want to apply to the specified object.
*/
public String getObjectLockLegalHoldStatus() {
return objectLockLegalHoldStatus;
}
/**
* The Legal Hold status that you want to apply to the specified object.
*/
public <T extends PutObjectRequest> T withObjectLockLegalHoldStatus(String objectLockLegalHoldStatus) {
this.objectLockLegalHoldStatus = objectLockLegalHoldStatus;
return (T) this;
}
/**
* The Legal Hold status that you want to apply to the specified object.
*/
public <T extends PutObjectRequest> T withObjectLockLegalHoldStatus(ObjectLockLegalHoldStatus objectLockLegalHoldStatus) {
return withObjectLockLegalHoldStatus(objectLockLegalHoldStatus.toString());
}
/**
* The Legal Hold status that you want to apply to the specified object.
*/
public void setObjectLockLegalHoldStatus(String objectLockLegalHoldStatus) {
withObjectLockLegalHoldStatus(objectLockLegalHoldStatus);
}
/**
* The Legal Hold status that you want to apply to the specified object.
*/
public void setObjectLockLegalHoldStatus(ObjectLockLegalHoldStatus objectLockLegalHoldStatus) {
setObjectLockLegalHoldStatus(objectLockLegalHoldStatus.toString());
}
/**
* Sets the optional progress listener for receiving updates for object
* upload status.
*
* @param progressListener
* The legacy progress listener that is used exclusively for Amazon S3 client.
*
* @deprecated use {@link #setGeneralProgressListener(ProgressListener)} instead.
*/
@Deprecated
public void setProgressListener(com.amazonaws.services.s3.model.ProgressListener progressListener) {
setGeneralProgressListener(new LegacyS3ProgressListener(progressListener));
}
/**
* Returns the optional progress listener for receiving updates about object
* upload status.
*
* @return the optional progress listener for receiving updates about object
* upload status.
*
* @deprecated use {@link #getGeneralProgressListener()} instead.
*/
@Deprecated
public com.amazonaws.services.s3.model.ProgressListener getProgressListener() {
ProgressListener generalProgressListener = getGeneralProgressListener();
if (generalProgressListener instanceof LegacyS3ProgressListener) {
return ((LegacyS3ProgressListener)generalProgressListener).unwrap();
} else {
return null;
}
}
/**
* Sets the optional progress listener for receiving updates about object
* upload status, and returns this updated object so that additional method
* calls can be chained together.
*
* @param progressListener
* The legacy progress listener that is used exclusively for Amazon S3 client.
*
* @return This updated PutObjectRequest object.
*
* @deprecated use {@link #withGeneralProgressListener(ProgressListener)} instead.
*/
@Deprecated
public <T extends AbstractPutObjectRequest> T withProgressListener(
com.amazonaws.services.s3.model.ProgressListener progressListener) {
setProgressListener(progressListener);
@SuppressWarnings("unchecked") T t = (T)this;
return t;
}
    /**
     * Returns the AWS Key Management System parameters used to encrypt the
     * object on server side.
     *
     * @return The AWS KMS parameters, or null if none have been set.
     */
    @Override
    public SSEAwsKeyManagementParams getSSEAwsKeyManagementParams() {
        return sseAwsKeyManagementParams;
    }
    /**
     * Sets the AWS Key Management System parameters used to encrypt the object
     * on server side.
     *
     * @param params
     *            The AWS KMS parameters to use for server-side encryption.
     *
     * @throws IllegalArgumentException
     *             if a customer-provided SSE key has already been set on this
     *             request (the two options are mutually exclusive).
     */
    public void setSSEAwsKeyManagementParams(SSEAwsKeyManagementParams params) {
        // AWS KMS parameters and a customer-provided key are mutually
        // exclusive; reject attempts to configure both.
        if (params != null && this.sseCustomerKey != null) {
            throw new IllegalArgumentException(
                "Either SSECustomerKey or SSEAwsKeyManagementParams must not be set at the same time.");
        }
        this.sseAwsKeyManagementParams = params;
    }
    /**
     * Sets the AWS Key Management System parameters used to encrypt the object
     * on server side, and returns this request so that additional method calls
     * can be chained together.
     *
     * @param sseAwsKeyManagementParams
     *            The AWS KMS parameters to use for server-side encryption.
     *
     * @return returns the update PutObjectRequest
     */
    public <T extends AbstractPutObjectRequest> T withSSEAwsKeyManagementParams(
        SSEAwsKeyManagementParams sseAwsKeyManagementParams) {
        setSSEAwsKeyManagementParams(sseAwsKeyManagementParams);
        @SuppressWarnings("unchecked") T t = (T)this;
        return t;
    }
    /**
     * Returns a clone of this request via {@code super.clone()}.
     * NOTE(review): this is a shallow copy as far as this class is concerned;
     * deep-copying of mutable state (e.g. metadata) happens only in
     * {@link #copyPutObjectBaseTo}, not here — confirm the intended
     * cloning contract with the superclass.
     */
    @Override
    public AbstractPutObjectRequest clone() {
        return (AbstractPutObjectRequest) super.clone();
    }
    /**
     * Copies the put-object base state of this request into the given target:
     * the base request state (via {@code copyBaseTo}) plus the access control
     * list, canned ACL, input stream, metadata (cloned when non-null, so the
     * two requests do not share it), redirect location, storage class, and
     * both server-side-encryption settings.
     *
     * @param target the request to copy state into.
     * @return the target request, for chaining.
     */
    protected final <T extends AbstractPutObjectRequest> T copyPutObjectBaseTo(
            T target) {
        copyBaseTo(target);
        final ObjectMetadata metadata = getMetadata();
        return target.withAccessControlList(getAccessControlList())
            .withCannedAcl(getCannedAcl())
            .withInputStream(getInputStream())
            .withMetadata(metadata == null ? null : metadata.clone())
            .withRedirectLocation(getRedirectLocation())
            .withStorageClass(getStorageClass())
            .withSSEAwsKeyManagementParams(getSSEAwsKeyManagementParams())
            .withSSECustomerKey(getSSECustomerKey())
            ;
    }
}
| |
/**
* Appcelerator Titanium Mobile
* Copyright (c) 2009-2019 by Appcelerator, Inc. All Rights Reserved.
* Licensed under the terms of the Apache Public License
* Please see the LICENSE included with this distribution for details.
*/
package org.appcelerator.titanium;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.appcelerator.kroll.common.Log;
import android.content.Context;
import android.util.DisplayMetrics;
import android.util.TypedValue;
import android.view.Display;
import android.view.View;
import android.view.WindowManager;
/**
 * A class used to handle different unit measurements for layout purposes.
 * Supported units include:
 * <li> TypedValue.COMPLEX_UNIT_PX </li>
 * <li> TypedValue.COMPLEX_UNIT_PT </li>
 * <li> TypedValue.COMPLEX_UNIT_DIP </li>
 * <li> TypedValue.COMPLEX_UNIT_SP </li>
 * <li> TypedValue.COMPLEX_UNIT_MM </li>
 * <li> TypedValue.COMPLEX_UNIT_IN </li>
 * <li> TiDimension.COMPLEX_UNIT_PERCENT </li>
 * <li> TiDimension.COMPLEX_UNIT_AUTO </li>
 * <li> TiDimension.COMPLEX_UNIT_UNDEFINED </li>
 * <li> TiDimension.COMPLEX_UNIT_CM </li>
 * Refer to {@link android.util.TypedValue} for more details.
 *
 */
public class TiDimension
{
	private static final String TAG = "TiDimension";

	// Custom unit ids, chosen above COMPLEX_UNIT_MASK so they cannot collide
	// with any TypedValue.COMPLEX_UNIT_* constant.
	public static final int COMPLEX_UNIT_UNDEFINED = TypedValue.COMPLEX_UNIT_MASK + 1;
	public static final int COMPLEX_UNIT_PERCENT = TypedValue.COMPLEX_UNIT_MASK + 2;
	public static final int COMPLEX_UNIT_AUTO = TypedValue.COMPLEX_UNIT_MASK + 3;
	// NOTE(review): derived from TYPE_DIMENSION rather than COMPLEX_UNIT_MASK like
	// the constants above; the resulting value is still distinct from the
	// TypedValue.COMPLEX_UNIT_* constants and from the custom ids above.
	public static final int COMPLEX_UNIT_CM = TypedValue.TYPE_DIMENSION + 1;

	// valueType constants describing which edge/dimension of a view this value refers to.
	public static final int TYPE_UNDEFINED = -1;
	public static final int TYPE_LEFT = 0;
	public static final int TYPE_CENTER_X = 1;
	public static final int TYPE_RIGHT = 2;
	public static final int TYPE_TOP = 3;
	public static final int TYPE_CENTER_Y = 4;
	public static final int TYPE_BOTTOM = 5;
	public static final int TYPE_WIDTH = 6;
	public static final int TYPE_HEIGHT = 7;

	// Conversion factors: points per inch, millimeters per inch, centimeters per inch.
	public static final double POINT_DPI = 72.0;
	public static final double MM_INCH = 25.4;
	public static final double CM_INCH = 2.54;

	// Recognized unit suffixes.
	public static final String UNIT_CM = "cm";
	public static final String UNIT_DIP = "dip";
	public static final String UNIT_DP = "dp";
	public static final String UNIT_IN = "in";
	public static final String UNIT_MM = "mm";
	public static final String UNIT_PX = "px";
	public static final String UNIT_PT = "pt";
	public static final String UNIT_SP = "sp";
	public static final String UNIT_SIP = "sip";
	public static final String UNIT_SYSTEM = "system";
	public static final String UNIT_PERCENT = "%";
	public static final String UNIT_AUTO = "auto";

	// Matches "<number><optional unit suffix>", e.g. "12.5dp", "-3 mm", "100%".
	// Made final: this compiled pattern is a shared constant and must never be reassigned.
	public static final Pattern DIMENSION_PATTERN =
		Pattern.compile("(-?[0-9]*\\.?[0-9]+)\\s*(system|px|dp|dip|sp|sip|mm|cm|pt|in|%)?");

	// Cached display metrics, lazily initialized by getDisplayMetrics().
	// NOTE(review): the lazy init is not synchronized; presumably only used from
	// the UI thread — confirm.
	protected static DisplayMetrics metrics = null;

	protected double value;
	protected int units, valueType;

	/**
	 * Creates a TiDimension object.
	 * @param value the value to set.
	 * @param valueType the valueType to set. Supported types include: {@link #TYPE_LEFT}, {@link #TYPE_RIGHT},
	 * {@link #TYPE_BOTTOM}, {@link #TYPE_TOP}, {@link #TYPE_CENTER_X}, {@link #TYPE_CENTER_Y}, {@link #TYPE_HEIGHT},
	 * {@link #TYPE_WIDTH}.
	 */
	public TiDimension(double value, int valueType)
	{
		this(value, valueType, COMPLEX_UNIT_UNDEFINED);
	}

	/**
	 * Creates a TiDimension object.
	 * @param value the value to set.
	 * @param valueType the valueType to set. Supported types include: {@link #TYPE_LEFT}, {@link #TYPE_RIGHT},
	 * {@link #TYPE_BOTTOM}, {@link #TYPE_TOP}, {@link #TYPE_CENTER_X}, {@link #TYPE_CENTER_Y}, {@link #TYPE_HEIGHT},
	 * {@link #TYPE_WIDTH}.
	 * @param units the units this value is denominated in. Supported units include: {@link #COMPLEX_UNIT_UNDEFINED},
	 * {@link #COMPLEX_UNIT_AUTO}, {@link #COMPLEX_UNIT_PERCENT}, {@link #COMPLEX_UNIT_CM},
	 * {@link TypedValue#COMPLEX_UNIT_PX}, {@link TypedValue#COMPLEX_UNIT_PT}, {@link TypedValue#COMPLEX_UNIT_DIP},
	 * {@link TypedValue#COMPLEX_UNIT_SP}, {@link TypedValue#COMPLEX_UNIT_MM}, {@link TypedValue#COMPLEX_UNIT_IN}.
	 */
	public TiDimension(double value, int valueType, int units)
	{
		this.value = value;
		this.valueType = valueType;
		this.units = units;
	}

	/**
	 * Creates and parses a TiDimension object from a string such as "12.5dp", "100%" or "auto".
	 * When no unit suffix is present, the application's default unit is used. When the string
	 * does not match at all (or is null), the dimension is left at value 0 in PX units.
	 * @param svalue the string to parse.
	 * @param valueType the valueType to set. Supported types include: {@link #TYPE_LEFT}, {@link #TYPE_RIGHT},
	 * {@link #TYPE_BOTTOM}, {@link #TYPE_TOP}, {@link #TYPE_CENTER_X}, {@link #TYPE_CENTER_Y}, {@link #TYPE_HEIGHT},
	 * {@link #TYPE_WIDTH}.
	 */
	public TiDimension(String svalue, int valueType)
	{
		this.valueType = valueType;
		this.units = TypedValue.COMPLEX_UNIT_PX;
		if (svalue != null) {
			Matcher m = DIMENSION_PATTERN.matcher(svalue.trim());
			if (m.matches()) {
				// Parse as double: the backing field is a double, and routing the text
				// through Float.parseFloat (as before) needlessly lost precision.
				this.value = Double.parseDouble(m.group(1));
				if (m.groupCount() == 2) {
					String unit = m.group(2);
					// If there is no unit suffix, fall back to the app-wide default unit.
					if (unit == null) {
						unit = TiApplication.getInstance().getDefaultUnit();
					}
					if (UNIT_PX.equals(unit) || UNIT_SYSTEM.equals(unit)) {
						this.units = TypedValue.COMPLEX_UNIT_PX;
					} else if (UNIT_PT.equals(unit)) {
						this.units = TypedValue.COMPLEX_UNIT_PT;
					} else if (UNIT_DP.equals(unit) || UNIT_DIP.equals(unit)) {
						this.units = TypedValue.COMPLEX_UNIT_DIP;
					} else if (UNIT_SP.equals(unit) || UNIT_SIP.equals(unit)) {
						this.units = TypedValue.COMPLEX_UNIT_SP;
					} else if (UNIT_PERCENT.equals(unit)) {
						this.units = COMPLEX_UNIT_PERCENT;
					} else if (UNIT_MM.equals(unit)) {
						this.units = TypedValue.COMPLEX_UNIT_MM;
					} else if (UNIT_CM.equals(unit)) {
						this.units = COMPLEX_UNIT_CM;
					} else if (unit != null) {
						// Unrecognized suffix: keep the PX default, but log in debug builds.
						Log.w(TAG, "Unknown unit: " + unit, Log.DEBUG_MODE);
					}
				}
			} else if (svalue.trim().equals(UNIT_AUTO)) {
				this.value = Integer.MIN_VALUE;
				this.units = COMPLEX_UNIT_AUTO;
			}
		}
	}

	/**
	 * @return the TiDimension's value.
	 */
	public double getValue()
	{
		return value;
	}

	/**
	 * @return the TiDimension's value truncated to an int.
	 */
	public int getIntValue()
	{
		// Direct narrowing cast; equivalent to Double.valueOf(value).intValue()
		// without the boxing.
		return (int) value;
	}

	/**
	 * Sets value to a double value.
	 * @param value a double to be set.
	 */
	public void setValue(double value)
	{
		this.value = value;
	}

	/**
	 * @return the TiDimension's units. Supported units include:
	 * <li> TypedValue.COMPLEX_UNIT_PX </li>
	 * <li> TypedValue.COMPLEX_UNIT_PT </li>
	 * <li> TypedValue.COMPLEX_UNIT_DIP </li>
	 * <li> TypedValue.COMPLEX_UNIT_SP </li>
	 * <li> TypedValue.COMPLEX_UNIT_MM </li>
	 * <li> TypedValue.COMPLEX_UNIT_IN </li>
	 * <li> TiDimension.COMPLEX_UNIT_CM </li>
	 * <li> TiDimension.COMPLEX_UNIT_PERCENT </li>
	 * <li> TiDimension.COMPLEX_UNIT_AUTO </li>
	 * <li> TiDimension.COMPLEX_UNIT_UNDEFINED </li>
	 * Refer to {@link android.util.TypedValue} for more details.
	 */
	public int getUnits()
	{
		return units;
	}

	/**
	 * Set TiDimension's units. Refer to {@link #getUnits()} for more details.
	 * @param units the unit to set.
	 */
	public void setUnits(int units)
	{
		this.units = units;
	}

	/**
	 * Converts this dimension to raw pixels, using the given view for
	 * percentage and density calculations.
	 * @param parent the view used for percent/density calculations; may be null
	 *               except for percent units.
	 * @return the value in pixels, or -1 for AUTO or unknown units.
	 */
	public double getPixels(View parent)
	{
		switch (units) {
			case TypedValue.COMPLEX_UNIT_PX:
			case COMPLEX_UNIT_UNDEFINED:
				return this.value;
			case COMPLEX_UNIT_PERCENT:
				return getPercentPixels(parent);
			case TypedValue.COMPLEX_UNIT_DIP:
			case TypedValue.COMPLEX_UNIT_SP:
				return getScaledPixels(parent);
			case TypedValue.COMPLEX_UNIT_PT:
			case TypedValue.COMPLEX_UNIT_MM:
			case COMPLEX_UNIT_CM:
			case TypedValue.COMPLEX_UNIT_IN:
				return getSizePixels(parent);
		}
		// AUTO and unrecognized units cannot be converted to pixels.
		return -1;
	}

	/**
	 * Calculates and returns the number of pixels, depending on the type.
	 * It also takes screen/view density into consideration.
	 * @param parent the parent view used for calculation.
	 * @return the number of pixels, rounded to the nearest int.
	 */
	public int getAsPixels(View parent)
	{
		return (int) Math.round(getPixels(parent));
	}

	/**
	 * @param parent the parent view used for calculation.
	 * @return this dimension expressed in millimeters.
	 */
	public double getAsMillimeters(View parent)
	{
		if (units == TypedValue.COMPLEX_UNIT_MM) {
			return this.value;
		}
		return ((getPixels(parent) / getDPIForType(parent)) * MM_INCH);
	}

	/**
	 * @param parent the parent view used for calculation.
	 * @return this dimension expressed in centimeters.
	 */
	public double getAsCentimeters(View parent)
	{
		if (units == COMPLEX_UNIT_CM) {
			return this.value;
		}
		return ((getPixels(parent) / getDPIForType(parent)) * CM_INCH);
	}

	/**
	 * @param parent the parent view used for calculation.
	 * @return this dimension expressed in inches.
	 */
	public double getAsInches(View parent)
	{
		if (units == TypedValue.COMPLEX_UNIT_IN) {
			return this.value;
		}
		return (getPixels(parent) / getDPIForType(parent));
	}

	/**
	 * @param parent the parent view used for calculation.
	 * @return this dimension expressed in density-independent pixels.
	 */
	public int getAsDIP(View parent)
	{
		if (units == TypedValue.COMPLEX_UNIT_DIP) {
			return (int) this.value;
		}
		return (int) Math.round((getPixels(parent) / getDisplayMetrics(parent).density));
	}

	/**
	 * Calculates and returns the dimension in the default units. If the default
	 * unit is not valid, returns in PX.
	 * @param parent the parent of the view used for calculation
	 * @return the dimension in the system unit
	 */
	public double getAsDefault(View parent)
	{
		String defaultUnit = TiApplication.getInstance().getDefaultUnit();
		if (UNIT_DP.equals(defaultUnit) || UNIT_DIP.equals(defaultUnit)) {
			return (double) getAsDIP(parent);
		} else if (UNIT_MM.equals(defaultUnit)) {
			return getAsMillimeters(parent);
		} else if (UNIT_CM.equals(defaultUnit)) {
			return getAsCentimeters(parent);
		} else if (UNIT_IN.equals(defaultUnit)) {
			return getAsInches(parent);
		}
		// Returned for PX, SYSTEM, and unknown values
		return (double) getAsPixels(parent);
	}

	/**
	 * Resolves a percent dimension against the parent view's width or height,
	 * depending on the valueType.
	 * @param parent the view whose size the percentage refers to.
	 * @return the resolved pixel value, or -1 when parent is null or the
	 *         valueType does not map to a dimension.
	 */
	protected double getPercentPixels(View parent)
	{
		if (parent == null) {
			return -1;
		}
		int dimension;
		switch (valueType) {
			case TYPE_TOP:
			case TYPE_BOTTOM:
			case TYPE_CENTER_Y:
			case TYPE_HEIGHT:
				// Vertical positions/sizes are percentages of the parent height.
				dimension = Math.max(parent.getHeight(), 0);
				break;
			case TYPE_LEFT:
			case TYPE_RIGHT:
			case TYPE_CENTER_X:
			case TYPE_WIDTH:
				// Horizontal positions/sizes are percentages of the parent width.
				dimension = Math.max(parent.getWidth(), 0);
				break;
			default:
				return -1;
		}
		return ((this.value / 100.0) * dimension);
	}

	/**
	 * Returns the (lazily cached) display metrics of the default display.
	 * @param parent optional view whose context is used; falls back to the application context.
	 */
	protected static DisplayMetrics getDisplayMetrics(View parent)
	{
		if (metrics == null) {
			Context context = (parent != null) ? parent.getContext() : TiApplication.getInstance();
			WindowManager windowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
			Display display = windowManager.getDefaultDisplay();
			metrics = new DisplayMetrics();
			display.getMetrics(metrics);
		}
		return metrics;
	}

	/**
	 * Converts DIP/SP values to pixels using the display's density factors.
	 * @return the scaled pixel value, or -1 for any other unit.
	 */
	protected double getScaledPixels(View parent)
	{
		DisplayMetrics metrics = getDisplayMetrics(parent);
		if (units == TypedValue.COMPLEX_UNIT_DIP) {
			return (metrics.density * this.value);
		} else if (units == TypedValue.COMPLEX_UNIT_SP) {
			return (metrics.scaledDensity * this.value);
		}
		return -1;
	}

	/**
	 * Picks the DPI axis appropriate for this valueType: ydpi for vertical
	 * types, xdpi for horizontal types, densityDpi otherwise.
	 */
	protected double getDPIForType(View parent)
	{
		DisplayMetrics metrics = getDisplayMetrics(parent);
		float dpi = -1;
		switch (valueType) {
			case TYPE_TOP:
			case TYPE_BOTTOM:
			case TYPE_CENTER_Y:
			case TYPE_HEIGHT:
				dpi = metrics.ydpi;
				break;
			case TYPE_LEFT:
			case TYPE_RIGHT:
			case TYPE_CENTER_X:
			case TYPE_WIDTH:
				dpi = metrics.xdpi;
				break;
			default:
				dpi = metrics.densityDpi;
		}
		return dpi;
	}

	/**
	 * Converts physical-size units (pt, mm, cm, in) to pixels via the DPI for
	 * this valueType.
	 * @return the pixel value, or -1 for any other unit.
	 */
	protected double getSizePixels(View parent)
	{
		double dpi = getDPIForType(parent);
		if (units == TypedValue.COMPLEX_UNIT_PT) {
			return (this.value * (dpi / POINT_DPI));
		} else if (units == TypedValue.COMPLEX_UNIT_MM) {
			return ((this.value / MM_INCH) * dpi);
		} else if (units == COMPLEX_UNIT_CM) {
			return ((this.value / CM_INCH) * dpi);
		} else if (units == TypedValue.COMPLEX_UNIT_IN) {
			return (this.value * dpi);
		}
		return -1;
	}

	/**
	 * @return true if units is TiDimension.COMPLEX_UNIT_UNDEFINED, false otherwise.
	 */
	public boolean isUnitUndefined()
	{
		return units == COMPLEX_UNIT_UNDEFINED;
	}

	/**
	 * @return true if units is TiDimension.COMPLEX_UNIT_PERCENT, false otherwise.
	 */
	public boolean isUnitPercent()
	{
		return units == COMPLEX_UNIT_PERCENT;
	}

	/**
	 * @return true if units is TiDimension.COMPLEX_UNIT_AUTO, false otherwise.
	 */
	public boolean isUnitAuto()
	{
		return units == COMPLEX_UNIT_AUTO;
	}

	/**
	 * @return string representation of the TiDimension object, e.g. "12.5dip",
	 *         "100.0%" or "auto". UNDEFINED units render the bare value.
	 */
	@Override
	public String toString()
	{
		StringBuilder sb = new StringBuilder(10);
		if (!isUnitAuto()) {
			sb.append(value);
			switch (units) {
				case TypedValue.COMPLEX_UNIT_PX:
					sb.append(UNIT_PX);
					break;
				case TypedValue.COMPLEX_UNIT_PT:
					sb.append(UNIT_PT);
					break;
				case TypedValue.COMPLEX_UNIT_DIP:
					sb.append(UNIT_DIP);
					break;
				case TypedValue.COMPLEX_UNIT_SP:
					sb.append(UNIT_SP);
					break;
				case TypedValue.COMPLEX_UNIT_MM:
					sb.append(UNIT_MM);
					break;
				case COMPLEX_UNIT_CM:
					sb.append(UNIT_CM);
					break;
				case TypedValue.COMPLEX_UNIT_IN:
					sb.append(UNIT_IN);
					break;
				case COMPLEX_UNIT_PERCENT:
					sb.append(UNIT_PERCENT);
					break;
			}
		} else {
			sb.append(UNIT_AUTO);
		}
		return sb.toString();
	}
}
| |
//
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//
package com.cloud.network.resource;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
import javax.naming.ConfigurationException;
import org.apache.log4j.Logger;
import com.cloud.agent.IAgentControl;
import com.cloud.agent.api.Answer;
import com.cloud.agent.api.Command;
import com.cloud.agent.api.DirectNetworkUsageAnswer;
import com.cloud.agent.api.DirectNetworkUsageCommand;
import com.cloud.agent.api.MaintainAnswer;
import com.cloud.agent.api.MaintainCommand;
import com.cloud.agent.api.PingCommand;
import com.cloud.agent.api.ReadyAnswer;
import com.cloud.agent.api.ReadyCommand;
import com.cloud.agent.api.RecurringNetworkUsageAnswer;
import com.cloud.agent.api.RecurringNetworkUsageCommand;
import com.cloud.agent.api.StartupCommand;
import com.cloud.agent.api.StartupTrafficMonitorCommand;
import com.cloud.host.Host;
import com.cloud.resource.ServerResource;
import com.cloud.utils.exception.ExecutionException;
import java.net.HttpURLConnection;
public class TrafficSentinelResource implements ServerResource {

    private String _name;
    private String _zoneId;
    private String _ip;
    private String _guid;
    private String _url;
    private String _inclZones;
    private String _exclZones;

    private static final Logger s_logger = Logger.getLogger(TrafficSentinelResource.class);

    /**
     * Reads the resource configuration. The parameters "zone", "ipaddress",
     * "guid" and "url" are required; "inclZones"/"exclZones" are optional
     * per-resource overrides of the global zone filters.
     *
     * @throws ConfigurationException if a required parameter is missing or lookup fails
     */
    @Override
    public boolean configure(String name, Map<String, Object> params) throws ConfigurationException {
        try {
            _name = name;
            _zoneId = (String)params.get("zone");
            if (_zoneId == null) {
                throw new ConfigurationException("Unable to find zone");
            }
            _ip = (String)params.get("ipaddress");
            if (_ip == null) {
                throw new ConfigurationException("Unable to find IP");
            }
            _guid = (String)params.get("guid");
            if (_guid == null) {
                throw new ConfigurationException("Unable to find the guid");
            }
            _url = (String)params.get("url");
            if (_url == null) {
                throw new ConfigurationException("Unable to find url");
            }
            _inclZones = (String)params.get("inclZones");
            _exclZones = (String)params.get("exclZones");
            return true;
        } catch (ConfigurationException e) {
            // Re-throw our own validation failures untouched.
            throw e;
        } catch (Exception e) {
            // Preserve the original exception as the cause instead of discarding it.
            ConfigurationException ce = new ConfigurationException(e.getMessage());
            ce.initCause(e);
            throw ce;
        }
    }

    /**
     * Builds the startup command announcing this traffic monitor to the server.
     */
    @Override
    public StartupCommand[] initialize() {
        StartupTrafficMonitorCommand cmd = new StartupTrafficMonitorCommand();
        cmd.setName(_name);
        cmd.setDataCenter(_zoneId);
        cmd.setPod("");
        cmd.setPrivateIpAddress(_ip);
        cmd.setStorageIpAddress("");
        cmd.setVersion(TrafficSentinelResource.class.getPackage().getImplementationVersion());
        cmd.setGuid(_guid);
        return new StartupCommand[] {cmd};
    }

    @Override
    public Host.Type getType() {
        return Host.Type.TrafficMonitor;
    }

    @Override
    public String getName() {
        return _name;
    }

    @Override
    public PingCommand getCurrentStatus(final long id) {
        return new PingCommand(Host.Type.TrafficMonitor, id);
    }

    @Override
    public boolean start() {
        return true;
    }

    @Override
    public boolean stop() {
        return true;
    }

    @Override
    public void disconnected() {
        // Nothing to tear down for this stateless resource.
    }

    @Override
    public IAgentControl getAgentControl() {
        return null;
    }

    @Override
    public void setAgentControl(IAgentControl agentControl) {
        // Agent control is not used by this resource.
    }

    /**
     * Dispatches an incoming command to its specific handler.
     */
    @Override
    public Answer executeRequest(Command cmd) {
        if (cmd instanceof ReadyCommand) {
            return execute((ReadyCommand)cmd);
        } else if (cmd instanceof MaintainCommand) {
            return execute((MaintainCommand)cmd);
        } else if (cmd instanceof DirectNetworkUsageCommand) {
            return execute((DirectNetworkUsageCommand)cmd);
        } else if (cmd instanceof RecurringNetworkUsageCommand) {
            return execute((RecurringNetworkUsageCommand)cmd);
        } else {
            return Answer.createUnsupportedCommandAnswer(cmd);
        }
    }

    private Answer execute(ReadyCommand cmd) {
        return new ReadyAnswer(cmd);
    }

    private synchronized RecurringNetworkUsageAnswer execute(RecurringNetworkUsageCommand cmd) {
        return new RecurringNetworkUsageAnswer(cmd);
    }

    private synchronized DirectNetworkUsageAnswer execute(DirectNetworkUsageCommand cmd) {
        try {
            return getPublicIpBytesSentAndReceived(cmd);
        } catch (ExecutionException e) {
            return new DirectNetworkUsageAnswer(cmd, e);
        }
    }

    private Answer execute(MaintainCommand cmd) {
        return new MaintainAnswer(cmd);
    }

    /**
     * Queries Traffic Sentinel for the bytes sent/received per public IP over the
     * command's time window and collects the totals into the answer.
     *
     * @throws ExecutionException if the URL is invalid or the HTTP exchange fails
     */
    private DirectNetworkUsageAnswer getPublicIpBytesSentAndReceived(DirectNetworkUsageCommand cmd) throws ExecutionException {
        DirectNetworkUsageAnswer answer = new DirectNetworkUsageAnswer(cmd);
        try {
            // Use the global include/exclude zones when there are no per-resource ones.
            if (_inclZones == null) {
                _inclZones = cmd.getIncludeZones();
            }
            if (_exclZones == null) {
                _exclZones = cmd.getExcludeZones();
            }
            // Query Traffic Sentinel with an HTTP POST to its script endpoint.
            URL trafficSentinel = new URL(_url + "/inmsf/Query");
            String postData = "script=" + URLEncoder.encode(getScript(cmd.getPublicIps(), cmd.getStart(), cmd.getEnd()), "UTF-8")
                + "&authenticate=basic&resultFormat=txt";
            // BUG FIX: Content-Length must be the encoded byte count; the character
            // count is wrong whenever the script contains multi-byte characters.
            byte[] body = postData.getBytes("UTF-8");
            HttpURLConnection con = (HttpURLConnection)trafficSentinel.openConnection();
            con.setRequestMethod("POST");
            con.setRequestProperty("Content-Length", String.valueOf(body.length));
            con.setDoOutput(true);
            // try-with-resources guarantees both streams are closed even when one
            // close() throws (the old finally block could skip the reader's close).
            try (OutputStream os = con.getOutputStream()) {
                os.write(body);
            }
            try (BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()))) {
                String inputLine;
                while ((inputLine = in.readLine()) != null) {
                    // Each result row is "publicIp,bytesSent,bytesReceived".
                    StringTokenizer st = new StringTokenizer(inputLine, ",");
                    if (st.countTokens() == 3) {
                        String publicIp = st.nextToken();
                        long[] bytesSentAndReceived = new long[2];
                        bytesSentAndReceived[0] = Long.parseLong(st.nextToken());
                        bytesSentAndReceived[1] = Long.parseLong(st.nextToken());
                        answer.put(publicIp, bytesSentAndReceived);
                    }
                }
            }
        } catch (MalformedURLException e1) {
            s_logger.info("Invalid Traffic Sentinel URL", e1);
            throw new ExecutionException(e1.getMessage());
        } catch (IOException e) {
            s_logger.debug("Error in direct network usage accounting", e);
            throw new ExecutionException(e.getMessage());
        } catch (Exception e) {
            s_logger.debug(e);
            throw new ExecutionException(e.getMessage());
        }
        return answer;
    }

    /**
     * Builds the Traffic Sentinel query script. It runs two top-N queries --
     * bytes grouped by source IP (sent) and by destination IP (received) --
     * and prints one "ip,sent,received" line per address that sent traffic.
     */
    private String getScript(List<String> ips, Date start, Date end) {
        // Comma-separated list of the public IPs to account for.
        StringBuilder ipList = new StringBuilder();
        for (int i = 0; i < ips.size(); i++) {
            if (i > 0) {
                ipList.append(",");
            }
            ipList.append(ips.get(i));
        }
        String ipAddresses = ipList.toString();
        String destZoneCondition = "";
        if (_inclZones != null && !_inclZones.isEmpty()) {
            destZoneCondition = " & destinationzone = " + _inclZones;
        }
        if (_exclZones != null && !_exclZones.isEmpty()) {
            destZoneCondition += " & destinationzone != " + _exclZones;
        }
        String srcZoneCondition = "";
        if (_inclZones != null && !_inclZones.isEmpty()) {
            srcZoneCondition = " & sourcezone = " + _inclZones;
        }
        if (_exclZones != null && !_exclZones.isEmpty()) {
            srcZoneCondition += " & sourcezone != " + _exclZones;
        }
        String startDate = getDateString(start);
        String endDate = getDateString(end);
        // StringBuilder: no need for StringBuffer's synchronization here.
        StringBuilder sb = new StringBuilder();
        sb.append("var q = Query.topN(\"historytrmx\",");
        sb.append(" \"ipsource,bytes\",");
        sb.append(" \"ipsource = " + ipAddresses + destZoneCondition + "\",");
        sb.append(" \"" + startDate + ", " + endDate + "\",");
        sb.append(" \"bytes\",");
        sb.append(" 100000);");
        sb.append("var totalsSent = {};");
        sb.append("var t = q.run(");
        sb.append(" function(row,table) {");
        sb.append(" if(row[0]) { ");
        sb.append(" totalsSent[row[0]] = row[1];");
        sb.append(" }");
        sb.append(" });");
        sb.append("var q = Query.topN(\"historytrmx\",");
        sb.append(" \"ipdestination,bytes\",");
        sb.append(" \"ipdestination = " + ipAddresses + srcZoneCondition + "\",");
        sb.append(" \"" + startDate + ", " + endDate + "\",");
        sb.append(" \"bytes\",");
        sb.append(" 100000);");
        sb.append("var totalsRcvd = {};");
        sb.append("var t = q.run(");
        sb.append(" function(row,table) {");
        sb.append(" if(row[0]) {");
        sb.append(" totalsRcvd[row[0]] = row[1];");
        sb.append(" }");
        sb.append(" });");
        sb.append("for (var addr in totalsSent) {");
        sb.append(" var TS = 0;");
        sb.append(" var TR = 0;");
        sb.append(" if(totalsSent[addr]) TS = totalsSent[addr];");
        sb.append(" if(totalsRcvd[addr]) TR = totalsRcvd[addr];");
        sb.append(" println(addr + \",\" + TS + \",\" + TR);");
        sb.append("}");
        return sb.toString();
    }

    /**
     * Formats a date as "yyyyMMdd HH:mm:ss" for the Traffic Sentinel query.
     * A fresh SimpleDateFormat is created per call because it is not thread-safe.
     */
    private String getDateString(Date date) {
        DateFormat dfDate = new SimpleDateFormat("yyyyMMdd HH:mm:ss");
        return dfDate.format(date);
    }

    @Override
    public void setName(String name) {
        // No-op: the name is fixed at configure() time.
    }

    @Override
    public void setConfigParams(Map<String, Object> params) {
        // No-op.
    }

    @Override
    public Map<String, Object> getConfigParams() {
        return null;
    }

    @Override
    public int getRunLevel() {
        return 0;
    }

    @Override
    public void setRunLevel(int level) {
        // No-op.
    }
}
| |
/*
* $Id$
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tiles.portlet.context;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.portlet.PortletRequest;
/**
* <p>Private implementation of <code>Map</code> for portlet parameter
* name-values[].</p>
*
* @version $Rev$ $Date$
*/
final class PortletParamValuesMap implements Map<String, String[]> {

    /**
     * The portlet request whose parameters back this read-only map.
     */
    private PortletRequest request = null;

    /**
     * Constructor.
     *
     * @param request The portlet request to use.
     */
    public PortletParamValuesMap(PortletRequest request) {
        this.request = request;
    }

    /** {@inheritDoc} */
    public void clear() {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    public boolean containsKey(Object key) {
        return (request.getParameter(key(key)) != null);
    }

    /** {@inheritDoc} */
    public boolean containsValue(Object value) {
        if (!(value instanceof String[])) {
            return (false);
        }
        String[] test = (String[]) value;
        // Arrays.equals performs the element-wise comparison (and tolerates
        // null elements, unlike the previous hand-rolled loop).
        for (String[] actual : values()) {
            if (Arrays.equals(test, actual)) {
                return (true);
            }
        }
        return (false);
    }

    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    public Set<Map.Entry<String, String[]>> entrySet() {
        Set<Map.Entry<String, String[]>> set = new HashSet<Map.Entry<String, String[]>>();
        Enumeration<String> keys = request.getParameterNames();
        String key;
        while (keys.hasMoreElements()) {
            key = keys.nextElement();
            set.add(new MapEntry<String, String[]>(key, (request
                    .getParameterValues(key)), false));
        }
        return (set);
    }

    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    public boolean equals(Object o) {
        // BUG FIX: guard the cast so equals() obeys its contract instead of
        // throwing ClassCastException/NullPointerException for foreign objects.
        if (this == o) {
            return true;
        }
        if (!(o instanceof PortletParamValuesMap)) {
            return false;
        }
        PortletRequest otherRequest = ((PortletParamValuesMap) o).request;
        boolean retValue = true;
        synchronized (request) {
            for (Enumeration<String> attribs = request.getParameterNames(); attribs
                    .hasMoreElements()
                    && retValue;) {
                String parameterName = attribs.nextElement();
                // BUG FIX: String[] inherits Object's identity equals(); the
                // arrays' contents must be compared instead.
                retValue = Arrays.equals(request.getParameterValues(parameterName),
                        otherRequest.getParameterValues(parameterName));
            }
        }
        return retValue;
    }

    /** {@inheritDoc} */
    public String[] get(Object key) {
        return (request.getParameterValues(key(key)));
    }

    /** {@inheritDoc} */
    public int hashCode() {
        return (request.hashCode());
    }

    /** {@inheritDoc} */
    public boolean isEmpty() {
        return (size() < 1);
    }

    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    public Set<String> keySet() {
        Set<String> set = new HashSet<String>();
        Enumeration<String> keys = request.getParameterNames();
        while (keys.hasMoreElements()) {
            set.add(keys.nextElement());
        }
        return (set);
    }

    /** {@inheritDoc} */
    public String[] put(String key, String[] value) {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    public void putAll(Map<? extends String, ? extends String[]> map) {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    public String[] remove(Object key) {
        throw new UnsupportedOperationException();
    }

    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    public int size() {
        int n = 0;
        Enumeration<String> keys = request.getParameterNames();
        while (keys.hasMoreElements()) {
            keys.nextElement();
            n++;
        }
        return (n);
    }

    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    public Collection<String[]> values() {
        List<String[]> list = new ArrayList<String[]>();
        Enumeration<String> keys = request.getParameterNames();
        while (keys.hasMoreElements()) {
            list.add(request.getParameterValues(keys.nextElement()));
        }
        return (list);
    }

    /**
     * Returns the string representation of the key.
     *
     * @param key The key.
     * @return The string representation of the key.
     * @throws IllegalArgumentException If the key is <code>null</code>.
     */
    private String key(Object key) {
        if (key == null) {
            throw new IllegalArgumentException();
        } else if (key instanceof String) {
            return ((String) key);
        } else {
            return (key.toString());
        }
    }
}
| |
package com.bitdubai.fermat_osa_addon.layer.android.device_conectivity.developer.bitdubai.version_1.structure;
import android.content.Context;
import android.net.NetworkInfo;
import com.bitdubai.fermat_api.layer.all_definition.events.EventSource;
import com.bitdubai.fermat_api.layer.dmp_world.wallet.exceptions.CantStartAgentException;
import com.bitdubai.fermat_api.layer.all_definition.enums.Addons;
import com.bitdubai.fermat_api.layer.all_definition.enums.Plugins;
import com.bitdubai.fermat_api.layer.all_definition.events.interfaces.FermatEvent;
import com.bitdubai.fermat_osa_addon.layer.android.device_conectivity.developer.bitdubai.version_1.interfaces.ConnectionType;
import com.bitdubai.fermat_osa_addon.layer.android.device_conectivity.developer.bitdubai.version_1.interfaces.ConnectivityAgent;
import com.bitdubai.fermat_osa_addon.layer.android.device_conectivity.developer.bitdubai.version_1.interfaces.Network;
import com.bitdubai.fermat_pip_api.layer.pip_platform_service.error_manager.DealsWithErrors;
import com.bitdubai.fermat_pip_api.layer.pip_platform_service.error_manager.ErrorManager;
import com.bitdubai.fermat_pip_api.layer.pip_platform_service.error_manager.UnexpectedAddonsExceptionSeverity;
import com.bitdubai.fermat_pip_api.layer.pip_platform_service.error_manager.UnexpectedPluginExceptionSeverity;
import com.bitdubai.fermat_pip_api.layer.pip_platform_service.event_manager.interfaces.DealsWithEvents;
import com.bitdubai.fermat_pip_api.layer.pip_platform_service.event_manager.interfaces.EventManager;
/**
* Created by Natalia on 08/05/2015.
*/
/**
 * The DeviceConnectivityAgent class is the implementation of the ConnectivityAgent interface that is handed to the DeviceManager.
 * This class checks for network connection changes and raises an event when one occurs.
 */
public class DeviceConnectivityAgent implements ConnectivityAgent,DealsWithErrors,DealsWithEvents {

    /**
     * ConnectivityAgent member variables.
     */
    Thread agentThread;
    MonitorAgent monitorAgent;
    Network connectionIfo = new DeviceNetwork();
    // NOTE(review): context is never assigned within this class; the monitor's
    // getSystemService() call will NPE unless the framework injects it -- confirm.
    Context context;
    ConnectionType activeNetwork;

    /**
     * DealsWithErrors interface member variables.
     */
    ErrorManager errorManager;

    /**
     * DealsWithEvents interface member variables.
     */
    EventManager eventManager;

    /**
     * DeviceConnectivityAgent constructor.
     *
     * @param activeNetwork the connection type considered active at start-up
     */
    public DeviceConnectivityAgent(ConnectionType activeNetwork) {
        this.activeNetwork = activeNetwork;
    }

    /**
     * Starts the background monitor thread that polls for network changes.
     *
     * @throws CantStartAgentException if the monitor could not be set up or started
     */
    @Override
    public void start() throws CantStartAgentException {
        this.monitorAgent = new MonitorAgent();
        try {
            this.monitorAgent.initialize();
            this.monitorAgent.setErrorManager(this.errorManager);
            this.agentThread = new Thread(this.monitorAgent);
            this.agentThread.start();
        } catch (Exception exception) {
            // BUG FIX: report against this addon (DEVICE_CONNECTIVITY) instead of
            // the unrelated BITDUBAI_INCOMING_CRYPTO_TRANSACTION plugin, matching
            // the reporting already done in the monitor's main task.
            errorManager.reportUnexpectedAddonsException(Addons.DEVICE_CONNECTIVITY, UnexpectedAddonsExceptionSeverity.DISABLES_SOME_FUNCTIONALITY_WITHIN_THIS_ADDONS, exception);
            throw new CantStartAgentException();
        }
    }

    /**
     * Interrupts the monitor thread so it can clean up and exit.
     */
    @Override
    public void stop() {
        // Guard against stop() being invoked before (or without) start().
        if (this.agentThread != null) {
            this.agentThread.interrupt();
        }
    }

    /**
     * <p>This method gets the active connection info.
     *
     * @return Network object
     */
    @Override
    public Network getConnectionInfo() {
        return connectionIfo;
    }

    /**
     * DealsWithErrors interface implementation.
     */
    public void setErrorManager(ErrorManager errorManager) {
        this.errorManager = errorManager;
    }

    /**
     * DealsWithEvents interface implementation.
     */
    @Override
    public void setEventManager(EventManager eventManager) {
        this.eventManager = eventManager;
    }

    /**
     * Runnable that periodically inspects the device's active network and raises
     * a platform event whenever the connection type changes.
     */
    private class MonitorAgent implements DealsWithErrors, Runnable {

        /**
         * DealsWithErrors interface member variables.
         */
        ErrorManager errorManager;

        // Poll interval between network checks, in milliseconds.
        private static final int SLEEP_TIME = 5000;

        /**
         * DealsWithErrors interface implementation.
         */
        @Override
        public void setErrorManager(ErrorManager errorManager) {
            this.errorManager = errorManager;
        }

        /**
         * One-time setup hook; nothing to prepare at present.
         */
        private void initialize() {
        }

        /**
         * Runnable interface implementation: sleep, check the network, repeat
         * until the thread is interrupted.
         */
        @Override
        public void run() {
            while (true) {
                try {
                    Thread.sleep(SLEEP_TIME);
                } catch (InterruptedException interruptedException) {
                    cleanResources();
                    return;
                }
                doTheMainTask();
                if (Thread.currentThread().isInterrupted()) {
                    cleanResources();
                    return;
                }
            }
        }

        /**
         * Looks up the active network, records its connected state and type, and
         * raises an event when the type differs from the last observed one.
         */
        private void doTheMainTask() {
            try {
                android.net.ConnectivityManager connectivityManager = (android.net.ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
                NetworkInfo activeNetInfo = connectivityManager.getActiveNetworkInfo();
                if (activeNetInfo != null) {
                    connectionIfo.setIsConnected(activeNetInfo.isConnected());
                    switch (activeNetInfo.getType()) {
                        case android.net.ConnectivityManager.TYPE_MOBILE_DUN:
                            connectionIfo.setType(ConnectionType.MOBILE_DUN);
                            break;
                        case android.net.ConnectivityManager.TYPE_MOBILE_HIPRI:
                            connectionIfo.setType(ConnectionType.MOBILE_HIPRI);
                            break;
                        case android.net.ConnectivityManager.TYPE_MOBILE_SUPL:
                            connectionIfo.setType(ConnectionType.MOBILE_SUPL);
                            break;
                        case android.net.ConnectivityManager.TYPE_MOBILE_MMS:
                            connectionIfo.setType(ConnectionType.MOBILE_MMS);
                            break;
                        case android.net.ConnectivityManager.TYPE_MOBILE: //0
                            connectionIfo.setType(ConnectionType.MOBILE_DATA);
                            break;
                        case android.net.ConnectivityManager.TYPE_WIFI: //1
                            connectionIfo.setType(ConnectionType.WI_FI);
                            break;
                        case android.net.ConnectivityManager.TYPE_WIMAX: //6
                            connectionIfo.setType(ConnectionType.WIMAX);
                            break;
                        case android.net.ConnectivityManager.TYPE_ETHERNET: //9
                            connectionIfo.setType(ConnectionType.ETHERNET);
                            break;
                        case android.net.ConnectivityManager.TYPE_BLUETOOTH: //7
                            connectionIfo.setType(ConnectionType.BLUETOOTH);
                            break;
                        default:
                            break;
                    }
                    // Network type changed -- notify the platform.
                    if (activeNetwork != connectionIfo.getType()) {
                        activeNetwork = connectionIfo.getType();
                        FermatEvent fermatEvent = eventManager.getNewEvent(null);
                        fermatEvent.setSource(EventSource.DEVICE_CONNECTIVITY);
                        eventManager.raiseEvent(fermatEvent);
                    }
                }
            } catch (Exception exception) {
                errorManager.reportUnexpectedAddonsException(Addons.DEVICE_CONNECTIVITY, UnexpectedAddonsExceptionSeverity.DISABLES_SOME_FUNCTIONALITY_WITHIN_THIS_ADDONS, exception);
            }
        }

        /**
         * Release any held resources before the thread exits; nothing to free yet.
         */
        private void cleanResources() {
        }
    }
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.support.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
*/
public class DescribeCasesRequest extends AmazonWebServiceRequest implements
Serializable, Cloneable {
/**
* <p>
* A list of ID numbers of the support cases you want returned. The maximum
* number of cases is 100.
* </p>
*/
private com.amazonaws.internal.SdkInternalList<String> caseIdList;
/**
* <p>
* The ID displayed for a case in the AWS Support Center user interface.
* </p>
*/
private String displayId;
/**
* <p>
* The start date for a filtered date search on support case communications.
* Case communications are available for 12 months after creation.
* </p>
*/
private String afterTime;
/**
* <p>
* The end date for a filtered date search on support case communications.
* Case communications are available for 12 months after creation.
* </p>
*/
private String beforeTime;
/**
* <p>
* Specifies whether resolved support cases should be included in the
* <a>DescribeCases</a> results. The default is <i>false</i>.
* </p>
*/
private Boolean includeResolvedCases;
/**
* <p>
* A resumption point for pagination.
* </p>
*/
private String nextToken;
/**
* <p>
* The maximum number of results to return before paginating.
* </p>
*/
private Integer maxResults;
/**
* <p>
* The ISO 639-1 code for the language in which AWS provides support. AWS
* Support currently supports English ("en") and Japanese ("ja"). Language
* parameters must be passed explicitly for operations that take them.
* </p>
*/
private String language;
/**
* <p>
* Specifies whether communications should be included in the
* <a>DescribeCases</a> results. The default is <i>true</i>.
* </p>
*/
private Boolean includeCommunications;
/**
 * <p>
 * A list of ID numbers of the support cases you want returned. The maximum
 * number of cases is 100.
 * </p>
 *
 * @return A list of ID numbers of the support cases you want returned. The
 *         maximum number of cases is 100.
 */
public java.util.List<String> getCaseIdList() {
    // Lazily create the backing list so callers never observe null.
    if (this.caseIdList == null) {
        this.caseIdList = new com.amazonaws.internal.SdkInternalList<String>();
    }
    return this.caseIdList;
}
/**
 * <p>
 * A list of ID numbers of the support cases you want returned. The maximum
 * number of cases is 100.
 * </p>
 *
 * @param caseIdList
 *        A list of ID numbers of the support cases you want returned. The
 *        maximum number of cases is 100.
 */
public void setCaseIdList(java.util.Collection<String> caseIdList) {
    // A null collection clears the property; otherwise snapshot the contents.
    this.caseIdList = (caseIdList == null)
            ? null
            : new com.amazonaws.internal.SdkInternalList<String>(caseIdList);
}
/**
 * <p>
 * A list of ID numbers of the support cases you want returned. The maximum
 * number of cases is 100.
 * </p>
 * <p>
 * <b>NOTE:</b> This method appends the values to the existing list (if
 * any). Use {@link #setCaseIdList(java.util.Collection)} or
 * {@link #withCaseIdList(java.util.Collection)} if you want to override the
 * existing values.
 * </p>
 *
 * @param caseIdList
 *        A list of ID numbers of the support cases you want returned. The
 *        maximum number of cases is 100.
 * @return Returns a reference to this object so that method calls can be
 *         chained together.
 */
public DescribeCasesRequest withCaseIdList(String... caseIdList) {
    if (this.caseIdList == null) {
        // First call: allocate a list sized for the incoming values.
        this.caseIdList = new com.amazonaws.internal.SdkInternalList<String>(caseIdList.length);
    }
    for (String caseId : caseIdList) {
        this.caseIdList.add(caseId);
    }
    return this;
}
/**
 * <p>
 * A list of ID numbers of the support cases you want returned. The maximum
 * number of cases is 100.
 * </p>
 *
 * @param caseIdList
 *        A list of ID numbers of the support cases you want returned. The
 *        maximum number of cases is 100.
 * @return Returns a reference to this object so that method calls can be
 *         chained together.
 */
public DescribeCasesRequest withCaseIdList(
        java.util.Collection<String> caseIdList) {
    // Fluent variant: delegate to the setter, then return this for chaining.
    this.setCaseIdList(caseIdList);
    return this;
}
/**
* <p>
* The ID displayed for a case in the AWS Support Center user interface.
* </p>
*
* @param displayId
* The ID displayed for a case in the AWS Support Center user
* interface.
*/
public void setDisplayId(String displayId) {
this.displayId = displayId;
}
/**
* <p>
* The ID displayed for a case in the AWS Support Center user interface.
* </p>
*
* @return The ID displayed for a case in the AWS Support Center user
* interface.
*/
public String getDisplayId() {
return this.displayId;
}
/**
* <p>
* The ID displayed for a case in the AWS Support Center user interface.
* </p>
*
* @param displayId
* The ID displayed for a case in the AWS Support Center user
* interface.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public DescribeCasesRequest withDisplayId(String displayId) {
setDisplayId(displayId);
return this;
}
/**
* <p>
* The start date for a filtered date search on support case communications.
* Case communications are available for 12 months after creation.
* </p>
*
* @param afterTime
* The start date for a filtered date search on support case
* communications. Case communications are available for 12 months
* after creation.
*/
public void setAfterTime(String afterTime) {
this.afterTime = afterTime;
}
/**
* <p>
* The start date for a filtered date search on support case communications.
* Case communications are available for 12 months after creation.
* </p>
*
* @return The start date for a filtered date search on support case
* communications. Case communications are available for 12 months
* after creation.
*/
public String getAfterTime() {
return this.afterTime;
}
/**
* <p>
* The start date for a filtered date search on support case communications.
* Case communications are available for 12 months after creation.
* </p>
*
* @param afterTime
* The start date for a filtered date search on support case
* communications. Case communications are available for 12 months
* after creation.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public DescribeCasesRequest withAfterTime(String afterTime) {
setAfterTime(afterTime);
return this;
}
/**
* <p>
* The end date for a filtered date search on support case communications.
* Case communications are available for 12 months after creation.
* </p>
*
* @param beforeTime
* The end date for a filtered date search on support case
* communications. Case communications are available for 12 months
* after creation.
*/
public void setBeforeTime(String beforeTime) {
this.beforeTime = beforeTime;
}
/**
* <p>
* The end date for a filtered date search on support case communications.
* Case communications are available for 12 months after creation.
* </p>
*
* @return The end date for a filtered date search on support case
* communications. Case communications are available for 12 months
* after creation.
*/
public String getBeforeTime() {
return this.beforeTime;
}
/**
* <p>
* The end date for a filtered date search on support case communications.
* Case communications are available for 12 months after creation.
* </p>
*
* @param beforeTime
* The end date for a filtered date search on support case
* communications. Case communications are available for 12 months
* after creation.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public DescribeCasesRequest withBeforeTime(String beforeTime) {
setBeforeTime(beforeTime);
return this;
}
/**
* <p>
* Specifies whether resolved support cases should be included in the
* <a>DescribeCases</a> results. The default is <i>false</i>.
* </p>
*
* @param includeResolvedCases
* Specifies whether resolved support cases should be included in the
* <a>DescribeCases</a> results. The default is <i>false</i>.
*/
public void setIncludeResolvedCases(Boolean includeResolvedCases) {
this.includeResolvedCases = includeResolvedCases;
}
/**
* <p>
* Specifies whether resolved support cases should be included in the
* <a>DescribeCases</a> results. The default is <i>false</i>.
* </p>
*
* @return Specifies whether resolved support cases should be included in
* the <a>DescribeCases</a> results. The default is <i>false</i>.
*/
public Boolean getIncludeResolvedCases() {
return this.includeResolvedCases;
}
/**
* <p>
* Specifies whether resolved support cases should be included in the
* <a>DescribeCases</a> results. The default is <i>false</i>.
* </p>
*
* @param includeResolvedCases
* Specifies whether resolved support cases should be included in the
* <a>DescribeCases</a> results. The default is <i>false</i>.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public DescribeCasesRequest withIncludeResolvedCases(
Boolean includeResolvedCases) {
setIncludeResolvedCases(includeResolvedCases);
return this;
}
/**
* <p>
* Specifies whether resolved support cases should be included in the
* <a>DescribeCases</a> results. The default is <i>false</i>.
* </p>
*
* @return Specifies whether resolved support cases should be included in
* the <a>DescribeCases</a> results. The default is <i>false</i>.
*/
public Boolean isIncludeResolvedCases() {
return this.includeResolvedCases;
}
/**
* <p>
* A resumption point for pagination.
* </p>
*
* @param nextToken
* A resumption point for pagination.
*/
public void setNextToken(String nextToken) {
this.nextToken = nextToken;
}
/**
* <p>
* A resumption point for pagination.
* </p>
*
* @return A resumption point for pagination.
*/
public String getNextToken() {
return this.nextToken;
}
/**
* <p>
* A resumption point for pagination.
* </p>
*
* @param nextToken
* A resumption point for pagination.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public DescribeCasesRequest withNextToken(String nextToken) {
setNextToken(nextToken);
return this;
}
/**
* <p>
* The maximum number of results to return before paginating.
* </p>
*
* @param maxResults
* The maximum number of results to return before paginating.
*/
public void setMaxResults(Integer maxResults) {
this.maxResults = maxResults;
}
/**
* <p>
* The maximum number of results to return before paginating.
* </p>
*
* @return The maximum number of results to return before paginating.
*/
public Integer getMaxResults() {
return this.maxResults;
}
/**
* <p>
* The maximum number of results to return before paginating.
* </p>
*
* @param maxResults
* The maximum number of results to return before paginating.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public DescribeCasesRequest withMaxResults(Integer maxResults) {
setMaxResults(maxResults);
return this;
}
    /**
     * Sets the ISO 639-1 code for the language in which AWS provides
     * support. AWS Support currently supports English ("en") and Japanese
     * ("ja"); language parameters must be passed explicitly for operations
     * that take them.
     *
     * @param language
     *        the ISO 639-1 language code (e.g. "en" or "ja")
     */
    public void setLanguage(String language) {
        this.language = language;
    }
/**
* <p>
* The ISO 639-1 code for the language in which AWS provides support. AWS
* Support currently supports English ("en") and Japanese ("ja"). Language
* parameters must be passed explicitly for operations that take them.
* </p>
*
* @return The ISO 639-1 code for the language in which AWS provides
* support. AWS Support currently supports English ("en") and
* Japanese ("ja"). Language parameters must be passed explicitly
* for operations that take them.
*/
public String getLanguage() {
return this.language;
}
/**
* <p>
* The ISO 639-1 code for the language in which AWS provides support. AWS
* Support currently supports English ("en") and Japanese ("ja"). Language
* parameters must be passed explicitly for operations that take them.
* </p>
*
* @param language
* The ISO 639-1 code for the language in which AWS provides support.
* AWS Support currently supports English ("en") and Japanese ("ja").
* Language parameters must be passed explicitly for operations that
* take them.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public DescribeCasesRequest withLanguage(String language) {
setLanguage(language);
return this;
}
    /**
     * Sets whether communications should be included in the
     * <a>DescribeCases</a> results. The service default is <i>true</i>.
     *
     * @param includeCommunications
     *        whether to include case communications in the results
     */
    public void setIncludeCommunications(Boolean includeCommunications) {
        this.includeCommunications = includeCommunications;
    }
/**
* <p>
* Specifies whether communications should be included in the
* <a>DescribeCases</a> results. The default is <i>true</i>.
* </p>
*
* @return Specifies whether communications should be included in the
* <a>DescribeCases</a> results. The default is <i>true</i>.
*/
public Boolean getIncludeCommunications() {
return this.includeCommunications;
}
/**
* <p>
* Specifies whether communications should be included in the
* <a>DescribeCases</a> results. The default is <i>true</i>.
* </p>
*
* @param includeCommunications
* Specifies whether communications should be included in the
* <a>DescribeCases</a> results. The default is <i>true</i>.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public DescribeCasesRequest withIncludeCommunications(
Boolean includeCommunications) {
setIncludeCommunications(includeCommunications);
return this;
}
/**
* <p>
* Specifies whether communications should be included in the
* <a>DescribeCases</a> results. The default is <i>true</i>.
* </p>
*
* @return Specifies whether communications should be included in the
* <a>DescribeCases</a> results. The default is <i>true</i>.
*/
public Boolean isIncludeCommunications() {
return this.includeCommunications;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getCaseIdList() != null)
sb.append("CaseIdList: " + getCaseIdList() + ",");
if (getDisplayId() != null)
sb.append("DisplayId: " + getDisplayId() + ",");
if (getAfterTime() != null)
sb.append("AfterTime: " + getAfterTime() + ",");
if (getBeforeTime() != null)
sb.append("BeforeTime: " + getBeforeTime() + ",");
if (getIncludeResolvedCases() != null)
sb.append("IncludeResolvedCases: " + getIncludeResolvedCases()
+ ",");
if (getNextToken() != null)
sb.append("NextToken: " + getNextToken() + ",");
if (getMaxResults() != null)
sb.append("MaxResults: " + getMaxResults() + ",");
if (getLanguage() != null)
sb.append("Language: " + getLanguage() + ",");
if (getIncludeCommunications() != null)
sb.append("IncludeCommunications: " + getIncludeCommunications());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof DescribeCasesRequest == false)
return false;
DescribeCasesRequest other = (DescribeCasesRequest) obj;
if (other.getCaseIdList() == null ^ this.getCaseIdList() == null)
return false;
if (other.getCaseIdList() != null
&& other.getCaseIdList().equals(this.getCaseIdList()) == false)
return false;
if (other.getDisplayId() == null ^ this.getDisplayId() == null)
return false;
if (other.getDisplayId() != null
&& other.getDisplayId().equals(this.getDisplayId()) == false)
return false;
if (other.getAfterTime() == null ^ this.getAfterTime() == null)
return false;
if (other.getAfterTime() != null
&& other.getAfterTime().equals(this.getAfterTime()) == false)
return false;
if (other.getBeforeTime() == null ^ this.getBeforeTime() == null)
return false;
if (other.getBeforeTime() != null
&& other.getBeforeTime().equals(this.getBeforeTime()) == false)
return false;
if (other.getIncludeResolvedCases() == null
^ this.getIncludeResolvedCases() == null)
return false;
if (other.getIncludeResolvedCases() != null
&& other.getIncludeResolvedCases().equals(
this.getIncludeResolvedCases()) == false)
return false;
if (other.getNextToken() == null ^ this.getNextToken() == null)
return false;
if (other.getNextToken() != null
&& other.getNextToken().equals(this.getNextToken()) == false)
return false;
if (other.getMaxResults() == null ^ this.getMaxResults() == null)
return false;
if (other.getMaxResults() != null
&& other.getMaxResults().equals(this.getMaxResults()) == false)
return false;
if (other.getLanguage() == null ^ this.getLanguage() == null)
return false;
if (other.getLanguage() != null
&& other.getLanguage().equals(this.getLanguage()) == false)
return false;
if (other.getIncludeCommunications() == null
^ this.getIncludeCommunications() == null)
return false;
if (other.getIncludeCommunications() != null
&& other.getIncludeCommunications().equals(
this.getIncludeCommunications()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode
+ ((getCaseIdList() == null) ? 0 : getCaseIdList().hashCode());
hashCode = prime * hashCode
+ ((getDisplayId() == null) ? 0 : getDisplayId().hashCode());
hashCode = prime * hashCode
+ ((getAfterTime() == null) ? 0 : getAfterTime().hashCode());
hashCode = prime * hashCode
+ ((getBeforeTime() == null) ? 0 : getBeforeTime().hashCode());
hashCode = prime
* hashCode
+ ((getIncludeResolvedCases() == null) ? 0
: getIncludeResolvedCases().hashCode());
hashCode = prime * hashCode
+ ((getNextToken() == null) ? 0 : getNextToken().hashCode());
hashCode = prime * hashCode
+ ((getMaxResults() == null) ? 0 : getMaxResults().hashCode());
hashCode = prime * hashCode
+ ((getLanguage() == null) ? 0 : getLanguage().hashCode());
hashCode = prime
* hashCode
+ ((getIncludeCommunications() == null) ? 0
: getIncludeCommunications().hashCode());
return hashCode;
}
    /**
     * Covariant override of the inherited {@code clone()}: narrows the
     * return type so callers get a {@code DescribeCasesRequest} without
     * casting. The copy semantics come entirely from the superclass
     * implementation (presumably a shallow copy — confirm against the base
     * request class, which is not visible here).
     *
     * @see java.lang.Object#clone()
     */
    @Override
    public DescribeCasesRequest clone() {
        return (DescribeCasesRequest) super.clone();
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.repositories.s3;
import com.amazonaws.AmazonClientException;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.AmazonWebServiceRequest;
import com.amazonaws.HttpMethod;
import com.amazonaws.regions.Region;
import com.amazonaws.services.s3.AbstractAmazonS3;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.S3ClientOptions;
import com.amazonaws.services.s3.S3ResponseMetadata;
import com.amazonaws.services.s3.model.AbortMultipartUploadRequest;
import com.amazonaws.services.s3.model.AccessControlList;
import com.amazonaws.services.s3.model.Bucket;
import com.amazonaws.services.s3.model.BucketCrossOriginConfiguration;
import com.amazonaws.services.s3.model.BucketLifecycleConfiguration;
import com.amazonaws.services.s3.model.BucketLoggingConfiguration;
import com.amazonaws.services.s3.model.BucketNotificationConfiguration;
import com.amazonaws.services.s3.model.BucketPolicy;
import com.amazonaws.services.s3.model.BucketReplicationConfiguration;
import com.amazonaws.services.s3.model.BucketTaggingConfiguration;
import com.amazonaws.services.s3.model.BucketVersioningConfiguration;
import com.amazonaws.services.s3.model.BucketWebsiteConfiguration;
import com.amazonaws.services.s3.model.CannedAccessControlList;
import com.amazonaws.services.s3.model.CompleteMultipartUploadRequest;
import com.amazonaws.services.s3.model.CompleteMultipartUploadResult;
import com.amazonaws.services.s3.model.CopyObjectRequest;
import com.amazonaws.services.s3.model.CopyObjectResult;
import com.amazonaws.services.s3.model.CopyPartRequest;
import com.amazonaws.services.s3.model.CopyPartResult;
import com.amazonaws.services.s3.model.CreateBucketRequest;
import com.amazonaws.services.s3.model.DeleteBucketCrossOriginConfigurationRequest;
import com.amazonaws.services.s3.model.DeleteBucketLifecycleConfigurationRequest;
import com.amazonaws.services.s3.model.DeleteBucketPolicyRequest;
import com.amazonaws.services.s3.model.DeleteBucketReplicationConfigurationRequest;
import com.amazonaws.services.s3.model.DeleteBucketRequest;
import com.amazonaws.services.s3.model.DeleteBucketTaggingConfigurationRequest;
import com.amazonaws.services.s3.model.DeleteBucketWebsiteConfigurationRequest;
import com.amazonaws.services.s3.model.DeleteObjectRequest;
import com.amazonaws.services.s3.model.DeleteObjectsRequest;
import com.amazonaws.services.s3.model.DeleteObjectsResult;
import com.amazonaws.services.s3.model.DeleteVersionRequest;
import com.amazonaws.services.s3.model.GeneratePresignedUrlRequest;
import com.amazonaws.services.s3.model.GetBucketAclRequest;
import com.amazonaws.services.s3.model.GetBucketCrossOriginConfigurationRequest;
import com.amazonaws.services.s3.model.GetBucketLifecycleConfigurationRequest;
import com.amazonaws.services.s3.model.GetBucketLocationRequest;
import com.amazonaws.services.s3.model.GetBucketLoggingConfigurationRequest;
import com.amazonaws.services.s3.model.GetBucketNotificationConfigurationRequest;
import com.amazonaws.services.s3.model.GetBucketPolicyRequest;
import com.amazonaws.services.s3.model.GetBucketReplicationConfigurationRequest;
import com.amazonaws.services.s3.model.GetBucketTaggingConfigurationRequest;
import com.amazonaws.services.s3.model.GetBucketVersioningConfigurationRequest;
import com.amazonaws.services.s3.model.GetBucketWebsiteConfigurationRequest;
import com.amazonaws.services.s3.model.GetObjectAclRequest;
import com.amazonaws.services.s3.model.GetObjectMetadataRequest;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.GetS3AccountOwnerRequest;
import com.amazonaws.services.s3.model.HeadBucketRequest;
import com.amazonaws.services.s3.model.HeadBucketResult;
import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest;
import com.amazonaws.services.s3.model.InitiateMultipartUploadResult;
import com.amazonaws.services.s3.model.ListBucketsRequest;
import com.amazonaws.services.s3.model.ListMultipartUploadsRequest;
import com.amazonaws.services.s3.model.ListNextBatchOfObjectsRequest;
import com.amazonaws.services.s3.model.ListNextBatchOfVersionsRequest;
import com.amazonaws.services.s3.model.ListObjectsRequest;
import com.amazonaws.services.s3.model.ListPartsRequest;
import com.amazonaws.services.s3.model.ListVersionsRequest;
import com.amazonaws.services.s3.model.MultipartUploadListing;
import com.amazonaws.services.s3.model.ObjectListing;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.Owner;
import com.amazonaws.services.s3.model.PartListing;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.PutObjectResult;
import com.amazonaws.services.s3.model.RestoreObjectRequest;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.services.s3.model.SetBucketAclRequest;
import com.amazonaws.services.s3.model.SetBucketCrossOriginConfigurationRequest;
import com.amazonaws.services.s3.model.SetBucketLifecycleConfigurationRequest;
import com.amazonaws.services.s3.model.SetBucketLoggingConfigurationRequest;
import com.amazonaws.services.s3.model.SetBucketNotificationConfigurationRequest;
import com.amazonaws.services.s3.model.SetBucketPolicyRequest;
import com.amazonaws.services.s3.model.SetBucketReplicationConfigurationRequest;
import com.amazonaws.services.s3.model.SetBucketTaggingConfigurationRequest;
import com.amazonaws.services.s3.model.SetBucketVersioningConfigurationRequest;
import com.amazonaws.services.s3.model.SetBucketWebsiteConfigurationRequest;
import com.amazonaws.services.s3.model.SetObjectAclRequest;
import com.amazonaws.services.s3.model.StorageClass;
import com.amazonaws.services.s3.model.UploadPartRequest;
import com.amazonaws.services.s3.model.UploadPartResult;
import com.amazonaws.services.s3.model.VersionListing;
import org.elasticsearch.common.SuppressForbidden;
import java.io.File;
import java.io.InputStream;
import java.net.URL;
import java.util.Date;
import java.util.List;
@SuppressForbidden(reason = "implements AWS api that uses java.io.File!")
public class AmazonS3Wrapper extends AbstractAmazonS3 {
protected AmazonS3 delegate;
public AmazonS3Wrapper(AmazonS3 delegate) {
this.delegate = delegate;
}
    // -----------------------------------------------------------------
    // Client configuration — forwarded verbatim to the wrapped client.
    // -----------------------------------------------------------------
    @Override
    public void setEndpoint(String endpoint) {
        delegate.setEndpoint(endpoint);
    }
    @Override
    public void setRegion(Region region) throws IllegalArgumentException {
        delegate.setRegion(region);
    }
    @Override
    public void setS3ClientOptions(S3ClientOptions clientOptions) {
        delegate.setS3ClientOptions(clientOptions);
    }
    // Object-level storage-class / redirect mutations, delegated unchanged.
    @Override
    public void changeObjectStorageClass(String bucketName, String key, StorageClass newStorageClass) throws AmazonClientException, AmazonServiceException {
        delegate.changeObjectStorageClass(bucketName, key, newStorageClass);
    }
    @Override
    public void setObjectRedirectLocation(String bucketName, String key, String newRedirectLocation) throws AmazonClientException, AmazonServiceException {
        delegate.setObjectRedirectLocation(bucketName, key, newRedirectLocation);
    }
    // -----------------------------------------------------------------
    // Object and version listing — every overload is a pure delegation.
    // -----------------------------------------------------------------
    @Override
    public ObjectListing listObjects(String bucketName) throws AmazonClientException, AmazonServiceException {
        return delegate.listObjects(bucketName);
    }
    @Override
    public ObjectListing listObjects(String bucketName, String prefix) throws AmazonClientException, AmazonServiceException {
        return delegate.listObjects(bucketName, prefix);
    }
    @Override
    public ObjectListing listObjects(ListObjectsRequest listObjectsRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.listObjects(listObjectsRequest);
    }
    @Override
    public ObjectListing listNextBatchOfObjects(ObjectListing previousObjectListing) throws AmazonClientException, AmazonServiceException {
        return delegate.listNextBatchOfObjects(previousObjectListing);
    }
    @Override
    public VersionListing listVersions(String bucketName, String prefix) throws AmazonClientException, AmazonServiceException {
        return delegate.listVersions(bucketName, prefix);
    }
    @Override
    public VersionListing listNextBatchOfVersions(VersionListing previousVersionListing) throws AmazonClientException, AmazonServiceException {
        return delegate.listNextBatchOfVersions(previousVersionListing);
    }
    @Override
    public VersionListing listVersions(String bucketName, String prefix, String keyMarker, String versionIdMarker, String delimiter, Integer maxResults) throws AmazonClientException, AmazonServiceException {
        return delegate.listVersions(bucketName, prefix, keyMarker, versionIdMarker, delimiter, maxResults);
    }
    @Override
    public VersionListing listVersions(ListVersionsRequest listVersionsRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.listVersions(listVersionsRequest);
    }
    // -----------------------------------------------------------------
    // Account and bucket operations.
    // -----------------------------------------------------------------
    @Override
    public Owner getS3AccountOwner() throws AmazonClientException, AmazonServiceException {
        return delegate.getS3AccountOwner();
    }
    @Override
    public boolean doesBucketExist(String bucketName) throws AmazonClientException, AmazonServiceException {
        return delegate.doesBucketExist(bucketName);
    }
    @Override
    public List<Bucket> listBuckets() throws AmazonClientException, AmazonServiceException {
        return delegate.listBuckets();
    }
    @Override
    public List<Bucket> listBuckets(ListBucketsRequest listBucketsRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.listBuckets(listBucketsRequest);
    }
    @Override
    public String getBucketLocation(String bucketName) throws AmazonClientException, AmazonServiceException {
        return delegate.getBucketLocation(bucketName);
    }
    @Override
    public String getBucketLocation(GetBucketLocationRequest getBucketLocationRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.getBucketLocation(getBucketLocationRequest);
    }
    // Bucket creation overloads, delegated unchanged.
    @Override
    public Bucket createBucket(CreateBucketRequest createBucketRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.createBucket(createBucketRequest);
    }
    @Override
    public Bucket createBucket(String bucketName) throws AmazonClientException, AmazonServiceException {
        return delegate.createBucket(bucketName);
    }
    @Override
    public Bucket createBucket(String bucketName, com.amazonaws.services.s3.model.Region region) throws AmazonClientException, AmazonServiceException {
        return delegate.createBucket(bucketName, region);
    }
    @Override
    public Bucket createBucket(String bucketName, String region) throws AmazonClientException, AmazonServiceException {
        return delegate.createBucket(bucketName, region);
    }
    // -----------------------------------------------------------------
    // Object and bucket ACLs — pure delegations.
    // -----------------------------------------------------------------
    @Override
    public AccessControlList getObjectAcl(String bucketName, String key) throws AmazonClientException, AmazonServiceException {
        return delegate.getObjectAcl(bucketName, key);
    }
    @Override
    public AccessControlList getObjectAcl(String bucketName, String key, String versionId) throws AmazonClientException, AmazonServiceException {
        return delegate.getObjectAcl(bucketName, key, versionId);
    }
    @Override
    public AccessControlList getObjectAcl(GetObjectAclRequest getObjectAclRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.getObjectAcl(getObjectAclRequest);
    }
    @Override
    public void setObjectAcl(String bucketName, String key, AccessControlList acl) throws AmazonClientException, AmazonServiceException {
        delegate.setObjectAcl(bucketName, key, acl);
    }
    @Override
    public void setObjectAcl(String bucketName, String key, CannedAccessControlList acl) throws AmazonClientException, AmazonServiceException {
        delegate.setObjectAcl(bucketName, key, acl);
    }
    @Override
    public void setObjectAcl(String bucketName, String key, String versionId, AccessControlList acl) throws AmazonClientException, AmazonServiceException {
        delegate.setObjectAcl(bucketName, key, versionId, acl);
    }
    @Override
    public void setObjectAcl(String bucketName, String key, String versionId, CannedAccessControlList acl) throws AmazonClientException, AmazonServiceException {
        delegate.setObjectAcl(bucketName, key, versionId, acl);
    }
    @Override
    public void setObjectAcl(SetObjectAclRequest setObjectAclRequest) throws AmazonClientException, AmazonServiceException {
        delegate.setObjectAcl(setObjectAclRequest);
    }
    @Override
    public AccessControlList getBucketAcl(String bucketName) throws AmazonClientException, AmazonServiceException {
        return delegate.getBucketAcl(bucketName);
    }
    @Override
    public void setBucketAcl(SetBucketAclRequest setBucketAclRequest) throws AmazonClientException, AmazonServiceException {
        delegate.setBucketAcl(setBucketAclRequest);
    }
    @Override
    public AccessControlList getBucketAcl(GetBucketAclRequest getBucketAclRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.getBucketAcl(getBucketAclRequest);
    }
    @Override
    public void setBucketAcl(String bucketName, AccessControlList acl) throws AmazonClientException, AmazonServiceException {
        delegate.setBucketAcl(bucketName, acl);
    }
    @Override
    public void setBucketAcl(String bucketName, CannedAccessControlList acl) throws AmazonClientException, AmazonServiceException {
        delegate.setBucketAcl(bucketName, acl);
    }
    // -----------------------------------------------------------------
    // Object reads and bucket deletion.
    // -----------------------------------------------------------------
    @Override
    public ObjectMetadata getObjectMetadata(String bucketName, String key) throws AmazonClientException, AmazonServiceException {
        return delegate.getObjectMetadata(bucketName, key);
    }
    @Override
    public ObjectMetadata getObjectMetadata(GetObjectMetadataRequest getObjectMetadataRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.getObjectMetadata(getObjectMetadataRequest);
    }
    @Override
    public S3Object getObject(String bucketName, String key) throws AmazonClientException, AmazonServiceException {
        return delegate.getObject(bucketName, key);
    }
    @Override
    public S3Object getObject(GetObjectRequest getObjectRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.getObject(getObjectRequest);
    }
    @Override
    public ObjectMetadata getObject(GetObjectRequest getObjectRequest, File destinationFile) throws AmazonClientException, AmazonServiceException {
        return delegate.getObject(getObjectRequest, destinationFile);
    }
    @Override
    public void deleteBucket(DeleteBucketRequest deleteBucketRequest) throws AmazonClientException, AmazonServiceException {
        delegate.deleteBucket(deleteBucketRequest);
    }
    @Override
    public void deleteBucket(String bucketName) throws AmazonClientException, AmazonServiceException {
        delegate.deleteBucket(bucketName);
    }
    // -----------------------------------------------------------------
    // Bucket replication configuration.
    // -----------------------------------------------------------------
    @Override
    public void setBucketReplicationConfiguration(String bucketName, BucketReplicationConfiguration configuration) throws AmazonServiceException, AmazonClientException {
        delegate.setBucketReplicationConfiguration(bucketName, configuration);
    }
    @Override
    public void setBucketReplicationConfiguration(SetBucketReplicationConfigurationRequest setBucketReplicationConfigurationRequest) throws AmazonServiceException, AmazonClientException {
        delegate.setBucketReplicationConfiguration(setBucketReplicationConfigurationRequest);
    }
    @Override
    public BucketReplicationConfiguration getBucketReplicationConfiguration(String bucketName) throws AmazonServiceException, AmazonClientException {
        return delegate.getBucketReplicationConfiguration(bucketName);
    }
    @Override
    public void deleteBucketReplicationConfiguration(String bucketName) throws AmazonServiceException, AmazonClientException {
        delegate.deleteBucketReplicationConfiguration(bucketName);
    }
    @Override
    public void deleteBucketReplicationConfiguration(DeleteBucketReplicationConfigurationRequest request) throws AmazonServiceException,
            AmazonClientException {
        delegate.deleteBucketReplicationConfiguration(request);
    }
    @Override
    public boolean doesObjectExist(String bucketName, String objectName) throws AmazonServiceException, AmazonClientException {
        return delegate.doesObjectExist(bucketName, objectName);
    }
    // -----------------------------------------------------------------
    // Object writes, copies and deletions — pure delegations.
    // -----------------------------------------------------------------
    @Override
    public PutObjectResult putObject(PutObjectRequest putObjectRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.putObject(putObjectRequest);
    }
    @Override
    public PutObjectResult putObject(String bucketName, String key, File file) throws AmazonClientException, AmazonServiceException {
        return delegate.putObject(bucketName, key, file);
    }
    @Override
    public PutObjectResult putObject(String bucketName, String key, InputStream input, ObjectMetadata metadata) throws AmazonClientException, AmazonServiceException {
        return delegate.putObject(bucketName, key, input, metadata);
    }
    @Override
    public CopyObjectResult copyObject(String sourceBucketName, String sourceKey, String destinationBucketName, String destinationKey) throws AmazonClientException, AmazonServiceException {
        return delegate.copyObject(sourceBucketName, sourceKey, destinationBucketName, destinationKey);
    }
    @Override
    public CopyObjectResult copyObject(CopyObjectRequest copyObjectRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.copyObject(copyObjectRequest);
    }
    @Override
    public CopyPartResult copyPart(CopyPartRequest copyPartRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.copyPart(copyPartRequest);
    }
    @Override
    public void deleteObject(String bucketName, String key) throws AmazonClientException, AmazonServiceException {
        delegate.deleteObject(bucketName, key);
    }
    @Override
    public void deleteObject(DeleteObjectRequest deleteObjectRequest) throws AmazonClientException, AmazonServiceException {
        delegate.deleteObject(deleteObjectRequest);
    }
    @Override
    public DeleteObjectsResult deleteObjects(DeleteObjectsRequest deleteObjectsRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.deleteObjects(deleteObjectsRequest);
    }
    @Override
    public void deleteVersion(String bucketName, String key, String versionId) throws AmazonClientException, AmazonServiceException {
        delegate.deleteVersion(bucketName, key, versionId);
    }
    @Override
    public void deleteVersion(DeleteVersionRequest deleteVersionRequest) throws AmazonClientException, AmazonServiceException {
        delegate.deleteVersion(deleteVersionRequest);
    }
    // -----------------------------------------------------------------
    // Bucket logging and versioning configuration.
    // -----------------------------------------------------------------
    @Override
    public BucketLoggingConfiguration getBucketLoggingConfiguration(String bucketName) throws AmazonClientException, AmazonServiceException {
        return delegate.getBucketLoggingConfiguration(bucketName);
    }
    @Override
    public void setBucketLoggingConfiguration(SetBucketLoggingConfigurationRequest setBucketLoggingConfigurationRequest) throws AmazonClientException, AmazonServiceException {
        delegate.setBucketLoggingConfiguration(setBucketLoggingConfigurationRequest);
    }
    @Override
    public BucketVersioningConfiguration getBucketVersioningConfiguration(String bucketName) throws AmazonClientException, AmazonServiceException {
        return delegate.getBucketVersioningConfiguration(bucketName);
    }
    @Override
    public void setBucketVersioningConfiguration(SetBucketVersioningConfigurationRequest setBucketVersioningConfigurationRequest) throws AmazonClientException, AmazonServiceException {
        delegate.setBucketVersioningConfiguration(setBucketVersioningConfigurationRequest);
    }
    // -----------------------------------------------------------------
    // Bucket lifecycle configuration.
    // -----------------------------------------------------------------
    @Override
    public BucketLifecycleConfiguration getBucketLifecycleConfiguration(String bucketName) {
        return delegate.getBucketLifecycleConfiguration(bucketName);
    }
    @Override
    public void setBucketLifecycleConfiguration(String bucketName, BucketLifecycleConfiguration bucketLifecycleConfiguration) {
        delegate.setBucketLifecycleConfiguration(bucketName, bucketLifecycleConfiguration);
    }
    @Override
    public void setBucketLifecycleConfiguration(SetBucketLifecycleConfigurationRequest setBucketLifecycleConfigurationRequest) {
        delegate.setBucketLifecycleConfiguration(setBucketLifecycleConfigurationRequest);
    }
    @Override
    public void deleteBucketLifecycleConfiguration(String bucketName) {
        delegate.deleteBucketLifecycleConfiguration(bucketName);
    }
    @Override
    public void deleteBucketLifecycleConfiguration(DeleteBucketLifecycleConfigurationRequest deleteBucketLifecycleConfigurationRequest) {
        delegate.deleteBucketLifecycleConfiguration(deleteBucketLifecycleConfigurationRequest);
    }
    // -----------------------------------------------------------------
    // Bucket cross-origin (CORS) configuration.
    // -----------------------------------------------------------------
    @Override
    public BucketCrossOriginConfiguration getBucketCrossOriginConfiguration(String bucketName) {
        return delegate.getBucketCrossOriginConfiguration(bucketName);
    }
    @Override
    public void setBucketCrossOriginConfiguration(String bucketName, BucketCrossOriginConfiguration bucketCrossOriginConfiguration) {
        delegate.setBucketCrossOriginConfiguration(bucketName, bucketCrossOriginConfiguration);
    }
    @Override
    public void setBucketCrossOriginConfiguration(SetBucketCrossOriginConfigurationRequest setBucketCrossOriginConfigurationRequest) {
        delegate.setBucketCrossOriginConfiguration(setBucketCrossOriginConfigurationRequest);
    }
    @Override
    public void deleteBucketCrossOriginConfiguration(String bucketName) {
        delegate.deleteBucketCrossOriginConfiguration(bucketName);
    }
    @Override
    public void deleteBucketCrossOriginConfiguration(DeleteBucketCrossOriginConfigurationRequest deleteBucketCrossOriginConfigurationRequest) {
        delegate.deleteBucketCrossOriginConfiguration(deleteBucketCrossOriginConfigurationRequest);
    }
    // -----------------------------------------------------------------
    // Bucket tagging configuration.
    // -----------------------------------------------------------------
    @Override
    public BucketTaggingConfiguration getBucketTaggingConfiguration(String bucketName) {
        return delegate.getBucketTaggingConfiguration(bucketName);
    }
    @Override
    public void setBucketTaggingConfiguration(String bucketName, BucketTaggingConfiguration bucketTaggingConfiguration) {
        delegate.setBucketTaggingConfiguration(bucketName, bucketTaggingConfiguration);
    }
    @Override
    public void setBucketTaggingConfiguration(SetBucketTaggingConfigurationRequest setBucketTaggingConfigurationRequest) {
        delegate.setBucketTaggingConfiguration(setBucketTaggingConfigurationRequest);
    }
    @Override
    public void deleteBucketTaggingConfiguration(String bucketName) {
        delegate.deleteBucketTaggingConfiguration(bucketName);
    }
    @Override
    public void deleteBucketTaggingConfiguration(DeleteBucketTaggingConfigurationRequest deleteBucketTaggingConfigurationRequest) {
        delegate.deleteBucketTaggingConfiguration(deleteBucketTaggingConfigurationRequest);
    }
@Override
public BucketNotificationConfiguration getBucketNotificationConfiguration(String bucketName) throws AmazonClientException, AmazonServiceException {
return delegate.getBucketNotificationConfiguration(bucketName);
}
@Override
public void setBucketNotificationConfiguration(SetBucketNotificationConfigurationRequest setBucketNotificationConfigurationRequest) throws AmazonClientException, AmazonServiceException {
delegate.setBucketNotificationConfiguration(setBucketNotificationConfigurationRequest);
}
@Override
public void setBucketNotificationConfiguration(String bucketName, BucketNotificationConfiguration bucketNotificationConfiguration) throws AmazonClientException, AmazonServiceException {
delegate.setBucketNotificationConfiguration(bucketName, bucketNotificationConfiguration);
}
@Override
public BucketWebsiteConfiguration getBucketWebsiteConfiguration(String bucketName) throws AmazonClientException, AmazonServiceException {
return delegate.getBucketWebsiteConfiguration(bucketName);
}
@Override
public BucketWebsiteConfiguration getBucketWebsiteConfiguration(GetBucketWebsiteConfigurationRequest getBucketWebsiteConfigurationRequest) throws AmazonClientException, AmazonServiceException {
return delegate.getBucketWebsiteConfiguration(getBucketWebsiteConfigurationRequest);
}
@Override
public void setBucketWebsiteConfiguration(String bucketName, BucketWebsiteConfiguration configuration) throws AmazonClientException, AmazonServiceException {
delegate.setBucketWebsiteConfiguration(bucketName, configuration);
}
@Override
public void setBucketWebsiteConfiguration(SetBucketWebsiteConfigurationRequest setBucketWebsiteConfigurationRequest) throws AmazonClientException, AmazonServiceException {
delegate.setBucketWebsiteConfiguration(setBucketWebsiteConfigurationRequest);
}
@Override
public void deleteBucketWebsiteConfiguration(String bucketName) throws AmazonClientException, AmazonServiceException {
delegate.deleteBucketWebsiteConfiguration(bucketName);
}
@Override
public void deleteBucketWebsiteConfiguration(DeleteBucketWebsiteConfigurationRequest deleteBucketWebsiteConfigurationRequest) throws AmazonClientException, AmazonServiceException {
delegate.deleteBucketWebsiteConfiguration(deleteBucketWebsiteConfigurationRequest);
}
    // Bucket-policy and presigned-URL operations: all forwarded verbatim to
    // the wrapped client.
    @Override
    public BucketPolicy getBucketPolicy(String bucketName) throws AmazonClientException, AmazonServiceException {
        return delegate.getBucketPolicy(bucketName);
    }
    @Override
    public BucketPolicy getBucketPolicy(GetBucketPolicyRequest getBucketPolicyRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.getBucketPolicy(getBucketPolicyRequest);
    }
    @Override
    public void setBucketPolicy(String bucketName, String policyText) throws AmazonClientException, AmazonServiceException {
        delegate.setBucketPolicy(bucketName, policyText);
    }
    @Override
    public void setBucketPolicy(SetBucketPolicyRequest setBucketPolicyRequest) throws AmazonClientException, AmazonServiceException {
        delegate.setBucketPolicy(setBucketPolicyRequest);
    }
    @Override
    public void deleteBucketPolicy(String bucketName) throws AmazonClientException, AmazonServiceException {
        delegate.deleteBucketPolicy(bucketName);
    }
    @Override
    public void deleteBucketPolicy(DeleteBucketPolicyRequest deleteBucketPolicyRequest) throws AmazonClientException, AmazonServiceException {
        delegate.deleteBucketPolicy(deleteBucketPolicyRequest);
    }
    @Override
    public URL generatePresignedUrl(String bucketName, String key, Date expiration) throws AmazonClientException {
        return delegate.generatePresignedUrl(bucketName, key, expiration);
    }
    @Override
    public URL generatePresignedUrl(String bucketName, String key, Date expiration, HttpMethod method) throws AmazonClientException {
        return delegate.generatePresignedUrl(bucketName, key, expiration, method);
    }
    @Override
    public URL generatePresignedUrl(GeneratePresignedUrlRequest generatePresignedUrlRequest) throws AmazonClientException {
        return delegate.generatePresignedUrl(generatePresignedUrlRequest);
    }
    // Multipart-upload lifecycle, cached response metadata and Glacier
    // restore operations: all forwarded verbatim to the wrapped client.
    @Override
    public InitiateMultipartUploadResult initiateMultipartUpload(InitiateMultipartUploadRequest request) throws AmazonClientException, AmazonServiceException {
        return delegate.initiateMultipartUpload(request);
    }
    @Override
    public UploadPartResult uploadPart(UploadPartRequest request) throws AmazonClientException, AmazonServiceException {
        return delegate.uploadPart(request);
    }
    @Override
    public PartListing listParts(ListPartsRequest request) throws AmazonClientException, AmazonServiceException {
        return delegate.listParts(request);
    }
    @Override
    public void abortMultipartUpload(AbortMultipartUploadRequest request) throws AmazonClientException, AmazonServiceException {
        delegate.abortMultipartUpload(request);
    }
    @Override
    public CompleteMultipartUploadResult completeMultipartUpload(CompleteMultipartUploadRequest request) throws AmazonClientException, AmazonServiceException {
        return delegate.completeMultipartUpload(request);
    }
    @Override
    public MultipartUploadListing listMultipartUploads(ListMultipartUploadsRequest request) throws AmazonClientException, AmazonServiceException {
        return delegate.listMultipartUploads(request);
    }
    @Override
    public S3ResponseMetadata getCachedResponseMetadata(AmazonWebServiceRequest request) {
        return delegate.getCachedResponseMetadata(request);
    }
    @Override
    public void restoreObject(RestoreObjectRequest copyGlacierObjectRequest) throws AmazonServiceException {
        delegate.restoreObject(copyGlacierObjectRequest);
    }
    @Override
    public void restoreObject(String bucketName, String key, int expirationInDays) throws AmazonServiceException {
        delegate.restoreObject(bucketName, key, expirationInDays);
    }
    // Requester-pays toggles, listing continuation, account owner and the
    // request-object overloads of the bucket-configuration getters: all
    // forwarded verbatim to the wrapped client.
    @Override
    public void enableRequesterPays(String bucketName) throws AmazonServiceException, AmazonClientException {
        delegate.enableRequesterPays(bucketName);
    }
    @Override
    public void disableRequesterPays(String bucketName) throws AmazonServiceException, AmazonClientException {
        delegate.disableRequesterPays(bucketName);
    }
    @Override
    public boolean isRequesterPaysEnabled(String bucketName) throws AmazonServiceException, AmazonClientException {
        return delegate.isRequesterPaysEnabled(bucketName);
    }
    @Override
    public ObjectListing listNextBatchOfObjects(ListNextBatchOfObjectsRequest listNextBatchOfObjectsRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.listNextBatchOfObjects(listNextBatchOfObjectsRequest);
    }
    @Override
    public VersionListing listNextBatchOfVersions(ListNextBatchOfVersionsRequest listNextBatchOfVersionsRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.listNextBatchOfVersions(listNextBatchOfVersionsRequest);
    }
    @Override
    public Owner getS3AccountOwner(GetS3AccountOwnerRequest getS3AccountOwnerRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.getS3AccountOwner(getS3AccountOwnerRequest);
    }
    @Override
    public BucketLoggingConfiguration getBucketLoggingConfiguration(GetBucketLoggingConfigurationRequest getBucketLoggingConfigurationRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.getBucketLoggingConfiguration(getBucketLoggingConfigurationRequest);
    }
    @Override
    public BucketVersioningConfiguration getBucketVersioningConfiguration(GetBucketVersioningConfigurationRequest getBucketVersioningConfigurationRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.getBucketVersioningConfiguration(getBucketVersioningConfigurationRequest);
    }
    @Override
    public BucketLifecycleConfiguration getBucketLifecycleConfiguration(GetBucketLifecycleConfigurationRequest getBucketLifecycleConfigurationRequest) {
        return delegate.getBucketLifecycleConfiguration(getBucketLifecycleConfigurationRequest);
    }
    @Override
    public BucketCrossOriginConfiguration getBucketCrossOriginConfiguration(GetBucketCrossOriginConfigurationRequest getBucketCrossOriginConfigurationRequest) {
        return delegate.getBucketCrossOriginConfiguration(getBucketCrossOriginConfigurationRequest);
    }
    @Override
    public BucketTaggingConfiguration getBucketTaggingConfiguration(GetBucketTaggingConfigurationRequest getBucketTaggingConfigurationRequest) {
        return delegate.getBucketTaggingConfiguration(getBucketTaggingConfigurationRequest);
    }
    @Override
    public BucketNotificationConfiguration getBucketNotificationConfiguration(GetBucketNotificationConfigurationRequest getBucketNotificationConfigurationRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.getBucketNotificationConfiguration(getBucketNotificationConfigurationRequest);
    }
    @Override
    public BucketReplicationConfiguration getBucketReplicationConfiguration(GetBucketReplicationConfigurationRequest getBucketReplicationConfigurationRequest) throws AmazonServiceException, AmazonClientException {
        return delegate.getBucketReplicationConfiguration(getBucketReplicationConfigurationRequest);
    }
    @Override
    public HeadBucketResult headBucket(HeadBucketRequest headBucketRequest) throws AmazonClientException, AmazonServiceException {
        return delegate.headBucket(headBucketRequest);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.ozone.freon;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.UUID;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Supplier;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.hadoop.hdds.cli.HddsVersionProvider;
import org.apache.hadoop.hdds.client.OzoneQuota;
import org.apache.hadoop.hdds.client.ReplicationFactor;
import org.apache.hadoop.hdds.client.ReplicationType;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.ozone.OzoneConsts;
import org.apache.hadoop.ozone.client.ObjectStore;
import org.apache.hadoop.ozone.client.OzoneBucket;
import org.apache.hadoop.ozone.client.OzoneClient;
import org.apache.hadoop.ozone.client.OzoneClientFactory;
import org.apache.hadoop.ozone.client.OzoneVolume;
import org.apache.hadoop.ozone.client.io.OzoneInputStream;
import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.util.VersionInfo;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.Snapshot;
import com.codahale.metrics.UniformReservoir;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.google.common.annotations.VisibleForTesting;
import static java.lang.Math.min;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.time.DurationFormatUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import picocli.CommandLine.Command;
import picocli.CommandLine.Option;
import picocli.CommandLine.ParentCommand;
/**
* Data generator tool to generate as much keys as possible.
*/
@Command(name = "randomkeys",
aliases = "rk",
description = "Generate volumes/buckets and put generated keys.",
versionProvider = HddsVersionProvider.class,
mixinStandardHelpOptions = true,
showDefaultValues = true)
public final class RandomKeyGenerator implements Callable<Void> {
@ParentCommand
private Freon freon;
enum FreonOps {
VOLUME_CREATE,
BUCKET_CREATE,
KEY_CREATE,
KEY_WRITE
}
private static final String RATIS = "ratis";
private static final String DURATION_FORMAT = "HH:mm:ss,SSS";
private static final int QUANTILES = 10;
private static final Logger LOG =
LoggerFactory.getLogger(RandomKeyGenerator.class);
private boolean completed = false;
private boolean exception = false;
@Option(names = "--numOfThreads",
description = "number of threads to be launched for the run",
defaultValue = "10")
private int numOfThreads = 10;
@Option(names = "--numOfVolumes",
description = "specifies number of Volumes to be created in offline mode",
defaultValue = "10")
private int numOfVolumes = 10;
@Option(names = "--numOfBuckets",
description = "specifies number of Buckets to be created per Volume",
defaultValue = "1000")
private int numOfBuckets = 1000;
@Option(
names = "--numOfKeys",
description = "specifies number of Keys to be created per Bucket",
defaultValue = "500000"
)
private int numOfKeys = 500000;
@Option(
names = "--keySize",
description = "Specifies the size of Key in bytes to be created",
defaultValue = "10240"
)
private int keySize = 10240;
@Option(
names = "--json",
description = "directory where json is created."
)
private String jsonDir;
@Option(
names = "--replicationType",
description = "Replication type (STAND_ALONE, RATIS)",
defaultValue = "STAND_ALONE"
)
private ReplicationType type = ReplicationType.STAND_ALONE;
@Option(
names = "--factor",
description = "Replication factor (ONE, THREE)",
defaultValue = "ONE"
)
private ReplicationFactor factor = ReplicationFactor.ONE;
private int threadPoolSize;
private byte[] keyValue = null;
private boolean validateWrites;
private OzoneClient ozoneClient;
private ObjectStore objectStore;
private ExecutorService processor;
private long startTime;
private long jobStartTime;
private AtomicLong volumeCreationTime;
private AtomicLong bucketCreationTime;
private AtomicLong keyCreationTime;
private AtomicLong keyWriteTime;
private AtomicLong totalBytesWritten;
private AtomicInteger numberOfVolumesCreated;
private AtomicInteger numberOfBucketsCreated;
private AtomicLong numberOfKeysAdded;
private Long totalWritesValidated;
private Long writeValidationSuccessCount;
private Long writeValidationFailureCount;
private BlockingQueue<KeyValue> validationQueue;
private ArrayList<Histogram> histograms = new ArrayList<>();
private OzoneConfiguration ozoneConfiguration;
private ProgressBar progressbar;
  /** Default constructor used when picocli instantiates the command. */
  RandomKeyGenerator() {
  }
  /** Test-only constructor injecting a pre-built configuration. */
  @VisibleForTesting
  RandomKeyGenerator(OzoneConfiguration ozoneConfiguration) {
    this.ozoneConfiguration = ozoneConfiguration;
  }
  /**
   * Initializes timers, counters, the Ozone client and per-operation
   * histograms before a run.
   *
   * @param configuration cluster configuration used to build the client
   * @throws IOException if the Ozone client cannot be created
   */
  public void init(OzoneConfiguration configuration) throws IOException {
    startTime = System.nanoTime();
    jobStartTime = System.currentTimeMillis();
    volumeCreationTime = new AtomicLong();
    bucketCreationTime = new AtomicLong();
    keyCreationTime = new AtomicLong();
    keyWriteTime = new AtomicLong();
    totalBytesWritten = new AtomicLong();
    numberOfVolumesCreated = new AtomicInteger();
    numberOfBucketsCreated = new AtomicInteger();
    numberOfKeysAdded = new AtomicLong();
    ozoneClient = OzoneClientFactory.getClient(configuration);
    objectStore = ozoneClient.getObjectStore();
    // One histogram per FreonOps entry, indexed by enum ordinal.
    for (FreonOps ops : FreonOps.values()) {
      histograms.add(ops.ordinal(), new Histogram(new UniformReservoir()));
    }
  }
@Override
public Void call() throws Exception {
if (ozoneConfiguration != null) {
init(ozoneConfiguration);
} else {
init(freon.createOzoneConfiguration());
}
keyValue =
DFSUtil.string2Bytes(RandomStringUtils.randomAscii(keySize - 36));
LOG.info("Number of Threads: " + numOfThreads);
threadPoolSize =
min(numOfVolumes, numOfThreads);
processor = Executors.newFixedThreadPool(threadPoolSize);
addShutdownHook();
LOG.info("Number of Volumes: {}.", numOfVolumes);
LOG.info("Number of Buckets per Volume: {}.", numOfBuckets);
LOG.info("Number of Keys per Bucket: {}.", numOfKeys);
LOG.info("Key size: {} bytes", keySize);
for (int i = 0; i < numOfVolumes; i++) {
String volume = "vol-" + i + "-" +
RandomStringUtils.randomNumeric(5);
processor.submit(new OfflineProcessor(volume));
}
Thread validator = null;
if (validateWrites) {
totalWritesValidated = 0L;
writeValidationSuccessCount = 0L;
writeValidationFailureCount = 0L;
validationQueue =
new ArrayBlockingQueue<>(numOfThreads);
validator = new Thread(new Validator());
validator.start();
LOG.info("Data validation is enabled.");
}
Supplier<Long> currentValue;
long maxValue;
currentValue = () -> numberOfKeysAdded.get();
maxValue = numOfVolumes *
numOfBuckets *
numOfKeys;
progressbar = new ProgressBar(System.out, maxValue, currentValue);
LOG.info("Starting progress bar Thread.");
progressbar.start();
processor.shutdown();
processor.awaitTermination(Integer.MAX_VALUE, TimeUnit.MILLISECONDS);
progressbar.shutdown();
if (validateWrites) {
validator.join();
}
ozoneClient.close();
return null;
}
  /**
   * Validates parsed option values.
   *
   * NOTE(review): the commons-cli parameter is unused and this method does
   * not appear to be invoked from this class (picocli drives the options
   * now) — confirm whether the keySize lower bound is still enforced
   * anywhere on the picocli path.
   *
   * @param cmdLine parsed command line (unused)
   */
  private void parseOptions(CommandLine cmdLine) {
    // keyValue is built as randomAscii(keySize - 36); enforcing a 1024-byte
    // minimum keeps that argument positive and the payload non-trivial.
    if (keySize < 1024) {
      throw new IllegalArgumentException(
          "keySize can not be less than 1024 bytes");
    }
  }
  /**
   * Adds ShutdownHook to print statistics, so a summary is emitted even on
   * Ctrl-C / abnormal JVM exit.
   */
  private void addShutdownHook() {
    Runtime.getRuntime().addShutdownHook(
        new Thread(() -> printStats(System.out)));
  }
/**
* Prints stats of {@link Freon} run to the PrintStream.
*
* @param out PrintStream
*/
private void printStats(PrintStream out) {
long endTime = System.nanoTime() - startTime;
String execTime = DurationFormatUtils
.formatDuration(TimeUnit.NANOSECONDS.toMillis(endTime),
DURATION_FORMAT);
long volumeTime = TimeUnit.NANOSECONDS.toMillis(volumeCreationTime.get())
/ threadPoolSize;
String prettyAverageVolumeTime =
DurationFormatUtils.formatDuration(volumeTime, DURATION_FORMAT);
long bucketTime = TimeUnit.NANOSECONDS.toMillis(bucketCreationTime.get())
/ threadPoolSize;
String prettyAverageBucketTime =
DurationFormatUtils.formatDuration(bucketTime, DURATION_FORMAT);
long averageKeyCreationTime =
TimeUnit.NANOSECONDS.toMillis(keyCreationTime.get())
/ threadPoolSize;
String prettyAverageKeyCreationTime = DurationFormatUtils
.formatDuration(averageKeyCreationTime, DURATION_FORMAT);
long averageKeyWriteTime =
TimeUnit.NANOSECONDS.toMillis(keyWriteTime.get()) / threadPoolSize;
String prettyAverageKeyWriteTime = DurationFormatUtils
.formatDuration(averageKeyWriteTime, DURATION_FORMAT);
out.println();
out.println("***************************************************");
out.println("Status: " + (exception ? "Failed" : "Success"));
out.println("Git Base Revision: " + VersionInfo.getRevision());
out.println("Number of Volumes created: " + numberOfVolumesCreated);
out.println("Number of Buckets created: " + numberOfBucketsCreated);
out.println("Number of Keys added: " + numberOfKeysAdded);
out.println("Ratis replication factor: " + factor.name());
out.println("Ratis replication type: " + type.name());
out.println(
"Average Time spent in volume creation: " + prettyAverageVolumeTime);
out.println(
"Average Time spent in bucket creation: " + prettyAverageBucketTime);
out.println(
"Average Time spent in key creation: " + prettyAverageKeyCreationTime);
out.println(
"Average Time spent in key write: " + prettyAverageKeyWriteTime);
out.println("Total bytes written: " + totalBytesWritten);
if (validateWrites) {
out.println("Total number of writes validated: " +
totalWritesValidated);
out.println("Writes validated: " +
(100.0 * totalWritesValidated / numberOfKeysAdded.get())
+ " %");
out.println("Successful validation: " +
writeValidationSuccessCount);
out.println("Unsuccessful validation: " +
writeValidationFailureCount);
}
out.println("Total Execution time: " + execTime);
out.println("***************************************************");
if (jsonDir != null) {
String[][] quantileTime =
new String[FreonOps.values().length][QUANTILES + 1];
String[] deviations = new String[FreonOps.values().length];
String[] means = new String[FreonOps.values().length];
for (FreonOps ops : FreonOps.values()) {
Snapshot snapshot = histograms.get(ops.ordinal()).getSnapshot();
for (int i = 0; i <= QUANTILES; i++) {
quantileTime[ops.ordinal()][i] = DurationFormatUtils.formatDuration(
TimeUnit.NANOSECONDS
.toMillis((long) snapshot.getValue((1.0 / QUANTILES) * i)),
DURATION_FORMAT);
}
deviations[ops.ordinal()] = DurationFormatUtils.formatDuration(
TimeUnit.NANOSECONDS.toMillis((long) snapshot.getStdDev()),
DURATION_FORMAT);
means[ops.ordinal()] = DurationFormatUtils.formatDuration(
TimeUnit.NANOSECONDS.toMillis((long) snapshot.getMean()),
DURATION_FORMAT);
}
FreonJobInfo jobInfo = new FreonJobInfo().setExecTime(execTime)
.setGitBaseRevision(VersionInfo.getRevision())
.setMeanVolumeCreateTime(means[FreonOps.VOLUME_CREATE.ordinal()])
.setDeviationVolumeCreateTime(
deviations[FreonOps.VOLUME_CREATE.ordinal()])
.setTenQuantileVolumeCreateTime(
quantileTime[FreonOps.VOLUME_CREATE.ordinal()])
.setMeanBucketCreateTime(means[FreonOps.BUCKET_CREATE.ordinal()])
.setDeviationBucketCreateTime(
deviations[FreonOps.BUCKET_CREATE.ordinal()])
.setTenQuantileBucketCreateTime(
quantileTime[FreonOps.BUCKET_CREATE.ordinal()])
.setMeanKeyCreateTime(means[FreonOps.KEY_CREATE.ordinal()])
.setDeviationKeyCreateTime(deviations[FreonOps.KEY_CREATE.ordinal()])
.setTenQuantileKeyCreateTime(
quantileTime[FreonOps.KEY_CREATE.ordinal()])
.setMeanKeyWriteTime(means[FreonOps.KEY_WRITE.ordinal()])
.setDeviationKeyWriteTime(deviations[FreonOps.KEY_WRITE.ordinal()])
.setTenQuantileKeyWriteTime(
quantileTime[FreonOps.KEY_WRITE.ordinal()]);
String jsonName =
new SimpleDateFormat("yyyyMMddHHmmss").format(Time.now()) + ".json";
String jsonPath = jsonDir + "/" + jsonName;
FileOutputStream os = null;
try {
os = new FileOutputStream(jsonPath);
ObjectMapper mapper = new ObjectMapper();
mapper.setVisibility(PropertyAccessor.FIELD,
JsonAutoDetect.Visibility.ANY);
ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
writer.writeValue(os, jobInfo);
} catch (FileNotFoundException e) {
out.println("Json File could not be created for the path: " + jsonPath);
out.println(e);
} catch (IOException e) {
out.println("Json object could not be created");
out.println(e);
} finally {
try {
if (os != null) {
os.close();
}
} catch (IOException e) {
LOG.warn("Could not close the output stream for json", e);
}
}
}
}
  /**
   * Returns the number of volumes created.
   *
   * @return volume count.
   */
  @VisibleForTesting
  int getNumberOfVolumesCreated() {
    return numberOfVolumesCreated.get();
  }
  /**
   * Returns the number of buckets created.
   *
   * @return bucket count.
   */
  @VisibleForTesting
  int getNumberOfBucketsCreated() {
    return numberOfBucketsCreated.get();
  }
  /**
   * Returns the number of keys added.
   *
   * @return keys count.
   */
  @VisibleForTesting
  long getNumberOfKeysAdded() {
    return numberOfKeysAdded.get();
  }
  /**
   * Returns true if random validation of write is enabled.
   *
   * @return validateWrites
   */
  @VisibleForTesting
  boolean getValidateWrites() {
    return validateWrites;
  }
  /**
   * Returns the number of keys validated.
   *
   * NOTE(review): unboxes a Long that is only initialized when
   * validateWrites is enabled — throws NPE otherwise; confirm callers
   * check getValidateWrites() first. Same caveat applies to the two
   * validation-count getters below.
   *
   * @return validated key count.
   */
  @VisibleForTesting
  long getTotalKeysValidated() {
    return totalWritesValidated;
  }
  /**
   * Returns the number of successful validation.
   *
   * @return successful validation count.
   */
  @VisibleForTesting
  long getSuccessfulValidationCount() {
    return writeValidationSuccessCount;
  }
  /**
   * Returns the number of unsuccessful validation.
   *
   * @return unsuccessful validation count.
   */
  @VisibleForTesting
  long getUnsuccessfulValidationCount() {
    return writeValidationFailureCount;
  }
  /**
   * Returns the length of the common key value initialized.
   *
   * @return key value length initialized.
   */
  @VisibleForTesting
  long getKeyValueLength() {
    return keyValue.length;
  }
  /**
   * Wrapper to hold ozone key-value pair, queued by {@link OfflineProcessor}
   * and consumed by {@link Validator} for read-back verification.
   */
  private static class KeyValue {
    /**
     * Bucket name associated with the key-value.
     */
    private OzoneBucket bucket;
    /**
     * Key name associated with the key-value.
     */
    private String key;
    /**
     * Value associated with the key-value.
     */
    private byte[] value;
    /**
     * Constructs a new ozone key-value pair.
     *
     * @param bucket bucket the key was written to
     * @param key key part
     * @param value value part
     */
    KeyValue(OzoneBucket bucket, String key, byte[] value) {
      this.bucket = bucket;
      this.key = key;
      this.value = value;
    }
  }
private class OfflineProcessor implements Runnable {
private int totalBuckets;
private int totalKeys;
private String volumeName;
OfflineProcessor(String volumeName) {
this.totalBuckets = numOfBuckets;
this.totalKeys = numOfKeys;
this.volumeName = volumeName;
}
@Override
public void run() {
LOG.trace("Creating volume: {}", volumeName);
long start = System.nanoTime();
OzoneVolume volume;
try {
objectStore.createVolume(volumeName);
long volumeCreationDuration = System.nanoTime() - start;
volumeCreationTime.getAndAdd(volumeCreationDuration);
histograms.get(FreonOps.VOLUME_CREATE.ordinal())
.update(volumeCreationDuration);
numberOfVolumesCreated.getAndIncrement();
volume = objectStore.getVolume(volumeName);
} catch (IOException e) {
exception = true;
LOG.error("Could not create volume", e);
return;
}
Long threadKeyWriteTime = 0L;
for (int j = 0; j < totalBuckets; j++) {
String bucketName = "bucket-" + j + "-" +
RandomStringUtils.randomNumeric(5);
try {
LOG.trace("Creating bucket: {} in volume: {}",
bucketName, volume.getName());
start = System.nanoTime();
volume.createBucket(bucketName);
long bucketCreationDuration = System.nanoTime() - start;
histograms.get(FreonOps.BUCKET_CREATE.ordinal())
.update(bucketCreationDuration);
bucketCreationTime.getAndAdd(bucketCreationDuration);
numberOfBucketsCreated.getAndIncrement();
OzoneBucket bucket = volume.getBucket(bucketName);
for (int k = 0; k < totalKeys; k++) {
String key = "key-" + k + "-" +
RandomStringUtils.randomNumeric(5);
byte[] randomValue =
DFSUtil.string2Bytes(UUID.randomUUID().toString());
try {
LOG.trace("Adding key: {} in bucket: {} of volume: {}",
key, bucket, volume);
long keyCreateStart = System.nanoTime();
OzoneOutputStream os =
bucket.createKey(key, keySize, type, factor);
long keyCreationDuration = System.nanoTime() - keyCreateStart;
histograms.get(FreonOps.KEY_CREATE.ordinal())
.update(keyCreationDuration);
keyCreationTime.getAndAdd(keyCreationDuration);
long keyWriteStart = System.nanoTime();
os.write(keyValue);
os.write(randomValue);
os.close();
long keyWriteDuration = System.nanoTime() - keyWriteStart;
threadKeyWriteTime += keyWriteDuration;
histograms.get(FreonOps.KEY_WRITE.ordinal())
.update(keyWriteDuration);
totalBytesWritten.getAndAdd(keySize);
numberOfKeysAdded.getAndIncrement();
if (validateWrites) {
byte[] value = ArrayUtils.addAll(keyValue, randomValue);
boolean validate = validationQueue.offer(
new KeyValue(bucket, key, value));
if (validate) {
LOG.trace("Key {}, is queued for validation.", key);
}
}
} catch (Exception e) {
exception = true;
LOG.error("Exception while adding key: {} in bucket: {}" +
" of volume: {}.", key, bucket, volume, e);
}
}
} catch (Exception e) {
exception = true;
LOG.error("Exception while creating bucket: {}" +
" in volume: {}.", bucketName, volume, e);
}
}
keyWriteTime.getAndAdd(threadKeyWriteTime);
}
}
  /**
   * Immutable-ish snapshot of a Freon run, serialized to JSON via Jackson
   * FIELD visibility (see printStats), so field names are the JSON keys.
   */
  private final class FreonJobInfo {
    // Run outcome and provenance.
    private String status;
    private String gitBaseRevision;
    private String jobStartTime;
    // Requested workload parameters.
    private int numOfVolumes;
    private int numOfBuckets;
    private int numOfKeys;
    private int numOfThreads;
    private String dataWritten;
    private String execTime;
    private String replicationFactor;
    private String replicationType;
    private int keySize;
    private String totalThroughputPerSecond;
    // Per-operation timing statistics (formatted duration strings).
    private String meanVolumeCreateTime;
    private String deviationVolumeCreateTime;
    private String[] tenQuantileVolumeCreateTime;
    private String meanBucketCreateTime;
    private String deviationBucketCreateTime;
    private String[] tenQuantileBucketCreateTime;
    private String meanKeyCreateTime;
    private String deviationKeyCreateTime;
    private String[] tenQuantileKeyCreateTime;
    private String meanKeyWriteTime;
    private String deviationKeyWriteTime;
    private String[] tenQuantileKeyWriteTime;
    // Captures the outer generator's settings and derives data-volume and
    // throughput figures at construction time.
    private FreonJobInfo() {
      this.status = exception ? "Failed" : "Success";
      this.numOfVolumes = RandomKeyGenerator.this.numOfVolumes;
      this.numOfBuckets = RandomKeyGenerator.this.numOfBuckets;
      this.numOfKeys = RandomKeyGenerator.this.numOfKeys;
      this.numOfThreads = RandomKeyGenerator.this.numOfThreads;
      this.keySize = RandomKeyGenerator.this.keySize;
      this.jobStartTime = Time.formatTime(RandomKeyGenerator.this.jobStartTime);
      this.replicationFactor = RandomKeyGenerator.this.factor.name();
      this.replicationType = RandomKeyGenerator.this.type.name();
      // (long) cast avoids int overflow for large workloads.
      long totalBytes =
          (long) numOfVolumes * numOfBuckets * numOfKeys * keySize;
      this.dataWritten = getInStorageUnits((double) totalBytes);
      // NOTE(review): if the per-thread write time rounds down to 0 seconds
      // this division yields Infinity in the report — confirm acceptable.
      this.totalThroughputPerSecond = getInStorageUnits(
          (totalBytes * 1.0) / TimeUnit.NANOSECONDS
              .toSeconds(
                  RandomKeyGenerator.this.keyWriteTime.get() / threadPoolSize));
    }
private String getInStorageUnits(Double value) {
double size;
OzoneQuota.Units unit;
if ((long) (value / OzoneConsts.TB) != 0) {
size = value / OzoneConsts.TB;
unit = OzoneQuota.Units.TB;
} else if ((long) (value / OzoneConsts.GB) != 0) {
size = value / OzoneConsts.GB;
unit = OzoneQuota.Units.GB;
} else if ((long) (value / OzoneConsts.MB) != 0) {
size = value / OzoneConsts.MB;
unit = OzoneQuota.Units.MB;
} else if ((long) (value / OzoneConsts.KB) != 0) {
size = value / OzoneConsts.KB;
unit = OzoneQuota.Units.KB;
} else {
size = value;
unit = OzoneQuota.Units.BYTES;
}
return size + " " + unit;
}
public FreonJobInfo setGitBaseRevision(String gitBaseRevisionVal) {
gitBaseRevision = gitBaseRevisionVal;
return this;
}
public FreonJobInfo setExecTime(String execTimeVal) {
execTime = execTimeVal;
return this;
}
public FreonJobInfo setMeanKeyWriteTime(String deviationKeyWriteTimeVal) {
this.meanKeyWriteTime = deviationKeyWriteTimeVal;
return this;
}
public FreonJobInfo setDeviationKeyWriteTime(
String deviationKeyWriteTimeVal) {
this.deviationKeyWriteTime = deviationKeyWriteTimeVal;
return this;
}
public FreonJobInfo setTenQuantileKeyWriteTime(
String[] tenQuantileKeyWriteTimeVal) {
this.tenQuantileKeyWriteTime = tenQuantileKeyWriteTimeVal;
return this;
}
public FreonJobInfo setMeanKeyCreateTime(String deviationKeyWriteTimeVal) {
this.meanKeyCreateTime = deviationKeyWriteTimeVal;
return this;
}
public FreonJobInfo setDeviationKeyCreateTime(
String deviationKeyCreateTimeVal) {
this.deviationKeyCreateTime = deviationKeyCreateTimeVal;
return this;
}
public FreonJobInfo setTenQuantileKeyCreateTime(
String[] tenQuantileKeyCreateTimeVal) {
this.tenQuantileKeyCreateTime = tenQuantileKeyCreateTimeVal;
return this;
}
public FreonJobInfo setMeanBucketCreateTime(
String deviationKeyWriteTimeVal) {
this.meanBucketCreateTime = deviationKeyWriteTimeVal;
return this;
}
public FreonJobInfo setDeviationBucketCreateTime(
String deviationBucketCreateTimeVal) {
this.deviationBucketCreateTime = deviationBucketCreateTimeVal;
return this;
}
public FreonJobInfo setTenQuantileBucketCreateTime(
String[] tenQuantileBucketCreateTimeVal) {
this.tenQuantileBucketCreateTime = tenQuantileBucketCreateTimeVal;
return this;
}
public FreonJobInfo setMeanVolumeCreateTime(
String deviationKeyWriteTimeVal) {
this.meanVolumeCreateTime = deviationKeyWriteTimeVal;
return this;
}
public FreonJobInfo setDeviationVolumeCreateTime(
String deviationVolumeCreateTimeVal) {
this.deviationVolumeCreateTime = deviationVolumeCreateTimeVal;
return this;
}
public FreonJobInfo setTenQuantileVolumeCreateTime(
String[] tenQuantileVolumeCreateTimeVal) {
this.tenQuantileVolumeCreateTime = tenQuantileVolumeCreateTimeVal;
return this;
}
    // Plain accessors; values are populated by the constructor and the
    // fluent setters above and read by tests/report consumers.
    public String getJobStartTime() {
      return jobStartTime;
    }
    public int getNumOfVolumes() {
      return numOfVolumes;
    }
    public int getNumOfBuckets() {
      return numOfBuckets;
    }
    public int getNumOfKeys() {
      return numOfKeys;
    }
    public int getNumOfThreads() {
      return numOfThreads;
    }
    public String getExecTime() {
      return execTime;
    }
    public String getReplicationFactor() {
      return replicationFactor;
    }
    public String getReplicationType() {
      return replicationType;
    }
    public String getStatus() {
      return status;
    }
    public int getKeySize() {
      return keySize;
    }
    public String getGitBaseRevision() {
      return gitBaseRevision;
    }
    public String getDataWritten() {
      return dataWritten;
    }
    public String getTotalThroughputPerSecond() {
      return totalThroughputPerSecond;
    }
    public String getMeanVolumeCreateTime() {
      return meanVolumeCreateTime;
    }
    public String getDeviationVolumeCreateTime() {
      return deviationVolumeCreateTime;
    }
    public String[] getTenQuantileVolumeCreateTime() {
      return tenQuantileVolumeCreateTime;
    }
    public String getMeanBucketCreateTime() {
      return meanBucketCreateTime;
    }
    public String getDeviationBucketCreateTime() {
      return deviationBucketCreateTime;
    }
    public String[] getTenQuantileBucketCreateTime() {
      return tenQuantileBucketCreateTime;
    }
    public String getMeanKeyCreateTime() {
      return meanKeyCreateTime;
    }
    public String getDeviationKeyCreateTime() {
      return deviationKeyCreateTime;
    }
    public String[] getTenQuantileKeyCreateTime() {
      return tenQuantileKeyCreateTime;
    }
    public String getMeanKeyWriteTime() {
      return meanKeyWriteTime;
    }
    public String getDeviationKeyWriteTime() {
      return deviationKeyWriteTime;
    }
    public String[] getTenQuantileKeyWriteTime() {
      return tenQuantileKeyWriteTime;
    }
}
/**
* Validates the write done in ozone cluster.
*/
private class Validator implements Runnable {
@Override
public void run() {
while (!completed) {
try {
KeyValue kv = validationQueue.poll(5, TimeUnit.SECONDS);
if (kv != null) {
OzoneInputStream is = kv.bucket.readKey(kv.key);
byte[] value = new byte[kv.value.length];
int length = is.read(value);
totalWritesValidated++;
if (length == kv.value.length && Arrays.equals(value, kv.value)) {
writeValidationSuccessCount++;
} else {
writeValidationFailureCount++;
LOG.warn("Data validation error for key {}/{}/{}",
kv.bucket.getVolumeName(), kv.bucket, kv.key);
LOG.warn("Expected checksum: {}, Actual checksum: {}",
DigestUtils.md5Hex(kv.value),
DigestUtils.md5Hex(value));
}
}
} catch (IOException | InterruptedException ex) {
LOG.error("Exception while validating write: " + ex.getMessage());
}
}
}
}
// Test-only mutators: let unit tests configure the benchmark directly
// instead of going through the command-line option parsing.

@VisibleForTesting
public void setNumOfVolumes(int numOfVolumes) {
this.numOfVolumes = numOfVolumes;
}
@VisibleForTesting
public void setNumOfBuckets(int numOfBuckets) {
this.numOfBuckets = numOfBuckets;
}
@VisibleForTesting
public void setNumOfKeys(int numOfKeys) {
this.numOfKeys = numOfKeys;
}
@VisibleForTesting
public void setNumOfThreads(int numOfThreads) {
this.numOfThreads = numOfThreads;
}
@VisibleForTesting
public void setKeySize(int keySize) {
this.keySize = keySize;
}
@VisibleForTesting
public void setType(ReplicationType type) {
this.type = type;
}
@VisibleForTesting
public void setFactor(ReplicationFactor factor) {
this.factor = factor;
}
@VisibleForTesting
public void setValidateWrites(boolean validateWrites) {
this.validateWrites = validateWrites;
}
}
| |
package com.cmput301f17t07.ingroove.MapActivities;
import android.location.Location;
import android.support.v4.app.FragmentActivity;
import android.os.Bundle;
import android.util.Log;
import com.cmput301f17t07.ingroove.DataManagers.Command.DataManagerAPI;
import com.cmput301f17t07.ingroove.DataManagers.DataManager;
import com.cmput301f17t07.ingroove.DataManagers.QueryTasks.AsyncResultHandler;
import com.cmput301f17t07.ingroove.Model.Habit;
import com.cmput301f17t07.ingroove.Model.HabitEvent;
import com.cmput301f17t07.ingroove.Model.User;
import com.cmput301f17t07.ingroove.R;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.OnMapReadyCallback;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.BitmapDescriptorFactory;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.MarkerOptions;
import java.util.ArrayList;
/**
 * [Boundary Class]
 * Displays a map of habit events on screen.
 *
 * <p>Uses code from android example projects.
 * See https://github.com/googlesamples/android-play-location/tree/master/LocationUpdates
 *
 * @see MapOptionsActivity
 * @see HabitEvent
 * @see DataManagerAPI
 */
public class MapsActivity extends FragmentActivity implements OnMapReadyCallback {

    /** Distance in meters within which an event counts as "near" the user. */
    private static final float NEAR_DISTANCE_METERS = 5000f;

    DataManagerAPI data = DataManager.getInstance();

    // Bundle Keys
    public static final String USER_LOC_KEY = "user location";
    public static final String CENTER_USER_LOC_KEY = "if centering on user location";
    public static final String LOC_ARRAY_KEY = "array of locations";
    public static final String HIGHLIGHT_NEAR_KEY = "highlight nearby locations";
    public static final String FOLLOWEE_HABS_KEY = "if the map should include habits from those you follow";
    public static final String USERS_HABS_KEY = "if the map should include your habits";

    // Represents the google map fragment; null until onMapReady fires.
    private GoogleMap mMap = null;

    // Display flags read from the launching intent's extras.
    private boolean center_on_user;
    private boolean highlight_near;
    private boolean show_followee_habs;
    private boolean show_usr_habs;

    // The user's last known geographical location; may be null if the
    // launching activity could not provide one.
    protected Location mLastLocation;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_maps);

        // Defaults, used when the launching intent carries no extras
        // (matches the per-key defaults below).
        center_on_user = false;
        highlight_near = false;
        show_followee_habs = true;
        show_usr_habs = true;

        // Guard against a missing extras bundle instead of risking an NPE.
        Bundle extras = getIntent().getExtras();
        if (extras != null) {
            mLastLocation = (Location) extras.getParcelable(USER_LOC_KEY);
            if (mLastLocation != null) {
                Log.d("---MAPS ACTIVITY---", "last location" + mLastLocation.toString());
            }
            center_on_user = extras.getBoolean(CENTER_USER_LOC_KEY, false);
            highlight_near = extras.getBoolean(HIGHLIGHT_NEAR_KEY, false);
            show_followee_habs = extras.getBoolean(FOLLOWEE_HABS_KEY, true);
            show_usr_habs = extras.getBoolean(USERS_HABS_KEY, true);
        }

        // Obtain the SupportMapFragment and get notified when the map is ready to be used.
        SupportMapFragment mapFragment = (SupportMapFragment) getSupportFragmentManager()
                .findFragmentById(R.id.map);
        mapFragment.getMapAsync(this);
    }

    /**
     * Manipulates the map once available.
     * This callback is triggered when the map is ready to be used; this is
     * where markers and listeners are added and the camera is moved.
     * If Google Play services is not installed on the device, the user will
     * be prompted to install it inside the SupportMapFragment; this method is
     * only triggered once the user has installed Google Play services and
     * returned to the app.
     */
    @Override
    public void onMapReady(GoogleMap googleMap) {
        mMap = googleMap;
        setup_map();
    }

    /** Populates the map with the markers selected by the intent flags. */
    public void setup_map() {
        // @TODO One of the use cases is to see habit events of those the user follows within 5km
        // since this version doesn't support social aspects this code finds the events within
        // 5km of the user from the user him/her self.
        if (show_followee_habs) {
            add_followee_habit_markers();
        }
        if (show_usr_habs) {
            add_user_habit_markers();
        }
        // Add the user's own location as a blue marker if available; skip when
        // no location fix was passed in (avoids a NullPointerException).
        if (center_on_user && mLastLocation != null) {
            LatLng userLoc = new LatLng(mLastLocation.getLatitude(), mLastLocation.getLongitude());
            mMap.addMarker(new MarkerOptions()
                    .position(userLoc)
                    .title("Your location")
                    .icon(BitmapDescriptorFactory
                            .defaultMarker(BitmapDescriptorFactory.HUE_AZURE)));
            mMap.moveCamera(CameraUpdateFactory.newLatLngZoom(userLoc, 10));
        }
    }

    /**
     * Adds a marker for a single habit event, if it has a location.
     * Events near the user (see {@link #eventNear(HabitEvent)}) are drawn
     * red, all others green.
     */
    private void addEventMarker(HabitEvent e) {
        LatLng loc = e.getLocation();
        if (loc == null) {
            return;
        }
        float hue = eventNear(e)
                ? BitmapDescriptorFactory.HUE_RED
                : BitmapDescriptorFactory.HUE_GREEN;
        mMap.addMarker(new MarkerOptions()
                .position(loc)
                .title(e.getName())
                .icon(BitmapDescriptorFactory.defaultMarker(hue)));
    }

    /** Adds markers for all of the current user's own habit events. */
    private void add_user_habit_markers() {
        ArrayList<HabitEvent> my_events = data.getHabitEvents();
        for (HabitEvent e : my_events) {
            addEventMarker(e);
        }
    }

    /**
     * Asynchronously fetches the users this user follows and adds markers
     * for each of their habit events that carries a location.
     */
    private void add_followee_habit_markers() {
        // Get all the users this user follows
        data.getWhoThisUserFollows(data.getUser(), new AsyncResultHandler<User>() {
            @Override
            public void handleResult(ArrayList<User> result) {
                Log.d("--MAP--", "Got " + result.size() + " followers");
                for (User u : result) {
                    Log.d("---MAP---", "For user " + u.getName() + " with id " + u.getUserID());
                    // Get all their habit events and add their locations to the map.
                    data.findHabitEvents(u, new AsyncResultHandler<HabitEvent>() {
                        @Override
                        public void handleResult(ArrayList<HabitEvent> result) {
                            for (HabitEvent e : result) {
                                addEventMarker(e);
                            }
                        }
                    });
                }
            }
        });
    }

    /**
     * Haversine distance check: true when the event lies within 5km of the
     * user's last known location.  Always false when highlighting is
     * disabled or no location fix is available.
     *
     * <p>Adapted from https://stackoverflow.com/questions/3694380/calculating-distance-between-two-points-using-latitude-longitude-what-am-i-doi
     */
    private boolean eventNear(HabitEvent e) {
        if (mLastLocation == null || !highlight_near) {
            return false;
        }
        double earthRadius = 6371000; // meters
        double dLat = Math.toRadians(e.getLocation().latitude - mLastLocation.getLatitude());
        double dLng = Math.toRadians(e.getLocation().longitude - mLastLocation.getLongitude());
        double a = Math.sin(dLat / 2) * Math.sin(dLat / 2)
                + Math.cos(Math.toRadians(mLastLocation.getLatitude()))
                * Math.cos(Math.toRadians(e.getLocation().latitude))
                * Math.sin(dLng / 2) * Math.sin(dLng / 2);
        double c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));
        float dist = (float) (earthRadius * c);
        Log.d("---DISTANCE CALCULATION---", "Distance was computed as " + dist);
        return dist <= NEAR_DISTANCE_METERS;
    }
}
| |
package com.perm.kate.api;
import java.io.Serializable;
import java.util.ArrayList;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
/**
 * One entry of the VK notifications feed (result of "notifications.get").
 *
 * <p>{@code parent}, {@code feedback} and {@code reply} are typed as
 * {@link Object} because their concrete class depends on {@link #type};
 * the comment on each branch of {@link #parse(JSONObject)} records which
 * class is stored for which type.
 */
public class Notification implements Serializable {
    private static final long serialVersionUID = 1L;

    // Known notification type identifiers (values of the "type" field).
    public static final String FOLLOW = "follow";
    public static final String FRIEND_ACCEPTED = "friend_accepted";
    public static final String MENTION = "mention";
    public static final String MENTION_COMMENTS = "mention_comments";
    public static final String WALL = "wall";
    public static final String COMMENT_POST = "comment_post";
    public static final String COMMENT_PHOTO = "comment_photo";
    public static final String COMMENT_VIDEO = "comment_video";
    public static final String REPLY_COMMENT = "reply_comment";//wall
    public static final String REPLY_COMMENT_PHOTO = "reply_comment_photo";
    public static final String REPLY_COMMENT_VIDEO = "reply_comment_video";
    public static final String REPLY_TOPIC = "reply_topic";
    public static final String LIKE_POST = "like_post";
    public static final String LIKE_COMMENT = "like_comment";
    public static final String LIKE_COMMENT_PHOTO = "like_comment_photo";
    public static final String LIKE_COMMENT_VIDEO = "like_comment_video";
    public static final String LIKE_COMMENT_TOPIC = "like_comment_topic";
    public static final String LIKE_PHOTO = "like_photo";
    public static final String LIKE_VIDEO = "like_video";
    public static final String COPY_POST = "copy_post";
    public static final String COPY_PHOTO = "copy_photo";
    public static final String COPY_VIDEO = "copy_video";

    public String type;
    public Long date;
    public Object parent;
    public Object feedback;
    public Object reply;
    public Photo photo;//for type reply_comment_photo
    public Video video;//for type reply_comment_video

    /**
     * Parses a single notification JSON object.
     *
     * <p>Unknown types leave {@code parent}/{@code feedback} null but still
     * produce a Notification with type and date filled in.
     *
     * @return the parsed notification, or null if required fields are missing
     *         or malformed
     */
    public static Notification parse(JSONObject o) {
        Notification n = null;
        try {
            n = new Notification();
            n.type = o.getString("type");
            n.date = o.optLong("date");
            if (n.type.equals(FOLLOW)) {
                JSONObject jfeedback = o.optJSONObject("feedback");//profiles
                n.parent = null;//empty
                if (jfeedback != null)
                    n.feedback = getProfiles(jfeedback);
            } else if (n.type.equals(FRIEND_ACCEPTED)) {
                JSONObject jfeedback = o.optJSONObject("feedback");//profiles
                n.parent = null;//empty
                if (jfeedback != null)
                    n.feedback = getProfiles(jfeedback);
            } else if (n.type.equals(MENTION)) {
                JSONObject jfeedback = o.optJSONObject("feedback");//post
                n.parent = null;//empty
                if (jfeedback != null)
                    n.feedback = WallMessage.parseForNotifications(jfeedback);
            } else if (n.type.equals(MENTION_COMMENTS)) {
                JSONObject jparent = o.optJSONObject("parent"); //post
                JSONObject jfeedback = o.optJSONObject("feedback");//comment
                if (jparent != null)
                    n.parent = WallMessage.parse(jparent);
                if (jfeedback != null)
                    n.feedback = Comment.parseNotificationComment(jfeedback, false);
            } else if (n.type.equals(WALL)) {
                JSONObject jfeedback = o.optJSONObject("feedback");//post
                if (jfeedback != null)
                    n.feedback = WallMessage.parseForNotifications(jfeedback);
            } else if (n.type.equals(COMMENT_POST)) {
                JSONObject jparent = o.optJSONObject("parent"); //post
                JSONObject jfeedback = o.optJSONObject("feedback");//comment
                if (jparent != null)
                    n.parent = WallMessage.parse(jparent);
                if (jfeedback != null)
                    n.feedback = Comment.parseNotificationComment(jfeedback, false);
            } else if (n.type.equals(COMMENT_PHOTO)) {
                JSONObject jparent = o.optJSONObject("parent"); //photo
                JSONObject jfeedback = o.optJSONObject("feedback");//comment
                if (jparent != null)
                    n.parent = Photo.parse(jparent);
                if (jfeedback != null)
                    n.feedback = Comment.parseNotificationComment(jfeedback, false);
            } else if (n.type.equals(COMMENT_VIDEO)) {
                JSONObject jparent = o.optJSONObject("parent"); //video
                JSONObject jfeedback = o.optJSONObject("feedback");//comment
                if (jparent != null)
                    n.parent = Video.parse(jparent);
                if (jfeedback != null)
                    n.feedback = Comment.parseNotificationComment(jfeedback, false);
            } else if (n.type.equals(REPLY_COMMENT)) {
                JSONObject jparent = o.optJSONObject("parent"); //comment
                JSONObject jfeedback = o.optJSONObject("feedback");//comment
                if (jparent != null)
                    n.parent = Comment.parseNotificationComment(jparent, true);
                if (jfeedback != null)
                    n.feedback = Comment.parseNotificationComment(jfeedback, false);
            } else if (n.type.equals(REPLY_COMMENT_PHOTO)) {
                JSONObject jparent = o.optJSONObject("parent"); //comment
                JSONObject jfeedback = o.optJSONObject("feedback");//comment
                if (jparent != null) {
                    n.parent = Comment.parseNotificationComment(jparent, false);
                    if (jparent.has("photo"))
                        n.photo = Photo.parse(jparent.optJSONObject("photo"));
                }
                if (jfeedback != null)
                    n.feedback = Comment.parseNotificationComment(jfeedback, false);
            } else if (n.type.equals(REPLY_COMMENT_VIDEO)) {
                JSONObject jparent = o.optJSONObject("parent"); //comment
                JSONObject jfeedback = o.optJSONObject("feedback");//comment
                if (jparent != null) {
                    n.parent = Comment.parseNotificationComment(jparent, false);
                    if (jparent.has("video"))
                        n.video = Video.parse(jparent.optJSONObject("video"));
                }
                if (jfeedback != null)
                    n.feedback = Comment.parseNotificationComment(jfeedback, false);
            } else if (n.type.equals(REPLY_TOPIC)) {
                JSONObject jparent = o.optJSONObject("parent"); //topic
                JSONObject jfeedback = o.optJSONObject("feedback");//comment
                if (jparent != null)
                    n.parent = GroupTopic.parseForNotifications(jparent);
                if (jfeedback != null)
                    n.feedback = Comment.parseNotificationComment(jfeedback, false);
            } else if (n.type.equals(LIKE_POST)) {
                JSONObject jparent = o.optJSONObject("parent"); //post
                JSONObject jfeedback = o.optJSONObject("feedback");//profiles
                if (jparent != null)
                    n.parent = WallMessage.parse(jparent);
                if (jfeedback != null)
                    n.feedback = getProfiles(jfeedback);
            } else if (n.type.equals(LIKE_COMMENT)) {
                JSONObject jparent = o.optJSONObject("parent"); //comment
                JSONObject jfeedback = o.optJSONObject("feedback");//profiles
                if (jparent != null)
                    n.parent = Comment.parseNotificationComment(jparent, true);
                if (jfeedback != null)
                    n.feedback = getProfiles(jfeedback);
            } else if (n.type.equals(LIKE_COMMENT_PHOTO)) {
                JSONObject jparent = o.optJSONObject("parent"); //comment
                JSONObject jfeedback = o.optJSONObject("feedback");//profiles
                if (jparent != null) {
                    n.parent = Comment.parseNotificationComment(jparent, false);
                    if (jparent.has("photo"))
                        n.photo = Photo.parse(jparent.optJSONObject("photo"));
                }
                if (jfeedback != null)
                    n.feedback = getProfiles(jfeedback);
            } else if (n.type.equals(LIKE_COMMENT_VIDEO)) {
                JSONObject jparent = o.optJSONObject("parent"); //comment
                JSONObject jfeedback = o.optJSONObject("feedback");//profiles
                if (jparent != null) {
                    n.parent = Comment.parseNotificationComment(jparent, false);
                    if (jparent.has("video"))
                        n.video = Video.parse(jparent.optJSONObject("video"));
                }
                if (jfeedback != null)
                    n.feedback = getProfiles(jfeedback);
            } else if (n.type.equals(LIKE_COMMENT_TOPIC)) {
                JSONObject jparent = o.optJSONObject("parent"); //comment
                JSONObject jfeedback = o.optJSONObject("feedback");//profiles
                if (jparent != null) {
                    n.parent = Comment.parseNotificationComment(jparent, false);
                    //TODO
                    //if(jparent.has("topic"))
                    //    n.xxx=Xxx.parse(jparent.optJSONObject("topic"));
                }
                if (jfeedback != null)
                    n.feedback = getProfiles(jfeedback);
            } else if (n.type.equals(LIKE_PHOTO)) {
                JSONObject jparent = o.optJSONObject("parent"); //photo
                JSONObject jfeedback = o.optJSONObject("feedback");//profiles
                if (jparent != null)
                    n.parent = Photo.parse(jparent);
                if (jfeedback != null)
                    n.feedback = getProfiles(jfeedback);
            } else if (n.type.equals(LIKE_VIDEO)) {
                JSONObject jparent = o.optJSONObject("parent"); //video
                JSONObject jfeedback = o.optJSONObject("feedback");//profiles
                if (jparent != null)
                    n.parent = Video.parse(jparent);
                if (jfeedback != null)
                    n.feedback = getProfiles(jfeedback);
            } else if (n.type.equals(COPY_POST)) {
                JSONObject jparent = o.optJSONObject("parent"); //wall
                JSONObject jfeedback = o.optJSONObject("feedback");//copy
                if (jparent != null)
                    n.parent = WallMessage.parse(jparent);
                if (jfeedback != null)
                    n.feedback = getCopies(jfeedback);
            } else if (n.type.equals(COPY_PHOTO)) {
                JSONObject jparent = o.optJSONObject("parent"); //photo
                JSONObject jfeedback = o.optJSONObject("feedback");//copy
                if (jparent != null)
                    n.parent = Photo.parse(jparent);
                if (jfeedback != null)
                    n.feedback = getCopies(jfeedback);
            } else if (n.type.equals(COPY_VIDEO)) {
                JSONObject jparent = o.optJSONObject("parent"); //video
                JSONObject jfeedback = o.optJSONObject("feedback");//copy
                if (jparent != null)
                    n.parent = Video.parse(jparent);
                if (jfeedback != null)
                    n.feedback = getCopies(jfeedback);
            }
            JSONObject jreply = o.optJSONObject("reply");
            if (jreply != null)
                n.reply = Reply.parse(jreply);
        } catch (JSONException ex) {
            ex.printStackTrace();
            return null;
        }
        return n;
    }

    /**
     * Parses the "items" array of a notifications response, silently
     * skipping entries that are not objects or fail to parse.
     */
    public static ArrayList<Notification> parseNotifications(JSONArray jnotifications) throws JSONException {
        ArrayList<Notification> notifications = new ArrayList<Notification>();
        for (int i = 0; i < jnotifications.length(); i++) {
            if (!(jnotifications.get(i) instanceof JSONObject))
                continue;
            JSONObject jgroup = (JSONObject) jnotifications.get(i);
            Notification n = Notification.parse(jgroup);
            if (n != null)
                notifications.add(n);
        }
        return notifications;
    }

    /**
     * Extracts the "from_id" of each profile in a "feedback" block.
     *
     * @return list of user ids (as Long); empty when no "items" array exists
     */
    public static ArrayList<Object> getProfiles(JSONObject jfeedback) throws JSONException {
        ArrayList<Object> ids = new ArrayList<Object>();
        JSONArray items = jfeedback.optJSONArray("items");
        if (items == null)
            return ids;
        for (int i = 0; i < items.length(); i++) {
            JSONObject j_id = items.optJSONObject(i);
            if (j_id != null)
                ids.add(j_id.optLong("from_id"));
        }
        return ids;
    }

    /**
     * Extracts (id, owner_id) pairs from a "feedback" block describing
     * copies (reposts).
     *
     * @return list of {@code IdsPair}; empty when no "items" array exists
     */
    public static ArrayList<Object> getCopies(JSONObject jfeedback) throws JSONException {
        ArrayList<Object> ids = new ArrayList<Object>();
        JSONArray items = jfeedback.optJSONArray("items");
        if (items == null)
            return ids;
        for (int i = 0; i < items.length(); i++) {
            JSONObject j_id = items.optJSONObject(i);
            if (j_id == null)
                continue;
            // optLong returns a primitive long (0 when absent), so the old
            // null checks on boxed values could never fail; keep the values
            // primitive and always add the pair, as before.
            IdsPair c = new IdsPair();
            c.id = j_id.optLong("id");
            c.owner_id = j_id.optLong("from_id");
            ids.add(c);
        }
        return ids;
    }
}
| |
/*
* Copyright 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.template.soy.jbcsrc;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.template.soy.data.SoyValueProvider;
import com.google.template.soy.exprtree.DataAccessNode;
import com.google.template.soy.exprtree.ExprNode;
import com.google.template.soy.exprtree.ExprRootNode;
import com.google.template.soy.exprtree.NullNode;
import com.google.template.soy.exprtree.OperatorNodes.ConditionalOpNode;
import com.google.template.soy.exprtree.OperatorNodes.NullCoalescingOpNode;
import com.google.template.soy.exprtree.VarRefNode;
import com.google.template.soy.jbcsrc.ExpressionCompiler.BasicExpressionCompiler;
import com.google.template.soy.jbcsrc.restricted.BytecodeUtils;
import com.google.template.soy.jbcsrc.restricted.CodeBuilder;
import com.google.template.soy.jbcsrc.restricted.Expression;
import com.google.template.soy.jbcsrc.restricted.FieldRef;
import com.google.template.soy.jbcsrc.restricted.MethodRef;
import com.google.template.soy.jbcsrc.restricted.SoyExpression;
import com.google.template.soy.jbcsrc.restricted.SoyRuntimeType;
import com.google.template.soy.soytree.defn.LocalVar;
import com.google.template.soy.soytree.defn.TemplateParam;
import java.util.Optional;
import javax.annotation.Nullable;
import org.objectweb.asm.Label;
import org.objectweb.asm.Type;
/**
* Attempts to compile an {@link ExprNode} to an {@link Expression} for a {@link SoyValueProvider}
* in order to preserve laziness.
*
* <p>There are two ways to use this depending on the specific requirements of the caller
*
* <ul>
* <li>{@link #compileAvoidingBoxing(ExprNode, Label)} attempts to compile the expression to a
* {@link SoyValueProvider} but without introducing any unnecessary boxing operations.
* Generating detach logic is OK. This case is for print operations, where callers may want to
* call {@link SoyValueProvider#renderAndResolve} to incrementally print the value. However,
* this is only desirable if the expression is naturally a {@link SoyValueProvider}.
* <li>{@link #compileAvoidingDetaches(ExprNode)} attempts to compile the expression to a {@link
* SoyValueProvider} with no detach logic. This is for passing data to templates or defining
* variables with {@code let} statements. In these cases boxing operations are fine (because
* the alternative is to use the {@link LazyClosureCompiler} which necessarily boxes the
* expression into a custom SoyValueProvider.
* </ul>
*
* <p>This is used as a basic optimization and as a necessary tool to implement template
* transclusions. If a template has a parameter {@code foo} then we want to be able to render it via
* {@link SoyValueProvider#renderAndResolve} so that we can render it incrementally.
*/
final class ExpressionToSoyValueProviderCompiler {
  /**
   * Create an expression compiler that can implement complex detaching logic with the given {@link
   * ExpressionDetacher.Factory}
   */
  static ExpressionToSoyValueProviderCompiler create(
      TemplateVariableManager varManager,
      ExpressionCompiler exprCompiler,
      TemplateParameterLookup variables,
      ExpressionDetacher.Factory detacherFactory) {
    return new ExpressionToSoyValueProviderCompiler(
        varManager, exprCompiler, variables, detacherFactory);
  }

  private final TemplateParameterLookup variables;
  private final ExpressionCompiler exprCompiler;
  private final TemplateVariableManager varManager;
  private final ExpressionDetacher.Factory detacherFactory;

  private ExpressionToSoyValueProviderCompiler(
      TemplateVariableManager varManager,
      ExpressionCompiler exprCompiler,
      TemplateParameterLookup variables,
      ExpressionDetacher.Factory detacherFactory) {
    this.exprCompiler = exprCompiler;
    this.variables = variables;
    this.varManager = varManager;
    this.detacherFactory = detacherFactory;
  }

  /**
   * Compiles the given expression tree to a sequence of bytecode in the current method visitor.
   *
   * <p>If successful, the generated bytecode will resolve to a {@link SoyValueProvider} if it can
   * be done without introducing unnecessary boxing operations. This is intended for situations
   * (like print operations) where calling {@link SoyValueProvider#renderAndResolve} would be better
   * than calling {@link #toString()} and passing directly to the output.
   *
   * <p>TODO(lukes): this method is confusingly named
   */
  Optional<Expression> compileAvoidingBoxing(ExprNode node, Label reattachPoint) {
    checkNotNull(node);
    // Detaches are allowed in this mode, so a detacher is created; boxing is
    // disallowed (exprCompiler is passed as null to the visitor).
    ExpressionDetacher detacher = detacherFactory.createExpressionDetacher(reattachPoint);
    return new CompilerVisitor(
            variables,
            varManager,
            /*exprCompiler=*/ null,
            exprCompiler.asBasicCompiler(detacher),
            detacher)
        .exec(node);
  }

  /**
   * Compiles the given expression tree to a sequence of bytecode in the current method visitor.
   *
   * <p>If successful, the generated bytecode will resolve to a {@link SoyValueProvider} if it can
   * be done without introducing any detach operations. This is intended for situations where we
   * need to model the expression as a SoyValueProvider to satisfy a contract (e.g. let nodes and
   * params), but we also want to preserve any laziness. So boxing is fine, but detaches are not.
   */
  Optional<Expression> compileAvoidingDetaches(ExprNode node) {
    checkNotNull(node);
    // Boxing is allowed in this mode; detaches are disallowed (both the
    // detaching compiler and the detacher are passed as null).
    return new CompilerVisitor(
            variables,
            varManager,
            exprCompiler,
            /*detachingExprCompiler=*/ null,
            /*detacher=*/ null)
        .exec(node);
  }

  /**
   * Visitor that attempts to compile each node to a SoyValueProvider-typed
   * {@link Expression}. Returns {@link Optional#empty()} for nodes it cannot
   * handle under the active mode (boxing-allowed vs. detach-allowed).
   */
  private static final class CompilerVisitor
      extends EnhancedAbstractExprNodeVisitor<Optional<Expression>> {
    final TemplateParameterLookup variables;
    final TemplateVariableManager varManager;
    // depending on the mode exprCompiler will be null, or detachingExprCompiler/detacher will be
    // null.
    @Nullable final ExpressionCompiler exprCompiler;
    @Nullable final BasicExpressionCompiler detachingExprCompiler;
    @Nullable final ExpressionDetacher detacher;

    CompilerVisitor(
        TemplateParameterLookup variables,
        TemplateVariableManager varManager,
        @Nullable ExpressionCompiler exprCompiler,
        @Nullable BasicExpressionCompiler detachingExprCompiler,
        @Nullable ExpressionDetacher detacher) {
      this.variables = variables;
      // Exactly one of the two compilation modes must be active, and the
      // detacher accompanies the detaching compiler.
      checkArgument((exprCompiler == null) != (detachingExprCompiler == null));
      checkArgument((detacher == null) == (detachingExprCompiler == null));
      this.exprCompiler = exprCompiler;
      this.detachingExprCompiler = detachingExprCompiler;
      this.detacher = detacher;
      this.varManager = varManager;
    }

    private boolean allowsBoxing() {
      return exprCompiler != null;
    }

    private boolean allowsDetaches() {
      return detachingExprCompiler != null;
    }

    @Override
    protected final Optional<Expression> visitExprRootNode(ExprRootNode node) {
      return visit(node.getRoot());
    }

    // Primitive value constants
    @Override
    protected Optional<Expression> visitNullNode(NullNode node) {
      // unlike other primitives, this doesn't really count as boxing, just a read of a static
      // constant field. so we always do it
      return Optional.of(FieldRef.NULL_PROVIDER.accessor());
    }

    @Override
    protected Optional<Expression> visitNullCoalescingOpNode(NullCoalescingOpNode node) {
      // All non-trivial ?: will require detaches for the left hand side.
      if (allowsDetaches()) {
        Optional<Expression> maybeLeft = visit(node.getLeftChild());
        Optional<Expression> maybeRight = visit(node.getRightChild());
        // Logging statements get dropped when a value is converted to a SoyValue. If at least one
        // side can be compiled to a SoyValueProvider, there could be logging statements in it, so
        // we need to compile the whole expression to a SoyValueProvider.
        if (maybeLeft.isPresent() || maybeRight.isPresent()) {
          // Get the SoyValueProviders, or box so both left and right are SoyValueProviders.
          Expression right =
              maybeRight.orElseGet(
                  () -> compileToSoyValueProviderWithDetaching(node.getRightChild()));
          Expression left;
          if (maybeLeft.isPresent()) {
            // If left can be compiled to a SoyValueProvider, resolve it to check if it's null.
            final Expression leftSVP = maybeLeft.get();
            // Put the SoyValueProvider on the stack twice since we'll need it later.
            Expression leftDup =
                new Expression(leftSVP.resultType(), leftSVP.features(), leftSVP.location()) {
                  @Override
                  protected void doGen(CodeBuilder cb) {
                    leftSVP.gen(cb); // stack: SVP
                    cb.dup(); // stack: SVP, SVP
                  }
                };
            // Resolve the provider, so we can check if it's null.
            final Expression resolved =
                detacher
                    .resolveSoyValueProvider(leftDup)
                    .checkedCast(
                        SoyRuntimeType.getBoxedType(node.getLeftChild().getType()).runtimeType());
            // But throw away the resolved value (since it won't have logging calls in it) and
            // instead use the extra SoyValueProvider on the stack from before.
            left =
                new Expression(leftSVP.resultType(), leftSVP.features(), leftSVP.location()) {
                  @Override
                  protected void doGen(CodeBuilder cb) {
                    resolved.gen(cb); // stack: SVP, SV
                    cb.pop(); // stack: SVP
                  }
                };
          } else {
            // If left cannot be compiled to a SoyValueProvider, compile it to a SoyValue and box it
            // into a SoyValueProvider.
            left = compileToSoyValueProviderWithDetaching(node.getLeftChild());
          }
          // Convert left to null if it's a SoyValueProvider wrapping null, for the null check
          // below.
          left = MethodRef.SOY_VALUE_PROVIDER_OR_NULL.invoke(left);
          return Optional.of(BytecodeUtils.firstNonNull(left, right));
        }
      }
      return visitExprNode(node);
    }

    // Compiles the expression to a SoyValue (may emit detach logic) and then
    // boxes it into a SoyValueProvider. Only valid in detach-allowed mode.
    private Expression compileToSoyValueProviderWithDetaching(ExprNode expr) {
      return detachingExprCompiler.compile(expr).boxAsSoyValueProvider();
    }

    @Override
    protected final Optional<Expression> visitConditionalOpNode(ConditionalOpNode node) {
      if (allowsDetaches()) {
        Optional<Expression> trueBranch = visit(node.getChild(1));
        Optional<Expression> falseBranch = visit(node.getChild(2));
        // Compile to a SoyValueProvider if either side can be compiled to a SoyValueProvider. The
        // SoyValueProvider side(s) may have logging statements in them, so need to stay
        // SoyValueProviders, otherwise the logging statements will get dropped.
        if (trueBranch.isPresent() || falseBranch.isPresent()) {
          Expression condition = detachingExprCompiler.compile(node.getChild(0)).coerceToBoolean();
          return Optional.of(
              BytecodeUtils.ternary(
                  condition,
                  trueBranch.orElseGet(
                      () -> compileToSoyValueProviderWithDetaching(node.getChild(1))),
                  falseBranch.orElseGet(
                      () -> compileToSoyValueProviderWithDetaching(node.getChild(2))),
                  // The ternary gets its result type from the true branch, which could be a
                  // SoyValue. Since at least one of the branches is a SoyValueProvider, force the
                  // result type to SoyValueProvider so downstream code knows to resolve it before
                  // using it.
                  BytecodeUtils.SOY_VALUE_PROVIDER_TYPE));
        } else {
          return Optional.empty();
        }
      }
      return visitExprNode(node);
    }

    @Override
    Optional<Expression> visitForLoopVar(VarRefNode varRef, LocalVar local) {
      Expression loopVar = variables.getLocal(local);
      if (loopVar.resultType().equals(Type.LONG_TYPE)) {
        // this happens in foreach loops over ranges
        if (allowsBoxing()) {
          return Optional.of(SoyExpression.forInt(loopVar).box());
        }
        return Optional.empty();
      } else {
        return Optional.of(loopVar);
      }
    }

    @Override
    Optional<Expression> visitParam(VarRefNode varRef, TemplateParam param) {
      // Params are already SoyValueProviders; no boxing or detaching needed.
      return Optional.of(variables.getParam(param));
    }

    @Override
    Optional<Expression> visitLetNodeVar(VarRefNode varRef, LocalVar local) {
      // Let variables are stored as SoyValueProviders; return them directly.
      return Optional.of(variables.getLocal(local));
    }

    @Override
    protected Optional<Expression> visitDataAccessNode(DataAccessNode node) {
      // TODO(lukes): implement special case for allowsDetaches(). The complex part will be sharing
      // null safety access logic with the ExpressionCompiler
      return visitExprNode(node);
    }

    @Override
    protected final Optional<Expression> visitExprNode(ExprNode node) {
      // Fallback: in boxing-allowed mode, compile to a SoyValue without
      // detaches and box the result; otherwise give up on this node.
      if (allowsBoxing()) {
        Optional<SoyExpression> compileWithNoDetaches = exprCompiler.compileWithNoDetaches(node);
        if (compileWithNoDetaches.isPresent()) {
          return Optional.of(compileWithNoDetaches.get().boxAsSoyValueProvider());
        }
      }
      return Optional.empty();
    }
  }
}
| |
// This file was generated automatically by the Snowball to Java compiler
package org.tartarus.snowball.ext;
import org.tartarus.snowball.Among;
/**
 * Dutch stemmer produced by the Snowball-to-Java compiler; it implements the
 * stemming algorithm defined by the Dutch snowball script.
 *
 * <p>NOTE(review): this is generated code — do not hand-edit the algorithm,
 * regenerate it from the snowball source instead. The mutable parsing state
 * (cursor, limit, limit_backward, bra, ket) and the primitive operations
 * (find_among, eq_s, slice_from, in_grouping, ...) are inherited from the
 * Snowball runtime superclass.
 */
public class dutchStemmer extends org.tartarus.snowball.SnowballStemmer {
  private static final long serialVersionUID = 1L;
  // Shared receiver instance referenced by every Among entry (generated pattern).
  private final static dutchStemmer methodObject = new dutchStemmer();
  // a_0: accented-vowel forms normalized by r_prelude (e.g. \u00E1/\u00E4 -> "a").
  private final static Among a_0[] = {
      new Among("", -1, 6, "", methodObject),
      new Among("\u00E1", 0, 1, "", methodObject),
      new Among("\u00E4", 0, 1, "", methodObject),
      new Among("\u00E9", 0, 2, "", methodObject),
      new Among("\u00EB", 0, 2, "", methodObject),
      new Among("\u00ED", 0, 3, "", methodObject),
      new Among("\u00EF", 0, 3, "", methodObject),
      new Among("\u00F3", 0, 4, "", methodObject),
      new Among("\u00F6", 0, 4, "", methodObject),
      new Among("\u00FA", 0, 5, "", methodObject),
      new Among("\u00FC", 0, 5, "", methodObject) };
  // a_1: markers "I"/"Y" mapped back to "i"/"y" by r_postlude.
  private final static Among a_1[] = {
      new Among("", -1, 3, "", methodObject),
      new Among("I", 0, 2, "", methodObject),
      new Among("Y", 0, 1, "", methodObject) };
  // a_2: doubled consonants undoubled by r_undouble.
  private final static Among a_2[] = {
      new Among("dd", -1, -1, "", methodObject),
      new Among("kk", -1, -1, "", methodObject),
      new Among("tt", -1, -1, "", methodObject) };
  // a_3: first group of suffixes handled by r_standard_suffix.
  private final static Among a_3[] = {
      new Among("ene", -1, 2, "", methodObject),
      new Among("se", -1, 3, "", methodObject),
      new Among("en", -1, 2, "", methodObject),
      new Among("heden", 2, 1, "", methodObject),
      new Among("s", -1, 3, "", methodObject) };
  // a_4: second group of suffixes handled by r_standard_suffix.
  private final static Among a_4[] = {
      new Among("end", -1, 1, "", methodObject),
      new Among("ig", -1, 2, "", methodObject),
      new Among("ing", -1, 1, "", methodObject),
      new Among("lijk", -1, 3, "", methodObject),
      new Among("baar", -1, 4, "", methodObject),
      new Among("bar", -1, 5, "", methodObject) };
  // a_5: double-vowel pairs checked before final undoubling in r_standard_suffix.
  private final static Among a_5[] = {
      new Among("aa", -1, -1, "", methodObject),
      new Among("ee", -1, -1, "", methodObject),
      new Among("oo", -1, -1, "", methodObject),
      new Among("uu", -1, -1, "", methodObject) };
  // Character-group bitmaps consumed by in_grouping/out_grouping; per the
  // snowball script these encode the vowel sets (g_v), vowels plus I (g_v_I)
  // and vowels plus j (g_v_j).
  private static final char g_v[] = { 17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0,
      0, 0, 0, 0, 128 };
  private static final char g_v_I[] = { 1, 0, 0, 17, 65, 16, 1, 0, 0, 0, 0,
      0, 0, 0, 0, 0, 0, 0, 0, 128 };
  private static final char g_v_j[] = { 17, 67, 16, 1, 0, 0, 0, 0, 0, 0, 0,
      0, 0, 0, 0, 0, 128 };
  // Region marks set by r_mark_regions (snowball regions p1/p2).
  private int I_p2;
  private int I_p1;
  // Set by r_e_ending when a trailing "e" was removed; tested in r_standard_suffix.
  private boolean B_e_found;
  // Copies this stemmer's algorithm state (plus inherited buffer state) from another instance.
  private void copy_from(dutchStemmer other) {
    I_p2 = other.I_p2;
    I_p1 = other.I_p1;
    B_e_found = other.B_e_found;
    super.copy_from(other);
  }
  // Prelude step: strips accents from vowels (via a_0) and upper-cases an
  // initial "y" and any "i"/"y" between vowels to "I"/"Y" so later steps can
  // treat them as consonant-like markers.
  private boolean r_prelude() {
    int among_var;
    int v_1;
    int v_2;
    int v_3;
    int v_4;
    int v_5;
    int v_6;
    // (, line 41
    // test, line 42
    v_1 = cursor;
    // repeat, line 42
    replab0: while (true) {
      v_2 = cursor;
      lab1: do {
        // (, line 42
        // [, line 43
        bra = cursor;
        // substring, line 43
        among_var = find_among(a_0, 11);
        if (among_var == 0) {
          break lab1;
        }
        // ], line 43
        ket = cursor;
        switch (among_var) {
          case 0:
            break lab1;
          case 1:
            // (, line 45
            // <-, line 45
            slice_from("a");
            break;
          case 2:
            // (, line 47
            // <-, line 47
            slice_from("e");
            break;
          case 3:
            // (, line 49
            // <-, line 49
            slice_from("i");
            break;
          case 4:
            // (, line 51
            // <-, line 51
            slice_from("o");
            break;
          case 5:
            // (, line 53
            // <-, line 53
            slice_from("u");
            break;
          case 6:
            // (, line 54
            // next, line 54
            if (cursor >= limit) {
              break lab1;
            }
            cursor++;
            break;
        }
        continue replab0;
      } while (false);
      cursor = v_2;
      break replab0;
    }
    cursor = v_1;
    // try, line 57
    v_3 = cursor;
    lab2: do {
      // (, line 57
      // [, line 57
      bra = cursor;
      // literal, line 57
      if (!(eq_s(1, "y"))) {
        cursor = v_3;
        break lab2;
      }
      // ], line 57
      ket = cursor;
      // <-, line 57
      slice_from("Y");
    } while (false);
    // repeat, line 58
    replab3: while (true) {
      v_4 = cursor;
      lab4: do {
        // goto, line 58
        golab5: while (true) {
          v_5 = cursor;
          lab6: do {
            // (, line 58
            if (!(in_grouping(g_v, 97, 232))) {
              break lab6;
            }
            // [, line 59
            bra = cursor;
            // or, line 59
            lab7: do {
              v_6 = cursor;
              lab8: do {
                // (, line 59
                // literal, line 59
                if (!(eq_s(1, "i"))) {
                  break lab8;
                }
                // ], line 59
                ket = cursor;
                if (!(in_grouping(g_v, 97, 232))) {
                  break lab8;
                }
                // <-, line 59
                slice_from("I");
                break lab7;
              } while (false);
              cursor = v_6;
              // (, line 60
              // literal, line 60
              if (!(eq_s(1, "y"))) {
                break lab6;
              }
              // ], line 60
              ket = cursor;
              // <-, line 60
              slice_from("Y");
            } while (false);
            cursor = v_5;
            break golab5;
          } while (false);
          cursor = v_5;
          if (cursor >= limit) {
            break lab4;
          }
          cursor++;
        }
        continue replab3;
      } while (false);
      cursor = v_4;
      break replab3;
    }
    return true;
  }
  // Computes the snowball regions: I_p1 (with a minimum of 3) and I_p2, each
  // set after a vowel/non-vowel transition; fails when the word is too short.
  private boolean r_mark_regions() {
    // (, line 64
    I_p1 = limit;
    I_p2 = limit;
    // gopast, line 69
    golab0: while (true) {
      lab1: do {
        if (!(in_grouping(g_v, 97, 232))) {
          break lab1;
        }
        break golab0;
      } while (false);
      if (cursor >= limit) {
        return false;
      }
      cursor++;
    }
    // gopast, line 69
    golab2: while (true) {
      lab3: do {
        if (!(out_grouping(g_v, 97, 232))) {
          break lab3;
        }
        break golab2;
      } while (false);
      if (cursor >= limit) {
        return false;
      }
      cursor++;
    }
    // setmark p1, line 69
    I_p1 = cursor;
    // try, line 70
    lab4: do {
      // (, line 70
      if (!(I_p1 < 3)) {
        break lab4;
      }
      I_p1 = 3;
    } while (false);
    // gopast, line 71
    golab5: while (true) {
      lab6: do {
        if (!(in_grouping(g_v, 97, 232))) {
          break lab6;
        }
        break golab5;
      } while (false);
      if (cursor >= limit) {
        return false;
      }
      cursor++;
    }
    // gopast, line 71
    golab7: while (true) {
      lab8: do {
        if (!(out_grouping(g_v, 97, 232))) {
          break lab8;
        }
        break golab7;
      } while (false);
      if (cursor >= limit) {
        return false;
      }
      cursor++;
    }
    // setmark p2, line 71
    I_p2 = cursor;
    return true;
  }
  // Postlude step: maps the temporary markers "Y"/"I" (see a_1) back to
  // lower-case "y"/"i".
  private boolean r_postlude() {
    int among_var;
    int v_1;
    // repeat, line 75
    replab0: while (true) {
      v_1 = cursor;
      lab1: do {
        // (, line 75
        // [, line 77
        bra = cursor;
        // substring, line 77
        among_var = find_among(a_1, 3);
        if (among_var == 0) {
          break lab1;
        }
        // ], line 77
        ket = cursor;
        switch (among_var) {
          case 0:
            break lab1;
          case 1:
            // (, line 78
            // <-, line 78
            slice_from("y");
            break;
          case 2:
            // (, line 79
            // <-, line 79
            slice_from("i");
            break;
          case 3:
            // (, line 80
            // next, line 80
            if (cursor >= limit) {
              break lab1;
            }
            cursor++;
            break;
        }
        continue replab0;
      } while (false);
      cursor = v_1;
      break replab0;
    }
    return true;
  }
  // True when the (backward-mode) cursor lies inside region R1.
  private boolean r_R1() {
    if (!(I_p1 <= cursor)) {
      return false;
    }
    return true;
  }
  // True when the (backward-mode) cursor lies inside region R2.
  private boolean r_R2() {
    if (!(I_p2 <= cursor)) {
      return false;
    }
    return true;
  }
  // Removes one letter of a doubled consonant ("dd", "kk", "tt" — see a_2)
  // at the current backward position.
  private boolean r_undouble() {
    int v_1;
    // (, line 90
    // test, line 91
    v_1 = limit - cursor;
    // among, line 91
    if (find_among_b(a_2, 3) == 0) {
      return false;
    }
    cursor = limit - v_1;
    // [, line 91
    ket = cursor;
    // next, line 91
    if (cursor <= limit_backward) {
      return false;
    }
    cursor--;
    // ], line 91
    bra = cursor;
    // delete, line 91
    slice_del();
    return true;
  }
  // Deletes a trailing "e" (when in R1 and preceded by a non-vowel), records
  // the deletion in B_e_found, and undoubles the preceding consonant.
  private boolean r_e_ending() {
    int v_1;
    // (, line 94
    // unset e_found, line 95
    B_e_found = false;
    // [, line 96
    ket = cursor;
    // literal, line 96
    if (!(eq_s_b(1, "e"))) {
      return false;
    }
    // ], line 96
    bra = cursor;
    // call R1, line 96
    if (!r_R1()) {
      return false;
    }
    // test, line 96
    v_1 = limit - cursor;
    if (!(out_grouping_b(g_v, 97, 232))) {
      return false;
    }
    cursor = limit - v_1;
    // delete, line 96
    slice_del();
    // set e_found, line 97
    B_e_found = true;
    // call undouble, line 98
    if (!r_undouble()) {
      return false;
    }
    return true;
  }
  // Deletes an "en"-style ending (when in R1, preceded by a non-vowel, and not
  // part of "gem"), then undoubles the preceding consonant.
  private boolean r_en_ending() {
    int v_1;
    int v_2;
    // (, line 101
    // call R1, line 102
    if (!r_R1()) {
      return false;
    }
    // and, line 102
    v_1 = limit - cursor;
    if (!(out_grouping_b(g_v, 97, 232))) {
      return false;
    }
    cursor = limit - v_1;
    // not, line 102
    {
      v_2 = limit - cursor;
      lab0: do {
        // literal, line 102
        if (!(eq_s_b(3, "gem"))) {
          break lab0;
        }
        return false;
      } while (false);
      cursor = limit - v_2;
    }
    // delete, line 102
    slice_del();
    // call undouble, line 103
    if (!r_undouble()) {
      return false;
    }
    return true;
  }
  // Main backward suffix-stripping step: applies the a_3 and a_4 suffix
  // tables, the "heid"/"en" combination, and the final vowel-undoubling rule.
  private boolean r_standard_suffix() {
    int among_var;
    int v_1;
    int v_2;
    int v_3;
    int v_4;
    int v_5;
    int v_6;
    int v_7;
    int v_8;
    int v_9;
    int v_10;
    // (, line 106
    // do, line 107
    v_1 = limit - cursor;
    lab0: do {
      // (, line 107
      // [, line 108
      ket = cursor;
      // substring, line 108
      among_var = find_among_b(a_3, 5);
      if (among_var == 0) {
        break lab0;
      }
      // ], line 108
      bra = cursor;
      switch (among_var) {
        case 0:
          break lab0;
        case 1:
          // (, line 110
          // call R1, line 110
          if (!r_R1()) {
            break lab0;
          }
          // <-, line 110
          slice_from("heid");
          break;
        case 2:
          // (, line 113
          // call en_ending, line 113
          if (!r_en_ending()) {
            break lab0;
          }
          break;
        case 3:
          // (, line 116
          // call R1, line 116
          if (!r_R1()) {
            break lab0;
          }
          if (!(out_grouping_b(g_v_j, 97, 232))) {
            break lab0;
          }
          // delete, line 116
          slice_del();
          break;
      }
    } while (false);
    cursor = limit - v_1;
    // do, line 120
    v_2 = limit - cursor;
    lab1: do {
      // call e_ending, line 120
      if (!r_e_ending()) {
        break lab1;
      }
    } while (false);
    cursor = limit - v_2;
    // do, line 122
    v_3 = limit - cursor;
    lab2: do {
      // (, line 122
      // [, line 122
      ket = cursor;
      // literal, line 122
      if (!(eq_s_b(4, "heid"))) {
        break lab2;
      }
      // ], line 122
      bra = cursor;
      // call R2, line 122
      if (!r_R2()) {
        break lab2;
      }
      // not, line 122
      {
        v_4 = limit - cursor;
        lab3: do {
          // literal, line 122
          if (!(eq_s_b(1, "c"))) {
            break lab3;
          }
          break lab2;
        } while (false);
        cursor = limit - v_4;
      }
      // delete, line 122
      slice_del();
      // [, line 123
      ket = cursor;
      // literal, line 123
      if (!(eq_s_b(2, "en"))) {
        break lab2;
      }
      // ], line 123
      bra = cursor;
      // call en_ending, line 123
      if (!r_en_ending()) {
        break lab2;
      }
    } while (false);
    cursor = limit - v_3;
    // do, line 126
    v_5 = limit - cursor;
    lab4: do {
      // (, line 126
      // [, line 127
      ket = cursor;
      // substring, line 127
      among_var = find_among_b(a_4, 6);
      if (among_var == 0) {
        break lab4;
      }
      // ], line 127
      bra = cursor;
      switch (among_var) {
        case 0:
          break lab4;
        case 1:
          // (, line 129
          // call R2, line 129
          if (!r_R2()) {
            break lab4;
          }
          // delete, line 129
          slice_del();
          // or, line 130
          lab5: do {
            v_6 = limit - cursor;
            lab6: do {
              // (, line 130
              // [, line 130
              ket = cursor;
              // literal, line 130
              if (!(eq_s_b(2, "ig"))) {
                break lab6;
              }
              // ], line 130
              bra = cursor;
              // call R2, line 130
              if (!r_R2()) {
                break lab6;
              }
              // not, line 130
              {
                v_7 = limit - cursor;
                lab7: do {
                  // literal, line 130
                  if (!(eq_s_b(1, "e"))) {
                    break lab7;
                  }
                  break lab6;
                } while (false);
                cursor = limit - v_7;
              }
              // delete, line 130
              slice_del();
              break lab5;
            } while (false);
            cursor = limit - v_6;
            // call undouble, line 130
            if (!r_undouble()) {
              break lab4;
            }
          } while (false);
          break;
        case 2:
          // (, line 133
          // call R2, line 133
          if (!r_R2()) {
            break lab4;
          }
          // not, line 133
          {
            v_8 = limit - cursor;
            lab8: do {
              // literal, line 133
              if (!(eq_s_b(1, "e"))) {
                break lab8;
              }
              break lab4;
            } while (false);
            cursor = limit - v_8;
          }
          // delete, line 133
          slice_del();
          break;
        case 3:
          // (, line 136
          // call R2, line 136
          if (!r_R2()) {
            break lab4;
          }
          // delete, line 136
          slice_del();
          // call e_ending, line 136
          if (!r_e_ending()) {
            break lab4;
          }
          break;
        case 4:
          // (, line 139
          // call R2, line 139
          if (!r_R2()) {
            break lab4;
          }
          // delete, line 139
          slice_del();
          break;
        case 5:
          // (, line 142
          // call R2, line 142
          if (!r_R2()) {
            break lab4;
          }
          // Boolean test e_found, line 142
          if (!(B_e_found)) {
            break lab4;
          }
          // delete, line 142
          slice_del();
          break;
      }
    } while (false);
    cursor = limit - v_5;
    // do, line 146
    v_9 = limit - cursor;
    lab9: do {
      // (, line 146
      if (!(out_grouping_b(g_v_I, 73, 232))) {
        break lab9;
      }
      // test, line 148
      v_10 = limit - cursor;
      // (, line 148
      // among, line 149
      if (find_among_b(a_5, 4) == 0) {
        break lab9;
      }
      if (!(out_grouping_b(g_v, 97, 232))) {
        break lab9;
      }
      cursor = limit - v_10;
      // [, line 152
      ket = cursor;
      // next, line 152
      if (cursor <= limit_backward) {
        break lab9;
      }
      cursor--;
      // ], line 152
      bra = cursor;
      // delete, line 152
      slice_del();
    } while (false);
    cursor = limit - v_9;
    return true;
  }
  // Entry point: runs prelude and region marking forward, suffix stripping
  // backward, then the postlude. Always returns true (each step is optional).
  public boolean stem() {
    int v_1;
    int v_2;
    int v_3;
    int v_4;
    // (, line 157
    // do, line 159
    v_1 = cursor;
    lab0: do {
      // call prelude, line 159
      if (!r_prelude()) {
        break lab0;
      }
    } while (false);
    cursor = v_1;
    // do, line 160
    v_2 = cursor;
    lab1: do {
      // call mark_regions, line 160
      if (!r_mark_regions()) {
        break lab1;
      }
    } while (false);
    cursor = v_2;
    // backwards, line 161
    limit_backward = cursor;
    cursor = limit;
    // do, line 162
    v_3 = limit - cursor;
    lab2: do {
      // call standard_suffix, line 162
      if (!r_standard_suffix()) {
        break lab2;
      }
    } while (false);
    cursor = limit - v_3;
    cursor = limit_backward; // do, line 163
    v_4 = cursor;
    lab3: do {
      // call postlude, line 163
      if (!r_postlude()) {
        break lab3;
      }
    } while (false);
    cursor = v_4;
    return true;
  }
  // All instances are interchangeable (the class is stateless between calls),
  // so equality is by type only.
  public boolean equals(Object o) {
    return o instanceof dutchStemmer;
  }
  public int hashCode() {
    return dutchStemmer.class.getName().hashCode();
  }
}
| |
/*
* =============================================================================
*
* Copyright (c) 2011-2014, The THYMELEAF team (http://www.thymeleaf.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* =============================================================================
*/
package org.thymeleaf.standard.expression;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.thymeleaf.IEngineConfiguration;
import org.thymeleaf.context.IProcessingContext;
import org.thymeleaf.exceptions.TemplateProcessingException;
import org.thymeleaf.util.StringUtils;
import org.thymeleaf.util.Validate;
/**
*
* @author Daniel Fernández
*
* @since 2.1.0
*
*/
public final class FragmentSelectionUtils {

    private static final String TEMPLATE_NAME_CURRENT_TEMPLATE = "this";
    private static final String OPERATOR = "::";
    private static final String UNNAMED_PARAMETERS_PREFIX = "_arg";

    /**
     * Returns whether the parameters of the given fragment selection are all "synthetically
     * named", i.e. were auto-assigned names of the form {@code _arg<index>} at parse time because
     * the author specified only values (see {@link #parameterNamesAreSynthetic(Set)}).
     *
     * @param fragmentSelection the parsed fragment selection, not null
     * @param processingContext the context used to evaluate the parameter-name expressions
     * @param expContext the expression execution context
     * @return true if the selection has parameters and every parameter name is synthetic
     */
    public static boolean hasSyntheticParameters(
            final ParsedFragmentSelection fragmentSelection,
            final IProcessingContext processingContext,
            final StandardExpressionExecutionContext expContext) {
        // The parameter sequence will be considered "synthetically named" if its variable names are all synthetic
        // (see the "parameterNamesAreSynthetic" method for more info).
        if (!fragmentSelection.hasParameters()) {
            return false;
        }
        final AssignationSequence fragmentSelectionParameters = fragmentSelection.getParameters();
        final Set<String> variableNames = new HashSet<String>(fragmentSelectionParameters.size() + 2);
        for (final Assignation assignation : fragmentSelectionParameters.getAssignations()) {
            final IStandardExpression variableNameExpr = assignation.getLeft();
            final Object variableNameValue = variableNameExpr.execute(processingContext, expContext);
            // A name expression may evaluate to null; keep the null so the check below fails fast.
            final String variableName = (variableNameValue == null? null : variableNameValue.toString());
            variableNames.add(variableName);
        }
        return parameterNamesAreSynthetic(variableNames);
    }

    /**
     * Parses a fragment selection expression (e.g. {@code template :: fragment (params)}),
     * preprocessing the input first and using the expression cache when a configuration is
     * available.
     *
     * @param processingContext the processing context, not null
     * @param input the raw fragment selection text, not null
     * @return the parsed selection, or null if the input cannot be parsed
     */
    public static ParsedFragmentSelection parseFragmentSelection(
            final IProcessingContext processingContext, final String input) {
        Validate.notNull(processingContext, "Processing Context cannot be null");
        Validate.notNull(input, "Input cannot be null");
        final IEngineConfiguration configuration = processingContext.getConfiguration();
        final String preprocessedInput =
                StandardExpressionPreprocessor.preprocess(processingContext, input);
        if (configuration != null) {
            final ParsedFragmentSelection cachedFragmentSelection =
                    ExpressionCache.getFragmentSelectionFromCache(configuration, preprocessedInput);
            if (cachedFragmentSelection != null) {
                return cachedFragmentSelection;
            }
        }
        final ParsedFragmentSelection fragmentSelection =
                FragmentSelectionUtils.internalParseFragmentSelection(preprocessedInput.trim());
        if (fragmentSelection == null) {
            return null;
        }
        if (configuration != null) {
            ExpressionCache.putFragmentSelectionIntoCache(configuration, preprocessedInput, fragmentSelection);
        }
        return fragmentSelection;
    }

    /**
     * Parses an already-preprocessed fragment selection into its three parts: template name,
     * fragment spec and parameters. Returns null when the input is not a valid selection.
     */
    static ParsedFragmentSelection internalParseFragmentSelection(final String input) {
        if (StringUtils.isEmptyOrWhitespace(input)) {
            return null;
        }
        final String trimmedInput = input.trim();
        // A trailing "(...)" group, if present, holds the parameters.
        final int lastParenthesesGroupPos = indexOfLastParenthesesGroup(trimmedInput);
        final String inputWithoutParameters;
        String parametersStr;
        if (lastParenthesesGroupPos != -1) {
            parametersStr = trimmedInput.substring(lastParenthesesGroupPos).trim();
            inputWithoutParameters = trimmedInput.substring(0, lastParenthesesGroupPos).trim();
        } else {
            parametersStr = null;
            inputWithoutParameters = trimmedInput;
        }
        String templateNameStr;
        String fragmentSpecStr;
        final int operatorPos = inputWithoutParameters.indexOf(OPERATOR);
        if (operatorPos == -1) {
            // no operator means everything is considered "before operator" (there is template name, but no
            // fragment name -- template is to be included in its entirety).
            templateNameStr = inputWithoutParameters;
            fragmentSpecStr = null;
            if (StringUtils.isEmptyOrWhitespace(templateNameStr)) {
                if (parametersStr != null) {
                    // Parameters weren't parameters, they actually were the template name!
                    templateNameStr = parametersStr;
                    parametersStr = null;
                } else {
                    // parameters are null, so template name is empty, and therefore wrong.
                    return null;
                }
            }
        } else {
            // There IS operator: we should divide between template name (which can be empty) and fragment spec.
            templateNameStr = inputWithoutParameters.substring(0, operatorPos).trim();
            fragmentSpecStr = inputWithoutParameters.substring(operatorPos + OPERATOR.length()).trim();
            if (StringUtils.isEmptyOrWhitespace(fragmentSpecStr)) {
                if (parametersStr != null) {
                    // Parameters weren't parameters, they actually were the fragment spec!
                    fragmentSpecStr = parametersStr;
                    parametersStr = null;
                } else {
                    // parameters are null, so fragment specification is empty, and therefore wrong (because we
                    // have already established that the :: operator IS present).
                    return null;
                }
            }
        }
        final Expression templateNameExpression;
        if (!StringUtils.isEmptyOrWhitespace(templateNameStr)) {
            templateNameExpression = parseDefaultAsLiteral(templateNameStr);
            if (templateNameExpression == null) {
                return null;
            }
        } else {
            templateNameExpression = null;
        }
        final Expression fragmentSpecExpression;
        if (!StringUtils.isEmptyOrWhitespace(fragmentSpecStr)) {
            fragmentSpecExpression = parseDefaultAsLiteral(fragmentSpecStr);
            if (fragmentSpecExpression == null) {
                return null;
            }
        } else {
            fragmentSpecExpression = null;
        }
        if (!StringUtils.isEmptyOrWhitespace(parametersStr)) {
            // When parsing this, we don't allow parameters without value because we would be mistakenly
            // parsing as parameter names what in fact are values for synthetically named parameters.
            final AssignationSequence parametersAsSeq =
                    AssignationUtils.internalParseAssignationSequence(parametersStr, false);
            if (parametersAsSeq != null) {
                return new ParsedFragmentSelection(templateNameExpression, fragmentSpecExpression, parametersAsSeq);
            }
            // Parameters weren't parseable as an assignation sequence. So we should try parsing as Expression
            // sequence and create a synthetically named parameter sequence with the expressions in the sequence as
            // values.
            final ExpressionSequence parametersExpSeq =
                    ExpressionSequenceUtils.internalParseExpressionSequence(parametersStr);
            if (parametersExpSeq != null) {
                final AssignationSequence parametersAsSeqFromExp =
                        createSyntheticallyNamedParameterSequence(parametersExpSeq);
                return new ParsedFragmentSelection(templateNameExpression, fragmentSpecExpression, parametersAsSeqFromExp);
            }
            // The parameters str is not parsable neither as an assignation sequence nor as an expression sequence,
            // so we can come to the conclusion it is wrong.
            return null;
        }
        return new ParsedFragmentSelection(templateNameExpression, fragmentSpecExpression, null);
    }

    /**
     * Parses the input as a standard expression, falling back to wrapping it as a text literal
     * when it is not directly parseable. Returns null only for empty/whitespace input.
     */
    private static Expression parseDefaultAsLiteral(final String input) {
        if (StringUtils.isEmptyOrWhitespace(input)) {
            return null;
        }
        final Expression expr = Expression.parse(input);
        if (expr == null) {
            return Expression.parse(TextLiteralExpression.wrapStringIntoLiteral(input));
        }
        return expr;
    }

    /**
     * Returns the index of the '(' opening the last balanced parentheses group of the input, or
     * -1 when the input does not end in a parseable group (including the special case of a
     * trailing "()" which might be a "text()" node selector, and unbalanced parentheses).
     */
    private static int indexOfLastParenthesesGroup(final String input) {
        final int inputLen = input.length();
        final char finalC = input.charAt(inputLen - 1);
        if (finalC != ')') {
            // If there are parentheses, the last char must be an ending one.
            return -1;
        }
        int parenLevel = 1;
        for (int i = inputLen - 2; i >= 0; i--) {
            final char c = input.charAt(i);
            if (c == '(') {
                parenLevel--;
                if (parenLevel == 0) {
                    // We have closed a parenthesis at level 0, this might be what we were looking for.
                    if (i == (inputLen - 2)) {
                        // These are not real parameters, but "()", which might be a "text()" node selector.
                        return -1;
                    }
                    return i;
                }
            } else if (c == ')') {
                parenLevel++;
            }
        }
        // Cannot parse: will never be able to determine whether there are parameters or not, because they aren't
        // correctly closed. Just return -1 as if we didn't find parentheses at all.
        return -1;
    }

    /**
     * Returns whether every name in the set is "synthetic": non-null, starting with
     * {@code _arg} and followed only by digits (possibly none).
     *
     * @param parameterNames the set of names to check, not null
     * @return true if all names are synthetic
     */
    public static boolean parameterNamesAreSynthetic(final Set<String> parameterNames) {
        Validate.notNull(parameterNames, "Parameter names set cannot be null");
        // Parameter names in an assignation sequence will be considered "synthetic" if all variable names
        // start by "_arg", followed by a number. This will mean they have been automatically
        // assigned when parsed because no names were assigned.
        for (final String parameterName : parameterNames) {
            if (parameterName == null) {
                return false;
            }
            if (!parameterName.startsWith(UNNAMED_PARAMETERS_PREFIX)) {
                return false;
            }
            final int parameterNameLen = parameterName.length();
            for (int i = UNNAMED_PARAMETERS_PREFIX.length(); i < parameterNameLen; i++) {
                final char c = parameterName.charAt(i);
                if (!Character.isDigit(c)) {
                    return false;
                }
            }
        }
        return true;
    }

    /** Builds the synthetic parameter name for the given position, e.g. 0 -> "_arg0". */
    static String getSyntheticParameterNameForIndex(final int i) {
        return UNNAMED_PARAMETERS_PREFIX + i;
    }

    /**
     * Wraps a plain expression sequence into an assignation sequence whose left-hand sides are
     * synthetic names ("_arg0", "_arg1", ...), one per expression.
     */
    private static AssignationSequence createSyntheticallyNamedParameterSequence(final ExpressionSequence expSeq) {
        final List<Assignation> assignations = new ArrayList<Assignation>(expSeq.size() + 2);
        int argIndex = 0;
        for (final IStandardExpression expression : expSeq.getExpressions()) {
            final IStandardExpression parameterName =
                    Expression.parse(TextLiteralExpression.wrapStringIntoLiteral(UNNAMED_PARAMETERS_PREFIX + argIndex++));
            assignations.add(new Assignation(parameterName, expression));
        }
        return new AssignationSequence(assignations);
    }

    /**
     * Evaluates a parsed fragment selection: resolves the template name (null for "this" or when
     * unspecified, meaning the current template), the fragment selector and the parameter values.
     *
     * @param processingContext the processing context, not null
     * @param fragmentSelection the parsed selection, not null
     * @return the processed selection, never null
     * @throws TemplateProcessingException if the template name or fragment selector evaluates to null
     */
    public static ProcessedFragmentSelection processFragmentSelection(
            final IProcessingContext processingContext, final ParsedFragmentSelection fragmentSelection) {

        Validate.notNull(processingContext, "Processing Context cannot be null");
        Validate.notNull(fragmentSelection, "Fragment Selection cannot be null");

        /*
         * COMPUTE THE TEMPLATE NAME
         */
        final IStandardExpression templateNameExpression = fragmentSelection.getTemplateName();
        final String templateName;
        if (templateNameExpression != null) {
            final Object templateNameObject = templateNameExpression.execute(processingContext);
            if (templateNameObject == null) {
                throw new TemplateProcessingException(
                        "Evaluation of template name from spec \"" + fragmentSelection.getStringRepresentation() + "\" returned null.");
            }
            final String evaluatedTemplateName = templateNameObject.toString();
            if (TEMPLATE_NAME_CURRENT_TEMPLATE.equals(evaluatedTemplateName)) {
                // Template name is "this" and therefore we are including a fragment from the same template.
                templateName = null;
            } else {
                // Reuse the already-computed String instead of calling toString() a second time.
                templateName = evaluatedTemplateName;
            }
        } else {
            // If template name expression is null, we will execute the fragment on the "current" template
            templateName = null;
        }

        /*
         * RESOLVE FRAGMENT PARAMETERS if specified (null if not)
         */
        final Map<String, Object> fragmentParameters =
                resolveProcessedFragmentParameters(processingContext, fragmentSelection.getParameters());

        /*
         * COMPUTE THE FRAGMENT SELECTOR
         */
        String fragmentSelector = null;
        if (fragmentSelection.hasFragmentSelector()) {
            final Object fragmentSelectorObject =
                    fragmentSelection.getFragmentSelector().execute(processingContext);
            if (fragmentSelectorObject == null) {
                // Use getStringRepresentation() for consistency with the template-name error above
                // (the implicit toString() would not render the spec in its source form).
                throw new TemplateProcessingException(
                        "Evaluation of fragment selector from spec \"" + fragmentSelection.getStringRepresentation() + "\" returned null.");
            }
            fragmentSelector = fragmentSelectorObject.toString();
            if (fragmentSelector.length() > 3 &&
                    fragmentSelector.charAt(0) == '[' && fragmentSelector.charAt(fragmentSelector.length() - 1) == ']' &&
                    fragmentSelector.charAt(fragmentSelector.length() - 2) != '\'') {
                // For legacy compatibility reasons, we allow fragment DOM Selector expressions to be specified
                // between brackets. Just remove them.
                fragmentSelector = fragmentSelector.substring(1, fragmentSelector.length() - 1).trim();
            }
        }

        return new ProcessedFragmentSelection(templateName, fragmentSelector, fragmentParameters);
    }

    /**
     * Evaluates each assignation of the parameter sequence into a name -> value map, preserving
     * insertion order of evaluation results in a HashMap keyed by the evaluated names. Returns
     * null when there are no parameters.
     */
    private static Map<String,Object> resolveProcessedFragmentParameters(
            final IProcessingContext processingContext, final AssignationSequence parameters) {
        if (parameters == null || parameters.size() == 0) {
            return null;
        }
        final Map<String,Object> parameterValues = new HashMap<String, Object>(parameters.size() + 2);
        final List<Assignation> assignationValues = parameters.getAssignations();
        final int assignationValuesLen = assignationValues.size();
        for (int i = 0; i < assignationValuesLen; i++) {
            final Assignation assignation = assignationValues.get(i);
            final IStandardExpression parameterNameExpr = assignation.getLeft();
            final Object parameterNameValue = parameterNameExpr.execute(processingContext);
            // Evaluated names may be null; they are kept as null keys, matching previous behavior.
            final String parameterName = (parameterNameValue == null? null : parameterNameValue.toString());
            final IStandardExpression parameterValueExpr = assignation.getRight();
            final Object parameterValueValue = parameterValueExpr.execute(processingContext);
            parameterValues.put(parameterName, parameterValueValue);
        }
        return parameterValues;
    }

    // Utility class: prevent instantiation.
    private FragmentSelectionUtils() {
        super();
    }

}
| |
/*
* Copyright (c) 2012-2019 Snowflake Computing Inc. All rights reserved.
*/
package net.snowflake.client.core.arrow;
import java.sql.Date;
import java.sql.Timestamp;
import java.time.LocalDate;
import java.util.Calendar;
import java.util.TimeZone;
import net.snowflake.client.core.IncidentUtil;
import net.snowflake.client.core.ResultUtil;
import net.snowflake.client.core.SFException;
import net.snowflake.client.jdbc.ErrorCode;
import net.snowflake.client.jdbc.SnowflakeTimestampWithTimezone;
import net.snowflake.client.log.ArgSupplier;
import net.snowflake.client.log.SFLogger;
import net.snowflake.client.log.SFLoggerFactory;
import net.snowflake.common.core.CalendarCache;
/** Result utility methods specifically for Arrow format */
/** Result utility methods specifically for Arrow format */
public class ArrowResultUtil {
  private static final SFLogger logger = SFLoggerFactory.getLogger(ArrowResultUtil.class);

  // Powers of ten up to 10^9, the largest that fits in an int.
  private static final int[] POWERS_OF_10 = {
    1, 10, 100, 1000, 10000, 100000, 1000000, 10000000, 100000000, 1000000000
  };

  public static final int MAX_SCALE_POWERS_OF_10 = 9;

  /**
   * Computes 10^pow as a long.
   *
   * @param pow non-negative exponent; exponents above 9 are handled by repeated multiplication
   *     (values large enough to overflow a long are not guarded against)
   * @return 10 raised to the power pow
   */
  public static long powerOfTen(int pow) {
    long val = 1;
    while (pow > MAX_SCALE_POWERS_OF_10) {
      val *= POWERS_OF_10[MAX_SCALE_POWERS_OF_10];
      pow -= MAX_SCALE_POWERS_OF_10;
    }
    return val * POWERS_OF_10[pow];
  }

  /**
   * Builds a printf-style format string with the given number of fraction digits, e.g. scale 3
   * yields "%.3f".
   *
   * @param scale number of digits after the decimal point
   * @return the format string
   */
  public static String getStringFormat(int scale) {
    return "%." + scale + 'f';
  }

  /**
   * new method to get Date from integer
   *
   * @param day number of days since the epoch (1970-01-01)
   * @return Date the corresponding SQL Date
   */
  public static Date getDate(int day) {
    LocalDate localDate = LocalDate.ofEpochDay(day);
    return Date.valueOf(localDate);
  }

  /**
   * Method to get Date from integer using timezone offsets
   *
   * @param day number of days since the epoch (1970-01-01)
   * @param oldTz the timezone the epoch-day value is interpreted in
   * @param newTz the timezone to adjust the resulting date to
   * @return the adjusted SQL Date
   * @throws SFException if the date value cannot be converted
   */
  public static Date getDate(int day, TimeZone oldTz, TimeZone newTz) throws SFException {
    try {
      // return the date adjusted to the JVM default time zone
      long milliSecsSinceEpoch = (long) day * ResultUtil.MILLIS_IN_ONE_DAY;
      long milliSecsSinceEpochNew =
          milliSecsSinceEpoch + moveToTimeZoneOffset(milliSecsSinceEpoch, oldTz, newTz);

      Date preDate = new Date(milliSecsSinceEpochNew);

      // if date is on or before 1582-10-04, apply the difference
      // by (H-H/4-2) where H is the hundreds digit of the year according to:
      // http://en.wikipedia.org/wiki/Gregorian_calendar
      Date newDate = ResultUtil.adjustDate(preDate);
      logger.debug(
          "Adjust date from {} to {}",
          (ArgSupplier) preDate::toString,
          (ArgSupplier) newDate::toString);
      return newDate;
    } catch (NumberFormatException ex) {
      // NOTE(review): nothing in the try block obviously throws NumberFormatException;
      // presumably ResultUtil.adjustDate can — confirm before removing this handler.
      throw (SFException)
          IncidentUtil.generateIncidentV2WithException(
              null,
              new SFException(ErrorCode.INTERNAL_ERROR, "Invalid date value: " + day),
              null,
              null);
    }
  }

  /**
   * simplified moveToTimeZone method
   *
   * @param milliSecsSinceEpoch milliseconds since epoch of the instant to convert
   * @param oldTZ source timezone
   * @param newTZ target timezone
   * @return offset in milliseconds to add when moving the wall-clock time from oldTZ to newTZ
   */
  private static long moveToTimeZoneOffset(
      long milliSecsSinceEpoch, TimeZone oldTZ, TimeZone newTZ) {
    if (oldTZ.hasSameRules(newTZ)) {
      // same time zone
      return 0;
    }
    int offsetMillisInOldTZ = oldTZ.getOffset(milliSecsSinceEpoch);

    // Decompose the instant into calendar fields in the old timezone so the
    // new timezone's offset can be computed for the same wall-clock moment.
    Calendar calendar = CalendarCache.get(oldTZ);
    calendar.setTimeInMillis(milliSecsSinceEpoch);

    int millisecondWithinDay =
        ((calendar.get(Calendar.HOUR_OF_DAY) * 60 + calendar.get(Calendar.MINUTE)) * 60
                    + calendar.get(Calendar.SECOND))
                * 1000
            + calendar.get(Calendar.MILLISECOND);

    int era = calendar.get(Calendar.ERA);
    int year = calendar.get(Calendar.YEAR);
    int month = calendar.get(Calendar.MONTH);
    int dayOfMonth = calendar.get(Calendar.DAY_OF_MONTH);
    int dayOfWeek = calendar.get(Calendar.DAY_OF_WEEK);
    int offsetMillisInNewTZ =
        newTZ.getOffset(era, year, month, dayOfMonth, dayOfWeek, millisecondWithinDay);

    int offsetMillis = offsetMillisInOldTZ - offsetMillisInNewTZ;
    return offsetMillis;
  }

  /**
   * move the input timestamp form oldTZ to newTZ
   *
   * @param ts the timestamp to move
   * @param oldTZ source timezone
   * @param newTZ target timezone
   * @return timestamp in newTZ (the original instance is returned unchanged when the zones share rules)
   */
  public static Timestamp moveToTimeZone(Timestamp ts, TimeZone oldTZ, TimeZone newTZ) {
    long offset = moveToTimeZoneOffset(ts.getTime(), oldTZ, newTZ);
    if (offset == 0) {
      return ts;
    }
    // Preserve the sub-millisecond part: the Timestamp(long) constructor only
    // keeps millisecond precision, so re-apply the nanos afterwards.
    int nanos = ts.getNanos();
    ts = new Timestamp(ts.getTime() + offset);
    ts.setNanos(nanos);
    return ts;
  }

  /**
   * generate Java Timestamp object
   *
   * @param epoch the value since epoch time
   * @param scale the scale of the value
   * @return the corresponding timestamp in the JVM default timezone
   */
  public static Timestamp toJavaTimestamp(long epoch, int scale) {
    // Hoist the scale divisor: powerOfTen(scale) was previously computed twice.
    long scaleFactor = powerOfTen(scale);
    long seconds = epoch / scaleFactor;
    int fraction = (int) ((epoch % scaleFactor) * powerOfTen(9 - scale));
    if (fraction < 0) {
      // handle negative case here: normalize so fraction is always in [0, 1e9)
      seconds--;
      fraction += 1000000000;
    }
    return createTimestamp(seconds, fraction, TimeZone.getDefault(), false);
  }

  /**
   * check whether the input seconds out of the scope of Java timestamp
   *
   * @param seconds seconds since epoch
   * @return true when seconds * 1000 would overflow a long (i.e. exceed Timestamp's range)
   */
  public static boolean isTimestampOverflow(long seconds) {
    return seconds < Long.MIN_VALUE / powerOfTen(3) || seconds > Long.MAX_VALUE / powerOfTen(3);
  }

  /**
   * create Java timestamp using seconds since epoch and fraction in nanoseconds For example,
   * 1232.234 represents as epoch = 1232 and fraction = 234,000,000 For example, -1232.234
   * represents as epoch = -1233 and fraction = 766,000,000 For example, -0.13 represents as epoch =
   * -1 and fraction = 870,000,000
   *
   * @param seconds seconds since epoch (may be negative; see examples above)
   * @param fraction nanoseconds within the second, in [0, 1e9)
   * @param timezone the timezone used for the toString() formatting
   * @param useSessionTz when true, wrap the result in a SnowflakeTimestampWithTimezone so the
   *     session timezone is used instead of moving the instant (avoids DST offset errors)
   * @return java timestamp object
   */
  public static Timestamp createTimestamp(
      long seconds, int fraction, TimeZone timezone, boolean useSessionTz) {
    // If JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC=true, set timezone to UTC to get
    // timestamp object. This will avoid moving the timezone and creating
    // daylight savings offset errors.
    if (useSessionTz) {
      return new SnowflakeTimestampWithTimezone(
          seconds * ArrowResultUtil.powerOfTen(3), fraction, timezone);
    }
    Timestamp ts = new Timestamp(seconds * ArrowResultUtil.powerOfTen(3));
    ts.setNanos(fraction);
    return ts;
  }
}
| |
package com.ar.tothestars.listphotos;
import android.content.Context;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.ViewTreeObserver;
import android.widget.FrameLayout;
import android.widget.Toast;
import com.ar.tothestars.R;
import com.ar.tothestars.models.APODPhoto;
import com.ar.tothestars.services.APODManager;
import com.ar.tothestars.services.Credentials;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.Locale;
import retrofit.Callback;
import retrofit.RetrofitError;
import retrofit.client.Response;
/**
* Created by ariviere on 12/09/15.
*/
public class APODPhotosList extends FrameLayout implements SwipeRefreshLayout.OnRefreshListener,
PhotosListAdapter.Listener {
    // Date pattern used by the APOD API ("yyyy-MM-dd").
    private final static String DATE_FORMAT = "yyyy-MM-dd";
    // Number of additional photos fetched per page/refresh.
    private final static int LOADING_PHOTOS_COUNT = 7;

    // Formats request dates; presumably initialized in init() — not visible here.
    private SimpleDateFormat dateFormat;
    // Anchor date from which photo dates are counted backwards.
    private Calendar calendarReference;

    private SwipeRefreshLayout refreshLayout;
    private RecyclerView recyclerView;
    private LinearLayoutManager layoutManager;

    // Backing data for the adapter, newest first.
    private ArrayList<APODPhoto> photos;
    private PhotosListAdapter adapter;

    // Count of fetched entries that were invalid/missing a URL; used to keep
    // the date arithmetic in getNextDate() consistent with what was requested.
    private int photosWithError = 0;
    // Guards against overlapping "load more" requests.
    private boolean isLoadingMore = false;
    // Photos remaining to request in the current batch.
    private int loadingPhotos;
    private Listener listener;
    private int recyclerScrollY = 0;
public APODPhotosList(Context context) {
super(context);
if (!isInEditMode()) {
init(context);
}
}
public APODPhotosList(Context context, AttributeSet attrs) {
super(context, attrs);
if (!isInEditMode()) {
init(context);
}
}
public APODPhotosList(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
if (!isInEditMode()) {
init(context);
}
}
@Override
public void onRefresh() {
photos.clear();
photosWithError = 0;
calendarReference = Calendar.getInstance();
startGettingPhotos();
}
@Override
public void onSavedButtonClicked() {
listener.onFavoriteAdded();
}
/**
* set listener
*
* @param listener listener
*/
public void setListener(Listener listener) {
this.listener = listener;
}
private void startGettingPhotos() {
loadingPhotos = LOADING_PHOTOS_COUNT;
Calendar calendar = (Calendar) calendarReference.clone();
getPhoto(calendar.getTime());
}
private void getPhoto(Date photoDate) {
String dateFormatted = dateFormat.format(photoDate);
APODManager.getClient()
.getPhoto(dateFormatted, true, Credentials.NASA_KEY, new Callback<APODPhoto>() {
@Override
public void success(APODPhoto photo, Response response) {
addPhoto(photo);
}
@Override
public void failure(RetrofitError error) {
Toast.makeText(getContext(), getResources().getString(R.string.server_error), Toast.LENGTH_SHORT).show();
}
});
}
private void addPhoto(APODPhoto photo) {
if (photo.isValid() && photo.getUrl() != null && !photo.getUrl().equals("")) {
photos.add(photo);
} else {
photosWithError++;
}
// load another photo until count is finished
if (loadingPhotos > 0) {
loadingPhotos--;
getPhoto(getNextDate());
} else {
isLoadingMore = false;
adapter.notifyDataSetChanged();
refreshLayout.setRefreshing(false);
}
}
private Date getNextDate() {
Calendar calendar = (Calendar) calendarReference.clone();
calendar.add(Calendar.DATE, -(photos.size() + photosWithError));
return calendar.getTime();
}
private void initRecyclerView() {
adapter = new PhotosListAdapter(getContext(), photos);
adapter.setListener(this);
recyclerView.setAdapter(adapter);
recyclerView.addOnScrollListener(new RecyclerView.OnScrollListener() {
@Override
public void onScrolled(RecyclerView recyclerView, int dx, int dy) {
super.onScrolled(recyclerView, dx, dy);
recyclerScrollY += dy;
listener.onRecyclerScrolled(dy, recyclerScrollY);
if (!isLoadingMore
&& layoutManager.findLastVisibleItemPosition() > photos.size() - 3) {
isLoadingMore = true;
loadingPhotos = LOADING_PHOTOS_COUNT;
getPhoto(getNextDate());
}
}
});
}
private void init(final Context context) {
LayoutInflater.from(context).inflate(R.layout.item_photos_list, this);
refreshLayout = (SwipeRefreshLayout) findViewById(R.id.photo_refresh);
refreshLayout.setOnRefreshListener(this);
refreshLayout.setProgressViewEndTarget(false, 400);
refreshLayout.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() {
@Override
public boolean onPreDraw() {
getViewTreeObserver().removeOnPreDrawListener(this);
refreshLayout.setRefreshing(true);
return false;
}
});
recyclerView = (RecyclerView) findViewById(R.id.photos_recycler_view);
recyclerView.setHasFixedSize(true);
recyclerView.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() {
@Override
public boolean onPreDraw() {
recyclerView.getViewTreeObserver().removeOnPreDrawListener(this);
recyclerView.setPadding(0, getResources().getDimensionPixelSize(R.dimen.main_menu_buttons_height), 0, 0);
return false;
}
});
layoutManager = new LinearLayoutManager(getContext());
recyclerView.setLayoutManager(layoutManager);
dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US);
calendarReference = Calendar.getInstance();
calendarReference.add(Calendar.HOUR, -2);
photos = new ArrayList<>();
initRecyclerView();
if (photos.size() == 0) {
startGettingPhotos();
}
}
/**
* listener for photos list
*/
public interface Listener {
/**
* called when recycler view is scrolled
*
* @param dy dy
* @param recyclerScrollY total scrollY
*/
void onRecyclerScrolled(int dy, int recyclerScrollY);
/**
* called when a favorite is added
*/
void onFavoriteAdded();
}
}
| |
// Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.psi.impl.source.tree.injected;
import com.intellij.ide.CopyProvider;
import com.intellij.ide.CutProvider;
import com.intellij.ide.DeleteProvider;
import com.intellij.ide.PasteProvider;
import com.intellij.ide.highlighter.HighlighterFactory;
import com.intellij.injected.editor.DocumentWindow;
import com.intellij.injected.editor.EditorWindow;
import com.intellij.injected.editor.MarkupModelWindow;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.event.EditorMouseEvent;
import com.intellij.openapi.editor.event.EditorMouseEventArea;
import com.intellij.openapi.editor.event.EditorMouseListener;
import com.intellij.openapi.editor.event.EditorMouseMotionListener;
import com.intellij.openapi.editor.ex.*;
import com.intellij.openapi.editor.ex.util.EditorUtil;
import com.intellij.openapi.editor.highlighter.EditorHighlighter;
import com.intellij.openapi.editor.highlighter.LightHighlighterClient;
import com.intellij.openapi.editor.impl.EditorImpl;
import com.intellij.openapi.editor.impl.TextDrawingCallback;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.fileTypes.SyntaxHighlighter;
import com.intellij.openapi.fileTypes.SyntaxHighlighterFactory;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiFile;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.util.containers.UnsafeWeakList;
import com.intellij.util.ui.ButtonlessScrollBarUI;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.border.Border;
import java.awt.*;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.awt.geom.Point2D;
import java.beans.PropertyChangeListener;
import java.util.Collection;
import java.util.Iterator;
import java.util.function.IntFunction;
class EditorWindowImpl extends com.intellij.injected.editor.EditorWindowImpl implements EditorWindow, EditorEx {
private final DocumentWindowImpl myDocumentWindow;
private final EditorImpl myDelegate;
private volatile PsiFile myInjectedFile;
private final boolean myOneLine;
private final CaretModelWindow myCaretModelDelegate;
private final SelectionModelWindow mySelectionModelDelegate;
private static final Collection<EditorWindowImpl> allEditors = new UnsafeWeakList<>(); // guarded by allEditors
private volatile boolean myDisposed;
private final MarkupModelWindow myMarkupModelDelegate;
private final MarkupModelWindow myDocumentMarkupModelDelegate;
private final FoldingModelWindow myFoldingModelWindow;
private final SoftWrapModelWindow mySoftWrapModel;
private final InlayModelWindow myInlayModel;
@NotNull
static Editor create(@NotNull final DocumentWindowImpl documentRange, @NotNull final EditorImpl editor, @NotNull final PsiFile injectedFile) {
    // Returns the injected-fragment editor for (documentRange, editor),
    // reusing a cached instance from allEditors when a valid one exists.
    assert documentRange.isValid();
    assert injectedFile.isValid();
    EditorWindowImpl window;
    synchronized (allEditors) {
        for (EditorWindowImpl editorWindow : allEditors) {
            if (editorWindow.getDocument() == documentRange && editorWindow.getDelegate() == editor) {
                // same host editor and document window: refresh the (possibly
                // stale) injected PSI file and reuse the cached editor if still valid
                editorWindow.myInjectedFile = injectedFile;
                if (editorWindow.isValid()) {
                    return editorWindow;
                }
            }
        }
        window = new EditorWindowImpl(documentRange, editor, injectedFile, documentRange.isOneLine());
        allEditors.add(window);
    }
    window.checkValid();
    return window;
}
private EditorWindowImpl(@NotNull DocumentWindowImpl documentWindow,
@NotNull final EditorImpl delegate,
@NotNull PsiFile injectedFile,
boolean oneLine) {
myDocumentWindow = documentWindow;
myDelegate = delegate;
myInjectedFile = injectedFile;
myOneLine = oneLine;
myCaretModelDelegate = new CaretModelWindow(myDelegate.getCaretModel(), this);
mySelectionModelDelegate = new SelectionModelWindow(myDelegate, myDocumentWindow,this);
myMarkupModelDelegate = new MarkupModelWindow(myDelegate.getMarkupModel(), myDocumentWindow);
myDocumentMarkupModelDelegate = new MarkupModelWindow(myDelegate.getFilteredDocumentMarkupModel(), myDocumentWindow);
myFoldingModelWindow = new FoldingModelWindow(delegate.getFoldingModel(), documentWindow, this);
mySoftWrapModel = new SoftWrapModelWindow();
myInlayModel = new InlayModelWindow();
}
static void disposeInvalidEditors() {
ApplicationManager.getApplication().assertWriteAccessAllowed();
synchronized (allEditors) {
Iterator<EditorWindowImpl> iterator = allEditors.iterator();
while (iterator.hasNext()) {
EditorWindowImpl editorWindow = iterator.next();
if (!editorWindow.isValid()) {
disposeEditor(editorWindow);
iterator.remove();
}
}
}
}
private static void disposeEditor(@NotNull EditorWindow editorWindow) {
EditorWindowImpl impl = (EditorWindowImpl)editorWindow;
impl.dispose();
InjectedLanguageUtil.clearCaches(impl.myInjectedFile, impl.getDocument());
}
static void disposeEditorFor(@NotNull DocumentWindow documentWindow) {
synchronized (allEditors) {
for (Iterator<EditorWindowImpl> iterator = allEditors.iterator(); iterator.hasNext(); ) {
EditorWindowImpl editor = iterator.next();
if (InjectionRegistrarImpl.intersect(editor.getDocument(), (DocumentWindowImpl)documentWindow)) {
disposeEditor(editor);
iterator.remove();
break;
}
}
}
}
@Override
public boolean isValid() {
return !isDisposed() && !myInjectedFile.getProject().isDisposed() && myInjectedFile.isValid() && myDocumentWindow.isValid();
}
private void checkValid() {
    // Fails fast — with a breakdown of the cause — when this editor window is
    // used after its injected file, document window, delegate editor or
    // project has been invalidated/disposed.
    PsiUtilCore.ensureValid(myInjectedFile);
    if (!isValid()) {
        StringBuilder reason = new StringBuilder("Not valid");
        if (myDisposed) reason.append("; editorWindow: disposed");
        if (!myDocumentWindow.isValid()) reason.append("; documentWindow: invalid");
        if (myDelegate.isDisposed()) reason.append("; editor: disposed");
        if (myInjectedFile.getProject().isDisposed()) reason.append("; project: disposed");
        throw new AssertionError(reason.toString());
    }
}
@Override
@NotNull
public PsiFile getInjectedFile() {
return myInjectedFile;
}
@Override
@NotNull
public LogicalPosition hostToInjected(@NotNull LogicalPosition hPos) {
    // Maps a logical position in the host editor to the corresponding
    // position inside the injected fragment.
    checkValid();
    DocumentEx hostDocument = myDelegate.getDocument();
    // line-end offset, clamped to text length for positions below the last host line
    int hLineEndOffset = hPos.line >= hostDocument.getLineCount() ? hostDocument.getTextLength() : hostDocument.getLineEndOffset(hPos.line);
    LogicalPosition hLineEndPos = myDelegate.offsetToLogicalPosition(hLineEndOffset);
    if (hLineEndPos.column < hPos.column) {
        // in virtual space
        LogicalPosition iPos = myDocumentWindow.hostToInjectedInVirtualSpace(hPos);
        if (iPos != null) {
            return iPos;
        }
    }
    int hOffset = myDelegate.logicalPositionToOffset(hPos);
    int iOffset = myDocumentWindow.hostToInjected(hOffset);
    return offsetToLogicalPosition(iOffset);
}
@Override
@NotNull
public LogicalPosition injectedToHost(@NotNull LogicalPosition pos) {
    // Maps a logical position inside the injected fragment back to host
    // editor coordinates, preserving any virtual-space column excess.
    checkValid();
    int offset = logicalPositionToOffset(pos);
    LogicalPosition samePos = offsetToLogicalPosition(offset);
    // columns past the line end (virtual space) survive the round trip as a delta
    int virtualSpaceDelta = offset < myDocumentWindow.getTextLength() && samePos.line == pos.line && samePos.column < pos.column ?
                            pos.column - samePos.column : 0;
    LogicalPosition hostPos = myDelegate.offsetToLogicalPosition(myDocumentWindow.injectedToHost(offset));
    return new LogicalPosition(hostPos.line, hostPos.column + virtualSpaceDelta);
}
private void dispose() {
    // Tears down this window: detaches the caret model and unregisters every
    // mouse / mouse-motion listener wrapper previously installed on the host
    // editor, then disposes the underlying document window.
    assert !myDisposed;
    myCaretModelDelegate.disposeModel();
    for (EditorMouseListener wrapper : myEditorMouseListeners.wrappers()) {
        myDelegate.removeEditorMouseListener(wrapper);
    }
    myEditorMouseListeners.clear();
    for (EditorMouseMotionListener wrapper : myEditorMouseMotionListeners.wrappers()) {
        myDelegate.removeEditorMouseMotionListener(wrapper);
    }
    myEditorMouseMotionListeners.clear();
    myDisposed = true;
    Disposer.dispose(myDocumentWindow);
}
@Override
public void setViewer(boolean isViewer) {
myDelegate.setViewer(isViewer);
}
@Override
public boolean isViewer() {
return myDelegate.isViewer();
}
@Override
public boolean isRendererMode() {
return myDelegate.isRendererMode();
}
@Override
public void setRendererMode(final boolean isRendererMode) {
myDelegate.setRendererMode(isRendererMode);
}
@Override
public void setFile(final VirtualFile vFile) {
myDelegate.setFile(vFile);
}
@Override
public void setHeaderComponent(@Nullable JComponent header) {
}
@Override
public boolean hasHeaderComponent() {
return false;
}
@Override
@Nullable
public JComponent getHeaderComponent() {
return null;
}
@Override
public TextDrawingCallback getTextDrawingCallback() {
return myDelegate.getTextDrawingCallback();
}
@Override
@NotNull
public SelectionModel getSelectionModel() {
return mySelectionModelDelegate;
}
@Override
@NotNull
public MarkupModelEx getMarkupModel() {
return myMarkupModelDelegate;
}
@NotNull
@Override
public MarkupModelEx getFilteredDocumentMarkupModel() {
return myDocumentMarkupModelDelegate;
}
@Override
@NotNull
public FoldingModelEx getFoldingModel() {
return myFoldingModelWindow;
}
@Override
@NotNull
public CaretModel getCaretModel() {
return myCaretModelDelegate;
}
@Override
@NotNull
public ScrollingModelEx getScrollingModel() {
return myDelegate.getScrollingModel();
}
@Override
@NotNull
public SoftWrapModelEx getSoftWrapModel() {
return mySoftWrapModel;
}
@Override
@NotNull
public EditorSettings getSettings() {
return myDelegate.getSettings();
}
@NotNull
@Override
public InlayModel getInlayModel() {
return myInlayModel;
}
@NotNull
@Override
public EditorKind getEditorKind() {
return myDelegate.getEditorKind();
}
@Override
public void reinitSettings() {
myDelegate.reinitSettings();
}
@Override
public void setFontSize(final int fontSize) {
myDelegate.setFontSize(fontSize);
}
@Override
public void setHighlighter(@NotNull final EditorHighlighter highlighter) {
myDelegate.setHighlighter(highlighter);
}
@NotNull
@Override
public EditorHighlighter getHighlighter() {
EditorColorsScheme scheme = EditorColorsManager.getInstance().getGlobalScheme();
SyntaxHighlighter syntaxHighlighter =
SyntaxHighlighterFactory.getSyntaxHighlighter(myInjectedFile.getLanguage(), getProject(), myInjectedFile.getVirtualFile());
EditorHighlighter highlighter = HighlighterFactory.createHighlighter(syntaxHighlighter, scheme);
highlighter.setText(getDocument().getText());
highlighter.setEditor(new LightHighlighterClient(getDocument(), getProject()));
return highlighter;
}
@Override
public JComponent getPermanentHeaderComponent() {
return myDelegate.getPermanentHeaderComponent();
}
@Override
public void setPermanentHeaderComponent(JComponent component) {
myDelegate.setPermanentHeaderComponent(component);
}
@Override
@NotNull
public JComponent getContentComponent() {
return myDelegate.getContentComponent();
}
@NotNull
@Override
public EditorGutterComponentEx getGutterComponentEx() {
return myDelegate.getGutterComponentEx();
}
@Override
public void addPropertyChangeListener(@NotNull final PropertyChangeListener listener) {
myDelegate.addPropertyChangeListener(listener);
}
@Override
public void addPropertyChangeListener(@NotNull PropertyChangeListener listener, @NotNull Disposable parentDisposable) {
myDelegate.addPropertyChangeListener(listener, parentDisposable);
}
@Override
public void removePropertyChangeListener(@NotNull final PropertyChangeListener listener) {
myDelegate.removePropertyChangeListener(listener);
}
@Override
public void setInsertMode(final boolean mode) {
myDelegate.setInsertMode(mode);
}
@Override
public boolean isInsertMode() {
return myDelegate.isInsertMode();
}
@Override
public void setColumnMode(final boolean mode) {
myDelegate.setColumnMode(mode);
}
@Override
public boolean isColumnMode() {
return myDelegate.isColumnMode();
}
@Override
@NotNull
public VisualPosition xyToVisualPosition(@NotNull final Point p) {
return logicalToVisualPosition(xyToLogicalPosition(p));
}
@NotNull
@Override
public VisualPosition xyToVisualPosition(@NotNull Point2D p) {
checkValid();
Point2D pp = p.getX() >= 0 && p.getY() >= 0 ? p : new Point2D.Double(Math.max(p.getX(), 0), Math.max(p.getY(), 0));
LogicalPosition hostPos = myDelegate.visualToLogicalPosition(myDelegate.xyToVisualPosition(pp));
return logicalToVisualPosition(hostToInjected(hostPos));
}
@Override
@NotNull
public VisualPosition offsetToVisualPosition(final int offset) {
return logicalToVisualPosition(offsetToLogicalPosition(offset));
}
@Override
@NotNull
public VisualPosition offsetToVisualPosition(int offset, boolean leanForward, boolean beforeSoftWrap) {
return logicalToVisualPosition(offsetToLogicalPosition(offset).leanForward(leanForward));
}
@Override
@NotNull
public LogicalPosition offsetToLogicalPosition(final int offset) {
checkValid();
int lineNumber = myDocumentWindow.getLineNumber(offset);
int lineStartOffset = myDocumentWindow.getLineStartOffset(lineNumber);
int column = calcLogicalColumnNumber(offset-lineStartOffset, lineNumber, lineStartOffset);
return new LogicalPosition(lineNumber, column);
}
@NotNull
@Override
public EditorColorsScheme createBoundColorSchemeDelegate(@Nullable EditorColorsScheme customGlobalScheme) {
return myDelegate.createBoundColorSchemeDelegate(customGlobalScheme);
}
@Override
@NotNull
public LogicalPosition xyToLogicalPosition(@NotNull final Point p) {
checkValid();
LogicalPosition hostPos = myDelegate.xyToLogicalPosition(p);
return hostToInjected(hostPos);
}
@Override
@NotNull
public Point logicalPositionToXY(@NotNull final LogicalPosition pos) {
checkValid();
LogicalPosition hostPos = injectedToHost(pos);
return myDelegate.logicalPositionToXY(hostPos);
}
@Override
@NotNull
public Point visualPositionToXY(@NotNull final VisualPosition pos) {
checkValid();
return logicalPositionToXY(visualToLogicalPosition(pos));
}
@NotNull
@Override
public Point2D visualPositionToPoint2D(@NotNull VisualPosition pos) {
checkValid();
LogicalPosition hostLogical = injectedToHost(visualToLogicalPosition(pos));
VisualPosition hostVisual = myDelegate.logicalToVisualPosition(hostLogical);
return myDelegate.visualPositionToPoint2D(hostVisual);
}
@Override
public void repaint(final int startOffset, final int endOffset) {
checkValid();
myDelegate.repaint(myDocumentWindow.injectedToHost(startOffset), myDocumentWindow.injectedToHost(endOffset));
}
@Override
@NotNull
public DocumentWindowImpl getDocument() {
return myDocumentWindow;
}
@Override
@NotNull
public JComponent getComponent() {
return myDelegate.getComponent();
}
private final ListenerWrapperMap<EditorMouseListener> myEditorMouseListeners = new ListenerWrapperMap<>();
@Override
public void addEditorMouseListener(@NotNull final EditorMouseListener listener) {
checkValid();
EditorMouseListener wrapper = new EditorMouseListener() {
@Override
public void mousePressed(EditorMouseEvent e) {
listener.mousePressed(new EditorMouseEvent(EditorWindowImpl.this, e.getMouseEvent(), e.getArea()));
}
@Override
public void mouseClicked(EditorMouseEvent e) {
listener.mouseClicked(new EditorMouseEvent(EditorWindowImpl.this, e.getMouseEvent(), e.getArea()));
}
@Override
public void mouseReleased(EditorMouseEvent e) {
listener.mouseReleased(new EditorMouseEvent(EditorWindowImpl.this, e.getMouseEvent(), e.getArea()));
}
@Override
public void mouseEntered(EditorMouseEvent e) {
listener.mouseEntered(new EditorMouseEvent(EditorWindowImpl.this, e.getMouseEvent(), e.getArea()));
}
@Override
public void mouseExited(EditorMouseEvent e) {
listener.mouseExited(new EditorMouseEvent(EditorWindowImpl.this, e.getMouseEvent(), e.getArea()));
}
};
myEditorMouseListeners.registerWrapper(listener, wrapper);
myDelegate.addEditorMouseListener(wrapper);
}
@Override
public void removeEditorMouseListener(@NotNull final EditorMouseListener listener) {
EditorMouseListener wrapper = myEditorMouseListeners.removeWrapper(listener);
// HintManager might have an old editor instance
if (wrapper != null) {
myDelegate.removeEditorMouseListener(wrapper);
}
}
private final ListenerWrapperMap<EditorMouseMotionListener> myEditorMouseMotionListeners = new ListenerWrapperMap<>();
@Override
public void addEditorMouseMotionListener(@NotNull final EditorMouseMotionListener listener) {
checkValid();
EditorMouseMotionListener wrapper = new EditorMouseMotionListener() {
@Override
public void mouseMoved(EditorMouseEvent e) {
listener.mouseMoved(new EditorMouseEvent(EditorWindowImpl.this, e.getMouseEvent(), e.getArea()));
}
@Override
public void mouseDragged(EditorMouseEvent e) {
listener.mouseDragged(new EditorMouseEvent(EditorWindowImpl.this, e.getMouseEvent(), e.getArea()));
}
};
myEditorMouseMotionListeners.registerWrapper(listener, wrapper);
myDelegate.addEditorMouseMotionListener(wrapper);
}
@Override
public void removeEditorMouseMotionListener(@NotNull final EditorMouseMotionListener listener) {
EditorMouseMotionListener wrapper = myEditorMouseMotionListeners.removeWrapper(listener);
if (wrapper != null) {
myDelegate.removeEditorMouseMotionListener(wrapper);
}
}
@Override
public boolean isDisposed() {
return myDisposed || myDelegate.isDisposed();
}
@Override
public void setBackgroundColor(final Color color) {
myDelegate.setBackgroundColor(color);
}
@NotNull
@Override
public Color getBackgroundColor() {
return myDelegate.getBackgroundColor();
}
@Override
public int getMaxWidthInRange(final int startOffset, final int endOffset) {
return myDelegate.getMaxWidthInRange(startOffset, endOffset);
}
@Override
public int getLineHeight() {
return myDelegate.getLineHeight();
}
@Override
public Dimension getContentSize() {
return myDelegate.getContentSize();
}
@NotNull
@Override
public JScrollPane getScrollPane() {
return myDelegate.getScrollPane();
}
@Override
public void setBorder(Border border) {
myDelegate.setBorder(border);
}
@Override
public Insets getInsets() {
return myDelegate.getInsets();
}
@Override
public int logicalPositionToOffset(@NotNull final LogicalPosition pos) {
int lineStartOffset = myDocumentWindow.getLineStartOffset(pos.line);
return calcOffset(pos.column, pos.line, lineStartOffset);
}
private int calcLogicalColumnNumber(int offsetInLine, int lineNumber, int lineStartOffset) {
if (myDocumentWindow.getTextLength() == 0) return 0;
if (offsetInLine==0) return 0;
int end = myDocumentWindow.getLineEndOffset(lineNumber);
if (offsetInLine > end- lineStartOffset) offsetInLine = end - lineStartOffset;
CharSequence text = myDocumentWindow.getCharsSequence();
return EditorUtil.calcColumnNumber(this, text, lineStartOffset, lineStartOffset +offsetInLine);
}
private int calcOffset(int col, int lineNumber, int lineStartOffset) {
    // Inverse of calcLogicalColumnNumber: finds the offset on the given line
    // whose visual column span covers `col`, expanding tabs to the editor's
    // tab size.
    CharSequence text = myDocumentWindow.getImmutableCharSequence();
    int tabSize = EditorUtil.getTabSize(myDelegate);
    int end = myDocumentWindow.getLineEndOffset(lineNumber);
    int currentColumn = 0;
    for (int i = lineStartOffset; i < end; i++) {
        char c = text.charAt(i);
        if (c == '\t') {
            // a tab advances to the next multiple of tabSize
            currentColumn = (currentColumn / tabSize + 1) * tabSize;
        }
        else {
            currentColumn++;
        }
        if (col < currentColumn) return i;
    }
    // requested column lies at or past the end of the line
    return end;
}
// assuming there is no folding in injected documents
@Override
@NotNull
public VisualPosition logicalToVisualPosition(@NotNull final LogicalPosition pos) {
checkValid();
return new VisualPosition(pos.line, pos.column);
}
@Override
@NotNull
public LogicalPosition visualToLogicalPosition(@NotNull final VisualPosition pos) {
checkValid();
return new LogicalPosition(pos.line, pos.column);
}
@NotNull
@Override
public DataContext getDataContext() {
return myDelegate.getDataContext();
}
@Override
public EditorMouseEventArea getMouseEventArea(@NotNull final MouseEvent e) {
return myDelegate.getMouseEventArea(e);
}
@Override
public boolean setCaretVisible(final boolean b) {
return myDelegate.setCaretVisible(b);
}
@Override
public boolean setCaretEnabled(boolean enabled) {
return myDelegate.setCaretEnabled(enabled);
}
@Override
public void addFocusListener(@NotNull final FocusChangeListener listener) {
myDelegate.addFocusListener(listener);
}
@Override
public void addFocusListener(@NotNull FocusChangeListener listener, @NotNull Disposable parentDisposable) {
myDelegate.addFocusListener(listener, parentDisposable);
}
@Override
public Project getProject() {
return myDelegate.getProject();
}
@Override
public boolean isOneLineMode() {
return myOneLine;
}
@Override
public void setOneLineMode(final boolean isOneLineMode) {
throw new UnsupportedOperationException();
}
@Override
public boolean isEmbeddedIntoDialogWrapper() {
return myDelegate.isEmbeddedIntoDialogWrapper();
}
@Override
public void setEmbeddedIntoDialogWrapper(final boolean b) {
myDelegate.setEmbeddedIntoDialogWrapper(b);
}
@Override
public VirtualFile getVirtualFile() {
return myDelegate.getVirtualFile();
}
@Override
public CopyProvider getCopyProvider() {
return myDelegate.getCopyProvider();
}
@Override
public CutProvider getCutProvider() {
return myDelegate.getCutProvider();
}
@Override
public PasteProvider getPasteProvider() {
return myDelegate.getPasteProvider();
}
@Override
public DeleteProvider getDeleteProvider() {
return myDelegate.getDeleteProvider();
}
@Override
public void setColorsScheme(@NotNull final EditorColorsScheme scheme) {
myDelegate.setColorsScheme(scheme);
}
@Override
@NotNull
public EditorColorsScheme getColorsScheme() {
return myDelegate.getColorsScheme();
}
@Override
public void setVerticalScrollbarOrientation(final int type) {
myDelegate.setVerticalScrollbarOrientation(type);
}
@Override
public int getVerticalScrollbarOrientation() {
return myDelegate.getVerticalScrollbarOrientation();
}
@Override
public void setVerticalScrollbarVisible(final boolean b) {
myDelegate.setVerticalScrollbarVisible(b);
}
@Override
public void setHorizontalScrollbarVisible(final boolean b) {
myDelegate.setHorizontalScrollbarVisible(b);
}
@Override
public boolean processKeyTyped(@NotNull final KeyEvent e) {
return myDelegate.processKeyTyped(e);
}
@Override
@NotNull
public EditorGutter getGutter() {
return myDelegate.getGutter();
}
@Override
public boolean equals(final Object o) {
    // Two injected editors are equal iff they wrap the same host editor and
    // the same injected document window.
    if (o == this) return true;
    if (o == null || o.getClass() != getClass()) return false;
    EditorWindowImpl other = (EditorWindowImpl)o;
    return myDelegate.equals(other.myDelegate) && myDocumentWindow.equals(other.getDocument());
}
@Override
public int hashCode() {
return myDocumentWindow.hashCode();
}
@NotNull
@Override
public Editor getDelegate() {
return myDelegate;
}
@NotNull
@Override
public IndentsModel getIndentsModel() {
return myDelegate.getIndentsModel();
}
@Override
public void setPlaceholder(@Nullable CharSequence text) {
myDelegate.setPlaceholder(text);
}
@Override
public void setPlaceholderAttributes(@Nullable TextAttributes attributes) {
myDelegate.setPlaceholderAttributes(attributes);
}
@Override
public void setShowPlaceholderWhenFocused(boolean show) {
myDelegate.setShowPlaceholderWhenFocused(show);
}
@Override
public boolean isStickySelection() {
return myDelegate.isStickySelection();
}
@Override
public void setStickySelection(boolean enable) {
myDelegate.setStickySelection(enable);
}
@Override
public boolean isPurePaintingMode() {
return myDelegate.isPurePaintingMode();
}
@Override
public void setPurePaintingMode(boolean enabled) {
myDelegate.setPurePaintingMode(enabled);
}
@Override
public void registerLineExtensionPainter(IntFunction<Collection<LineExtensionInfo>> lineExtensionPainter) {
throw new UnsupportedOperationException();
}
@Override
public void registerScrollBarRepaintCallback(@Nullable ButtonlessScrollBarUI.ScrollbarRepaintCallback callback) {
myDelegate.registerScrollBarRepaintCallback(callback);
}
@Override
public void setPrefixTextAndAttributes(@Nullable String prefixText, @Nullable TextAttributes attributes) {
myDelegate.setPrefixTextAndAttributes(prefixText, attributes);
}
@Override
public int getPrefixTextWidthInPixels() {
return myDelegate.getPrefixTextWidthInPixels();
}
@Override
public String toString() {
return super.toString() + "[disposed=" + myDisposed + "; valid=" + isValid() + "]";
}
@Override
public int getExpectedCaretOffset() {
return myDocumentWindow.hostToInjected(myDelegate.getExpectedCaretOffset());
}
@Override
public void setContextMenuGroupId(@Nullable String groupId) {
myDelegate.setContextMenuGroupId(groupId);
}
@Nullable
@Override
public String getContextMenuGroupId() {
return myDelegate.getContextMenuGroupId();
}
}
| |
/*******************************************************************************
* Copyright (c) 2013, Fabrizio Falchi (NeMIS Lab., ISTI-CNR, Italy)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
******************************************************************************/
package it.cnr.isti.vir.features.localfeatures;
import it.cnr.isti.vir.distance.L2;
import it.cnr.isti.vir.features.IUByteValues;
import it.cnr.isti.vir.util.math.Mean;
import it.cnr.isti.vir.util.math.Norm;
import it.cnr.isti.vir.util.math.Normalize;
import it.cnr.isti.vir.util.math.VectorMath;
import java.io.BufferedReader;
import java.io.DataInput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.Iterator;
public class RootSIFT extends ALocalFeature<RootSIFTGroup> implements IUByteValues {
static final int VLEN = 128;
private final static float sqrt2 = (float) Math.sqrt(2.0);
private static final double maxSQRDistValue = 255 * 255 * 128;
public static boolean L2Norm = true;
public static boolean isL2Norm() {
return L2Norm;
}
public static void setL2Norm(boolean l2Norm) {
L2Norm = l2Norm;
}
public final byte[] values; /* Vector of descriptor values (- 128 for storing in java byte) */
public Class getGroupClass() { return RootSIFTGroup.class; };
@Override
public final int getLength() {
return VLEN;
}
@Override
public byte[] getValues() {
return values;
}
public RootSIFT(SIFT sift, RootSIFTGroup givenLinkedGroup) {
    // Builds the RootSIFT feature for an existing SIFT: reuses its key point
    // and applies the root transform to its descriptor values.
    linkedGroup = givenLinkedGroup;
    kp = sift.kp;
    values = RootSIFT.getRootSIFTValues( sift.values );
}
public RootSIFT(DataInput str ) throws IOException {
super(str);
values = new byte[VLEN];
str.readFully(values);
}
public RootSIFT(ByteBuffer src ) throws IOException {
super(src);
values = new byte[VLEN];
src.get(values);
}
public int getDataByteSize() {
return VLEN;
}
public int putDescriptor(byte[] bArr, int bArrI) {
System.arraycopy(values, 0, bArr, bArrI, VLEN);
return bArrI + VLEN;
}
private RootSIFT(byte[] values) {
super((KeyPoint) null, null);
this.values = values;
}
public RootSIFT(KeyPoint kp, byte[] values, RootSIFTGroup group) {
this.kp = kp;
this.values = values;
this.linkedGroup = group;
}
@Override
public int compareTo(ALocalFeature<RootSIFTGroup> given) {
    // Orders features first by key point, then lexicographically by the
    // descriptor bytes; identical references compare equal immediately.
    if ( this == given ) return 0;
    if ( this.kp != given.kp ) {
        // a null key point sorts before any non-null one
        if ( kp == null ) return -1;
        if ( given.kp == null ) return 1;
        int tComp = this.kp.compareTo( given.kp);
        if ( tComp != 0 ) return tComp;
    }
    // key points equal (or same reference): fall back to byte-wise comparison
    for ( int i=0; i<values.length; i++ ) {
        int tComp = Byte.compare(values[i], ((RootSIFT)given).values[i]);
        if ( tComp != 0 ) return tComp;
    }
    return 0;
}
/**
* This is the core of the RootSIFT
*
* @param value
* @param sum
* @return
*/
public final static int getRootSIFTValue( int value, int sum ) {
return (int)
(
// Squared root
Math.sqrt( (double) value / sum ) // between 0 and 1
* 255 // to have between 0 and 255
);
}
public final static float[] getRootSIFTFloatValues( byte[] siftValues ) {
double l1Norm = Norm.l1(siftValues) + 128 * 128;
// Root of L1 Norm
double[] dValues = new double[128];
for ( int i=0; i<siftValues.length; i++ ) {
// sqrt of L1 Normalized vectors
dValues[i] = Math.sqrt( (siftValues[i]+128) / l1Norm );
}
if ( L2Norm ) {
Normalize.l2(dValues);
}
return VectorMath.getFloats(dValues);
}
public final static FloatsLF getFloatsLFValues( SIFT sift ) {
return new FloatsLF(sift.kp, getRootSIFTFloatValues(sift.values));
}
public final static byte[] getRootSIFTValues( byte[] siftValues ) {
float[] fValues = getRootSIFTFloatValues( siftValues );
byte[] values = new byte[128];
for ( int i=0; i<siftValues.length; i++ ) {
values[i] = (byte) ( (int) ( fValues[i]*255.0) - 128);
}
// // Sum for L1 Normalization
// int sum = 0;
// for ( byte curr : siftValues ) {
// sum += curr;
// }
//
// // java does not have UBytes
// sum += 128*128;
//
// byte[] values = new byte[128];
// if ( !L2Norm ) {
//
// for ( int i=0; i<siftValues.length; i++ ) {
// values[i] = (byte) (getRootSIFTValue( siftValues[i]+128, sum)-128);
// }
//
// } else {
//
// // Root of L1 Norm
// double[] dValues = new double[128];
// for ( int i=0; i<siftValues.length; i++ ) {
// dValues[i] = Math.sqrt( (float) (siftValues[i]+128) / sum );
// }
//
// // L2 Norm
// double dSum = 0;
// for ( double curr : dValues ) {
// dSum += curr*curr;
// }
//
// Normalize.l2(dValues);
//
// for ( int i=0; i<siftValues.length; i++ ) {
// values[i] = (byte) ( (int) ( dValues[i]*255.0) - 128);
// }
// }
//
//
return values;
}
public RootSIFT(BufferedReader br, RootSIFTGroup group) throws IOException {
SIFT sift = new SIFT(br);
this.kp = sift.kp;
this.linkedGroup = group;
this.values = getRootSIFTValues(sift.values);
}
public static RootSIFT getMean(Collection<RootSIFT> coll) {
if ( coll.size() == 0 ) return null;
byte[][] bytes = new byte[coll.size()][];
int i=0;
for ( Iterator<RootSIFT> it = coll.iterator(); it.hasNext(); ) {
bytes[i++] = it.next().values;
}
return new RootSIFT(Mean.getMean(bytes));
}
public static final double getL2SQDistance_Norm(RootSIFT s1, RootSIFT s2 ) {
return getL2SQDistance(s1,s2)/maxSQRDistValue;
}
public static final double getL2SQDistance_Norm(RootSIFT s1, RootSIFT s2, double maxDist ) {
return getL2SQDistance(s1,s2,(int) Math.ceil(maxDist*maxSQRDistValue))/maxSQRDistValue;
}
public static final double getL2Distance(RootSIFT s1, RootSIFT s2) {
return Math.sqrt( getL2SQDistance_Norm(s1, s2) );
}
public static final double getL2Distance(RootSIFT s1, RootSIFT s2, int maxDist ) {
return Math.sqrt( getL2SQDistance_Norm(s1, s2, maxDist) );
}
public static final int getL2SQDistance(RootSIFT s1, RootSIFT s2) {
return L2.getSquared(s1.values, s2.values);
}
public static final int getL2SQDistance(RootSIFT s1, RootSIFT s2, int maxDist ) {
return L2.getSquared(s1.values, s2.values, maxDist);
}
}
| |
package com.dieam.reactnativepushnotification.modules;
import android.app.AlarmManager;
import android.app.Application;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.ApplicationInfo;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.media.RingtoneManager;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.support.v4.app.NotificationCompat;
import android.util.Log;
import android.media.AudioManager;
import com.facebook.react.bridge.ReadableMap;
import org.json.JSONArray;
import org.json.JSONException;
import java.util.Arrays;
import static com.dieam.reactnativepushnotification.modules.RNPushNotification.LOG_TAG;
import static com.dieam.reactnativepushnotification.modules.RNPushNotificationAttributes.fromJson;
public class RNPushNotificationHelper {
    public static final String PREFERENCES_KEY = "rn_push_notification";

    // Default vibration length (ms) used when the bundle supplies none, or 0.
    private static final long DEFAULT_VIBRATION = 300L;

    private final Context context;
    // Persists scheduled notifications (id -> JSON attributes) so they can be
    // rescheduled after a reboot.
    private final SharedPreferences scheduledNotificationsPersistence;

    private static final long ONE_MINUTE = 60 * 1000;
    private static final long ONE_HOUR = 60 * ONE_MINUTE;
    private static final long ONE_DAY = 24 * ONE_HOUR;

    public RNPushNotificationHelper(Application context) {
        this.context = context;
        this.scheduledNotificationsPersistence =
                context.getSharedPreferences(RNPushNotificationHelper.PREFERENCES_KEY, Context.MODE_PRIVATE);
    }

    /**
     * Resolves the application's main (launcher) activity class.
     *
     * @return the activity class, or {@code null} if it cannot be resolved
     */
    public Class getMainActivityClass() {
        String packageName = context.getPackageName();
        Intent launchIntent = context.getPackageManager().getLaunchIntentForPackage(packageName);
        // FIX: getLaunchIntentForPackage may return null (no launchable activity);
        // the original code dereferenced it unconditionally and could NPE here.
        if (launchIntent == null || launchIntent.getComponent() == null) {
            Log.e(LOG_TAG, "No launch intent found for package " + packageName);
            return null;
        }
        String className = launchIntent.getComponent().getClassName();
        try {
            return Class.forName(className);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            return null;
        }
    }

    private AlarmManager getAlarmManager() {
        return (AlarmManager) context.getSystemService(Context.ALARM_SERVICE);
    }

    // Builds the broadcast PendingIntent used both to schedule and to cancel a
    // notification alarm. The request code is the notification id so that
    // AlarmManager.cancel() can match the same intent later.
    private PendingIntent toScheduleNotificationIntent(Bundle bundle) {
        int notificationID = Integer.parseInt(bundle.getString("id"));
        Intent notificationIntent = new Intent(context, RNPushNotificationPublisher.class);
        notificationIntent.putExtra(RNPushNotificationPublisher.NOTIFICATION_ID, notificationID);
        notificationIntent.putExtras(bundle);
        return PendingIntent.getBroadcast(context, notificationID, notificationIntent, PendingIntent.FLAG_UPDATE_CURRENT);
    }

    /**
     * Validates the bundle, persists the notification attributes (so the alarm can
     * be restored after reboot) and schedules it with AlarmManager.
     */
    public void sendNotificationScheduled(Bundle bundle) {
        Class intentClass = getMainActivityClass();
        if (intentClass == null) {
            Log.e(LOG_TAG, "No activity class found for the scheduled notification");
            return;
        }
        if (bundle.getString("message") == null) {
            Log.e(LOG_TAG, "No message specified for the scheduled notification");
            return;
        }
        if (bundle.getString("id") == null) {
            Log.e(LOG_TAG, "No notification ID specified for the scheduled notification");
            return;
        }
        double fireDate = bundle.getDouble("fireDate");
        if (fireDate == 0) {
            Log.e(LOG_TAG, "No date specified for the scheduled notification");
            return;
        }
        RNPushNotificationAttributes notificationAttributes = new RNPushNotificationAttributes(bundle);
        String id = notificationAttributes.getId();
        Log.d(LOG_TAG, "Storing push notification with id " + id);
        SharedPreferences.Editor editor = scheduledNotificationsPersistence.edit();
        editor.putString(id, notificationAttributes.toJson().toString());
        commit(editor);
        boolean isSaved = scheduledNotificationsPersistence.contains(id);
        if (!isSaved) {
            Log.e(LOG_TAG, "Failed to save " + id);
        }
        sendNotificationScheduledCore(bundle);
    }

    /** Registers the alarm with the most exact variant available on this API level. */
    public void sendNotificationScheduledCore(Bundle bundle) {
        long fireDate = (long) bundle.getDouble("fireDate");
        // If the fireDate is in past, this will fire immediately and show the
        // notification to the user
        PendingIntent pendingIntent = toScheduleNotificationIntent(bundle);
        if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.M) {
            // M+: exact even while the device is in Doze.
            getAlarmManager().setExactAndAllowWhileIdle(AlarmManager.RTC_WAKEUP, fireDate, pendingIntent);
        } else if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.KITKAT) {
            // KitKat+: plain set() became inexact, so request an exact alarm.
            getAlarmManager().setExact(AlarmManager.RTC_WAKEUP, fireDate, pendingIntent);
        } else {
            getAlarmManager().set(AlarmManager.RTC_WAKEUP, fireDate, pendingIntent);
        }
    }

    /**
     * Builds and posts the notification described by {@code bundle} to the
     * notification centre, then schedules the next occurrence if repeating.
     */
    public void sendToNotificationCentre(Bundle bundle) {
        try {
            Class intentClass = getMainActivityClass();
            if (intentClass == null) {
                Log.e(LOG_TAG, "No activity class found for the notification");
                return;
            }
            if (bundle.getString("message") == null) {
                // this happens when a 'data' notification is received - we do not synthesize a local notification in this case
                Log.d(LOG_TAG, "Cannot send to notification centre because there is no 'message' field in: " + bundle);
                return;
            }
            String notificationIdString = bundle.getString("id");
            if (notificationIdString == null) {
                Log.e(LOG_TAG, "No notification ID specified for the notification");
                return;
            }
            Resources res = context.getResources();
            String packageName = context.getPackageName();
            String title = bundle.getString("title");
            if (title == null) {
                // Default to the application label when no title is supplied.
                ApplicationInfo appInfo = context.getApplicationInfo();
                title = context.getPackageManager().getApplicationLabel(appInfo).toString();
            }
            NotificationCompat.Builder notification = new NotificationCompat.Builder(context)
                    .setContentTitle(title)
                    .setTicker(bundle.getString("ticker"))
                    .setVisibility(NotificationCompat.VISIBILITY_PRIVATE)
                    .setPriority(NotificationCompat.PRIORITY_MAX)
                    .setAutoCancel(bundle.getBoolean("autoCancel", true));
            String group = bundle.getString("group");
            if (group != null) {
                notification.setGroup(group);
            }
            // NOTE: getDouble returns a primitive (0.0 when the key is absent), so
            // this null check is always true; its net effect is that 'alarmUUID' is
            // (re)written into the bundle, creating the key with 0.0 when missing.
            // Kept as-is because the bundle is later attached to the intent extras.
            Double alarmUUID = bundle.getDouble("alarmUUID");
            if (alarmUUID != null) {
                bundle.putDouble("alarmUUID", alarmUUID);
            }
            notification.setContentText(bundle.getString("message"));
            String largeIcon = bundle.getString("largeIcon");
            String subText = bundle.getString("subText");
            if (subText != null) {
                notification.setSubText(subText);
            }
            String numberString = bundle.getString("number");
            if (numberString != null) {
                notification.setNumber(Integer.parseInt(numberString));
            }
            int smallIconResId;
            int largeIconResId;
            String smallIcon = bundle.getString("smallIcon");
            if (smallIcon != null) {
                smallIconResId = res.getIdentifier(smallIcon, "mipmap", packageName);
            } else {
                smallIconResId = res.getIdentifier("ic_notification", "mipmap", packageName);
            }
            if (smallIconResId == 0) {
                // Fall back to the launcher icon, then to a framework icon.
                smallIconResId = res.getIdentifier("ic_launcher", "mipmap", packageName);
                if (smallIconResId == 0) {
                    smallIconResId = android.R.drawable.ic_dialog_info;
                }
            }
            if (largeIcon != null) {
                largeIconResId = res.getIdentifier(largeIcon, "mipmap", packageName);
            } else {
                largeIconResId = res.getIdentifier("ic_launcher", "mipmap", packageName);
            }
            Bitmap largeIconBitmap = BitmapFactory.decodeResource(res, largeIconResId);
            if (largeIconResId != 0 && (largeIcon != null || Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP)) {
                notification.setLargeIcon(largeIconBitmap);
            }
            notification.setSmallIcon(smallIconResId);
            String bigText = bundle.getString("bigText");
            if (bigText == null) {
                bigText = bundle.getString("message");
            }
            notification.setStyle(new NotificationCompat.BigTextStyle().bigText(bigText));
            Intent intent = new Intent(context, intentClass);
            intent.addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP);
            bundle.putBoolean("userInteraction", true);
            intent.putExtra("notification", bundle);
            if (!bundle.containsKey("playSound") || bundle.getBoolean("playSound")) {
                Uri soundUri = RingtoneManager.getDefaultUri(RingtoneManager.TYPE_NOTIFICATION);
                String soundName = bundle.getString("soundName");
                if (soundName != null) {
                    if (!"default".equalsIgnoreCase(soundName)) {
                        // sound name can be full filename, or just the resource name.
                        // So the strings 'my_sound.mp3' AND 'my_sound' are accepted
                        // The reason is to make the iOS and android javascript interfaces compatible
                        int resId = context.getResources().getIdentifier(soundName, "raw", context.getPackageName());
                        if (resId == 0) {
                            // Not found as-is: retry with the file extension stripped.
                            // FIX: guard against a name with no '.' — the original
                            // substring(0, lastIndexOf('.')) threw
                            // StringIndexOutOfBoundsException when lastIndexOf == -1.
                            int dotIndex = soundName.lastIndexOf('.');
                            if (dotIndex != -1) {
                                soundName = soundName.substring(0, dotIndex);
                                resId = context.getResources().getIdentifier(soundName, "raw", context.getPackageName());
                            }
                        }
                        soundUri = Uri.parse("android.resource://" + context.getPackageName() + "/" + resId);
                    }
                }
                notification.setSound(soundUri, AudioManager.STREAM_ALARM);
            }
            // NOTE: the original condition was
            //   containsKey("ongoing") || getBoolean("ongoing")
            // Since getBoolean defaults to false for a missing key, the second
            // operand can never rescue a false first operand — this is equivalent.
            if (bundle.containsKey("ongoing")) {
                notification.setOngoing(bundle.getBoolean("ongoing"));
            }
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
                notification.setCategory(NotificationCompat.CATEGORY_ALARM);
                String color = bundle.getString("color");
                if (color != null) {
                    notification.setColor(Color.parseColor(color));
                }
            }
            int notificationID = Integer.parseInt(notificationIdString);
            PendingIntent pendingIntent = PendingIntent.getActivity(context, notificationID, intent,
                    PendingIntent.FLAG_UPDATE_CURRENT);
            NotificationManager notificationManager = notificationManager();
            notification.setContentIntent(pendingIntent);
            if (!bundle.containsKey("vibrate") || bundle.getBoolean("vibrate")) {
                long vibration = bundle.containsKey("vibration") ? (long) bundle.getDouble("vibration") : DEFAULT_VIBRATION;
                if (vibration == 0)
                    vibration = DEFAULT_VIBRATION;
                notification.setVibrate(new long[]{0, vibration});
            }
            JSONArray actionsArray = null;
            try {
                actionsArray = bundle.getString("actions") != null ? new JSONArray(bundle.getString("actions")) : null;
            } catch (JSONException e) {
                Log.e(LOG_TAG, "Exception while converting actions to JSON object.", e);
            }
            if (actionsArray != null) {
                // No icon for now. The icon value of 0 shows no icon.
                int icon = 0;
                // Add button for each actions.
                for (int i = 0; i < actionsArray.length(); i++) {
                    String action;
                    try {
                        action = actionsArray.getString(i);
                    } catch (JSONException e) {
                        Log.e(LOG_TAG, "Exception while getting action from actionsArray.", e);
                        continue;
                    }
                    Intent actionIntent = new Intent(context, intentClass);
                    actionIntent.addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP);
                    actionIntent.setAction(context.getPackageName() + "." + action);
                    // Add "action" for later identifying which button gets pressed.
                    bundle.putString("action", action);
                    actionIntent.putExtra("notification", bundle);
                    PendingIntent pendingActionIntent = PendingIntent.getActivity(context, notificationID, actionIntent,
                            PendingIntent.FLAG_UPDATE_CURRENT);
                    notification.addAction(icon, action, pendingActionIntent);
                }
            }
            // Remove the notification from the shared preferences once it has been shown
            // to avoid showing the notification again when the phone is rebooted. If the
            // notification is not removed, then every time the phone is rebooted, we will
            // try to reschedule all the notifications stored in shared preferences and since
            // these notifications will be in the past time, they will be shown immediately
            // to the user which we shouldn't do. So, remove the notification from the shared
            // preferences once it has been shown to the user. If it is a repeating notification
            // it will be scheduled again.
            if (scheduledNotificationsPersistence.getString(notificationIdString, null) != null) {
                SharedPreferences.Editor editor = scheduledNotificationsPersistence.edit();
                editor.remove(notificationIdString);
                commit(editor);
            }
            Notification info = notification.build();
            info.defaults |= Notification.DEFAULT_LIGHTS;
            if (bundle.containsKey("tag")) {
                String tag = bundle.getString("tag");
                notificationManager.notify(tag, notificationID, info);
            } else {
                notificationManager.notify(notificationID, info);
            }
            // Can't use setRepeating for recurring notifications because setRepeating
            // is inexact by default starting API 19 and the notifications are not fired
            // at the exact time. During testing, it was found that notifications could
            // late by many minutes.
            this.scheduleNextNotificationIfRepeating(bundle);
        } catch (Exception e) {
            Log.e(LOG_TAG, "failed to send push notification", e);
        }
    }

    // Computes the next fire date from "repeatType"/"repeatTime" and re-schedules
    // the notification (which also persists it again).
    private void scheduleNextNotificationIfRepeating(Bundle bundle) {
        String repeatType = bundle.getString("repeatType");
        long repeatTime = (long) bundle.getDouble("repeatTime");
        if (repeatType != null) {
            long fireDate = (long) bundle.getDouble("fireDate");
            boolean validRepeatType = Arrays.asList("time", "week", "day", "hour", "minute").contains(repeatType);
            // Sanity checks
            if (!validRepeatType) {
                Log.w(LOG_TAG, String.format("Invalid repeatType specified as %s", repeatType));
                return;
            }
            if ("time".equals(repeatType) && repeatTime <= 0) {
                Log.w(LOG_TAG, "repeatType specified as time but no repeatTime " +
                        "has been mentioned");
                return;
            }
            long newFireDate = 0;
            switch (repeatType) {
                case "time":
                    newFireDate = fireDate + repeatTime;
                    break;
                case "week":
                    newFireDate = fireDate + 7 * ONE_DAY;
                    break;
                case "day":
                    newFireDate = fireDate + ONE_DAY;
                    break;
                case "hour":
                    newFireDate = fireDate + ONE_HOUR;
                    break;
                case "minute":
                    newFireDate = fireDate + ONE_MINUTE;
                    break;
            }
            // Sanity check, should never happen
            if (newFireDate != 0) {
                Log.d(LOG_TAG, String.format("Repeating notification with id %s at time %s",
                        bundle.getString("id"), Long.toString(newFireDate)));
                bundle.putDouble("fireDate", newFireDate);
                this.sendNotificationScheduled(bundle);
            }
        }
    }

    /** Cancels every notification currently shown in the notification centre. */
    public void clearNotifications() {
        Log.i(LOG_TAG, "Clearing alerts from the notification centre");
        NotificationManager notificationManager = notificationManager();
        notificationManager.cancelAll();
    }

    /** Cancels every scheduled (persisted) notification. */
    public void cancelAllScheduledNotifications() {
        Log.i(LOG_TAG, "Cancelling all notifications");
        for (String id : scheduledNotificationsPersistence.getAll().keySet()) {
            cancelScheduledNotification(id);
        }
    }

    /** Cancels every scheduled notification whose stored attributes match {@code userInfo}. */
    public void cancelScheduledNotification(ReadableMap userInfo) {
        for (String id : scheduledNotificationsPersistence.getAll().keySet()) {
            try {
                String notificationAttributesJson = scheduledNotificationsPersistence.getString(id, null);
                Log.d(LOG_TAG, notificationAttributesJson);
                RNPushNotificationAttributes notificationAttributes = fromJson(notificationAttributesJson);
                if (notificationAttributes.matches(userInfo)) {
                    cancelScheduledNotification(id);
                }
            } catch (JSONException e) {
                Log.w(LOG_TAG, "Problem dealing with scheduled notification " + id, e);
            }
        }
    }

    // Cancels one scheduled notification: removes the alarm, the persisted
    // attributes, and any already-shown notification with that id.
    private void cancelScheduledNotification(String notificationIDString) {
        Log.i(LOG_TAG, "Cancelling notification: " + notificationIDString);
        // remove it from the alarm manger schedule
        Bundle b = new Bundle();
        b.putString("id", notificationIDString);
        getAlarmManager().cancel(toScheduleNotificationIntent(b));
        if (scheduledNotificationsPersistence.contains(notificationIDString)) {
            // remove it from local storage
            SharedPreferences.Editor editor = scheduledNotificationsPersistence.edit();
            editor.remove(notificationIDString);
            commit(editor);
        } else {
            Log.w(LOG_TAG, "Unable to find notification " + notificationIDString);
        }
        // removed it from the notification center
        NotificationManager notificationManager = notificationManager();
        notificationManager.cancel(Integer.parseInt(notificationIDString));
    }

    private NotificationManager notificationManager() {
        return (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);
    }

    // apply() is asynchronous and preferred; commit() only exists pre-Gingerbread.
    private static void commit(SharedPreferences.Editor editor) {
        if (Build.VERSION.SDK_INT < 9) {
            editor.commit();
        } else {
            editor.apply();
        }
    }
}
| |
/**
* Copyright 2011-2019 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.dmdl.windgate.util;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import com.asakusafw.dmdl.java.emitter.EmitContext;
import com.asakusafw.utils.java.model.syntax.Attribute;
import com.asakusafw.utils.java.model.syntax.ClassDeclaration;
import com.asakusafw.utils.java.model.syntax.Expression;
import com.asakusafw.utils.java.model.syntax.MethodDeclaration;
import com.asakusafw.utils.java.model.syntax.ModelFactory;
import com.asakusafw.utils.java.model.syntax.Name;
import com.asakusafw.utils.java.model.syntax.Type;
import com.asakusafw.utils.java.model.syntax.TypeBodyDeclaration;
import com.asakusafw.utils.java.model.syntax.WildcardBoundKind;
import com.asakusafw.utils.java.model.util.AttributeBuilder;
import com.asakusafw.utils.java.model.util.ExpressionBuilder;
import com.asakusafw.utils.java.model.util.JavadocBuilder;
import com.asakusafw.utils.java.model.util.Models;
import com.asakusafw.utils.java.model.util.TypeBuilder;
import com.asakusafw.vocabulary.external.ImporterDescription.DataSize;
import com.asakusafw.windgate.core.vocabulary.DataModelJdbcSupport;
/**
* Generates {@code JdbcImporterDescription} and {@code JdbcExporterDescription}.
* @since 0.7.3
*/
public final class JdbcProcessDescriptionGenerator {

    // Referenced by name (not by Class literal) to reduce library dependencies
    // of this generator module.
    private static final String IMPORTER_TYPE_NAME =
            "com.asakusafw.vocabulary.windgate.JdbcImporterDescription"; //$NON-NLS-1$

    // Referenced by name (not by Class literal) to reduce library dependencies
    // of this generator module.
    private static final String EXPORTER_TYPE_NAME =
            "com.asakusafw.vocabulary.windgate.JdbcExporterDescription"; //$NON-NLS-1$

    // Emit context supplying the model factory, type resolution and output sink.
    private final EmitContext context;

    // Meta-description of the class to generate.
    private final Description description;

    // Java model (AST) factory, obtained from the context.
    private final ModelFactory f;

    // true -> generate a JdbcImporterDescription, false -> a JdbcExporterDescription.
    private final boolean importer;

    private JdbcProcessDescriptionGenerator(
            EmitContext context,
            Description description,
            boolean importer) {
        assert context != null;
        assert description != null;
        this.context = context;
        this.f = context.getModelFactory();
        this.importer = importer;
        this.description = description;
    }

    /**
     * Generates the class in the context.
     * @param context the target emit context
     * @param description the meta-description of target class
     * @throws IOException if generation was failed by I/O error
     */
    public static void generateImporter(EmitContext context, Description description) throws IOException {
        JdbcProcessDescriptionGenerator generator = new JdbcProcessDescriptionGenerator(context, description, true);
        generator.emit();
    }

    /**
     * Generates the class in the context.
     * @param context the target emit context
     * @param description the meta-description of target class
     * @throws IOException if generation was failed by I/O error
     */
    public static void generateExporter(EmitContext context, Description description) throws IOException {
        JdbcProcessDescriptionGenerator generator = new JdbcProcessDescriptionGenerator(context, description, false);
        generator.emit();
    }

    // Builds the class declaration AST (javadoc, modifiers, superclass and
    // generated members) and writes it through the emit context.
    private void emit() throws IOException {
        ClassDeclaration decl = f.newClassDeclaration(
                new JavadocBuilder(f)
                    .inline("{0} for {1}.",
                            d -> d.text(description.getDescription()),
                            d -> d.linkType(context.resolve(description.getModelClassName())))
                    .toJavadoc(),
                getClassAttributes(),
                context.getTypeName(),
                context.resolve(Models.toName(f, importer ? IMPORTER_TYPE_NAME : EXPORTER_TYPE_NAME)),
                Collections.emptyList(),
                createMembers());
        context.emit(decl);
    }

    // The class is public; it is also abstract when any mandatory property is
    // missing, because the corresponding getter is then not generated and must
    // be supplied by a subclass.
    private List<? extends Attribute> getClassAttributes() {
        AttributeBuilder builder = new AttributeBuilder(f);
        builder.Public();
        if (description.getProfileName() == null
                || description.getTableName() == null
                || description.getColumnNames() == null
                || description.getSupportClassName() == null) {
            builder.Abstract();
        }
        return builder.toAttributes();
    }

    // Generates getModelType() unconditionally, plus one getter per optional
    // property that is actually set in the description.
    private List<TypeBodyDeclaration> createMembers() {
        List<TypeBodyDeclaration> results = new ArrayList<>();
        results.add(createGetModelType());
        if (description.getProfileName() != null) {
            results.add(createGetProfileName());
        }
        if (description.getTableName() != null) {
            results.add(createGetTableName());
        }
        if (description.getColumnNames() != null) {
            results.add(createGetColumnNames());
        }
        if (description.getCondition() != null) {
            results.add(createGetCondition());
        }
        if (description.getSupportClassName() != null) {
            results.add(createGetJdbcSupport());
        }
        if (description.getDataSize() != null) {
            results.add(createGetDataSize());
        }
        return results;
    }

    // Class<? extends <model>> getModelType() { return <model>.class; }
    private MethodDeclaration createGetModelType() {
        return createGetter(
                new TypeBuilder(f, context.resolve(Class.class))
                    .parameterize(f.newWildcard(
                            WildcardBoundKind.UPPER_BOUNDED,
                            context.resolve(description.getModelClassName())))
                    .toType(),
                "getModelType", //$NON-NLS-1$
                f.newClassLiteral(context.resolve(description.getModelClassName())));
    }

    // String getProfileName() { return "<profile>"; }
    private MethodDeclaration createGetProfileName() {
        return createGetter(
                context.resolve(String.class),
                "getProfileName", //$NON-NLS-1$
                Models.toLiteral(f, description.getProfileName()));
    }

    // Class<? extends DataModelJdbcSupport<?>> getJdbcSupport() { return <support>.class; }
    private MethodDeclaration createGetJdbcSupport() {
        return createGetter(
                new TypeBuilder(f, context.resolve(Class.class))
                    .parameterize(f.newWildcard(
                            WildcardBoundKind.UPPER_BOUNDED,
                            new TypeBuilder(f, context.resolve(DataModelJdbcSupport.class))
                                .parameterize(f.newWildcard())
                                .toType()))
                    .toType(),
                "getJdbcSupport", //$NON-NLS-1$
                f.newClassLiteral(context.resolve(description.getSupportClassName())));
    }

    // String getTableName() { return "<table>"; }
    private MethodDeclaration createGetTableName() {
        return createGetter(
                context.resolve(String.class),
                "getTableName", //$NON-NLS-1$
                Models.toLiteral(f, description.getTableName()));
    }

    // List<String> getColumnNames() { return Arrays.asList("c1", "c2", ...); }
    private MethodDeclaration createGetColumnNames() {
        List<Expression> arguments = new ArrayList<>();
        for (String name : description.getColumnNames()) {
            arguments.add(Models.toLiteral(f, name));
        }
        return createGetter(
                new TypeBuilder(f, context.resolve(List.class))
                    .parameterize(context.resolve(String.class))
                    .toType(),
                "getColumnNames", //$NON-NLS-1$
                new TypeBuilder(f, context.resolve(Arrays.class))
                    .method("asList", arguments) //$NON-NLS-1$
                    .toExpression());
    }

    // String getCondition() { return "<condition>"; }
    private MethodDeclaration createGetCondition() {
        return createGetter(
                context.resolve(String.class),
                "getCondition", //$NON-NLS-1$
                Models.toLiteral(f, description.getCondition()));
    }

    // DataSize getDataSize() { return DataSize.<value>; }
    private MethodDeclaration createGetDataSize() {
        Type type = context.resolve(DataSize.class);
        return createGetter(
                type,
                "getDataSize", //$NON-NLS-1$
                new TypeBuilder(f, type)
                    .field(description.getDataSize().name())
                    .toExpression());
    }

    // Common shape of every generated member: a public @Override getter whose
    // body is a single return of the given constant expression.
    private MethodDeclaration createGetter(
            com.asakusafw.utils.java.model.syntax.Type type,
            String name,
            Expression value) {
        assert type != null;
        assert name != null;
        assert value != null;
        return f.newMethodDeclaration(
                null,
                new AttributeBuilder(f)
                    .annotation(context.resolve(Override.class))
                    .Public()
                    .toAttributes(),
                type,
                f.newSimpleName(name),
                Collections.emptyList(),
                Arrays.asList(new ExpressionBuilder(f, value).toReturnStatement()));
    }

    /**
     * Represents the meta description.
     * @since 0.7.0
     */
    public static final class Description {

        private final String description;

        private final Name modelClassName;

        // Optional properties: a null value means "not set", which makes the
        // generated class abstract (see getClassAttributes()).
        private String tableName;

        private List<String> columnNames;

        private String condition;

        private String profileName;

        private Name supportClassName;

        private DataSize dataSize;

        /**
         * Creates a new instance.
         * @param description the textual description
         * @param modelClassName the target data model class name
         */
        public Description(String description, Name modelClassName) {
            this.description = description;
            this.modelClassName = modelClassName;
        }

        /**
         * Returns the textual description for the target class.
         * @return the description
         */
        public String getDescription() {
            return description;
        }

        /**
         * Returns the data model class name.
         * @return the data model class name
         */
        public Name getModelClassName() {
            return modelClassName;
        }

        /**
         * Returns the profile name.
         * @return the profile name, or {@code null} if it is not set
         */
        public String getProfileName() {
            return profileName;
        }

        /**
         * Sets the profile name.
         * @param value the value to set
         */
        public void setProfileName(String value) {
            this.profileName = value;
        }

        /**
         * Returns table name.
         * @return the path, or {@code null} if it is not set
         */
        public String getTableName() {
            return tableName;
        }

        /**
         * Sets the table name.
         * @param value the value to set
         */
        public void setTableName(String value) {
            this.tableName = value;
        }

        /**
         * Returns the column names.
         * @return the column names, or {@code null} if it is not set
         */
        public List<String> getColumnNames() {
            return columnNames;
        }

        /**
         * Sets column names.
         * @param value the value to set
         */
        public void setColumnNames(List<String> value) {
            this.columnNames = value;
        }

        /**
         * Returns condition.
         * @return the condition, or {@code null} if it is not set
         */
        public String getCondition() {
            return condition;
        }

        /**
         * Sets the condition.
         * @param value the value to set
         */
        public void setCondition(String value) {
            this.condition = value;
        }

        /**
         * Returns the format class name.
         * @return the format class name, or {@code null} if it is not set
         */
        public Name getSupportClassName() {
            return supportClassName;
        }

        /**
         * Sets the format class name.
         * @param value the value to set
         */
        public void setSupportClassName(Name value) {
            this.supportClassName = value;
        }

        /**
         * Returns the data size.
         * @return the data size, or {@code null} if it is not set
         */
        public DataSize getDataSize() {
            return dataSize;
        }

        /**
         * Sets the data size.
         * @param value the value to set
         */
        public void setDataSize(DataSize value) {
            this.dataSize = value;
        }
    }
}
| |
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is part of dcm4che, an implementation of DICOM(TM) in
* Java(TM), available at http://sourceforge.net/projects/dcm4che.
*
* The Initial Developer of the Original Code is
* TIANI Medgraph AG.
* Portions created by the Initial Developer are Copyright (C) 2003-2005
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Gunter Zeilinger <gunter.zeilinger@tiani.com>
* Franz Willer <franz.willer@gwi-ag.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
package org.dcm4chex.archive.ejb.entity;
import javax.ejb.CreateException;
import javax.ejb.EntityBean;
import org.apache.log4j.Logger;
import org.dcm4chex.archive.ejb.interfaces.AEDTO;
/**
* Application Entity bean.
*
* @author <a href="mailto:gunter@tiani.com">Gunter Zeilinger</a>
*
* @ejb.bean name="AE" type="CMP" view-type="local" primkey-field="pk"
* local-jndi-name="ejb/AE"
* @jboss.container-configuration name="Instance Per Transaction CMP 2.x EntityBean"
*
* @ejb.transaction type="Required"
*
* @ejb.persistence table-name="ae"
*
* @jboss.entity-command name="hsqldb-fetch-key"
*
* @ejb.finder
* signature="java.util.Collection findAll()"
* query="SELECT OBJECT(a) FROM AE AS a" transaction-type="Supports"
* @jboss.query
* signature="java.util.Collection findAll()"
* strategy="on-find"
* eager-load-group="*"
*
* @ejb.finder
* signature="org.dcm4chex.archive.ejb.interfaces.AELocal findByAET(java.lang.String aet)"
* query="SELECT OBJECT(a) FROM AE AS a WHERE a.title = ?1"
* transaction-type="Supports"
* @jboss.query
* signature="org.dcm4chex.archive.ejb.interfaces.AELocal findByAET(java.lang.String aet)"
* strategy="on-find"
* eager-load-group="*"
* @ejb.finder
* signature="java.util.Collection findByHostName(java.lang.String hostName)"
* query="SELECT OBJECT(a) FROM AE AS a WHERE a.hostName = ?1"
* transaction-type="Supports"
* @jboss.query
* signature="org.dcm4chex.archive.ejb.interfaces.AELocal findByHostName(java.lang.String hostName)"
* strategy="on-find"
* eager-load-group="*"
*
*
*/
public abstract class AEBean implements EntityBean {

    private static final Logger log = Logger.getLogger(AEBean.class);

    /**
     * Auto-generated Primary Key
     *
     * @ejb.interface-method
     * @ejb.pk-field
     * @ejb.persistence column-name="pk"
     * @jboss.persistence auto-increment="true"
     *
     */
    public abstract Long getPk();

    // BUGFIX: this abstract setter was previously misnamed "getPk(Long)".
    // CMP 2.x requires the accessor pair getPk()/setPk(Long) for the
    // pk-field, so the misnamed void method could never be bound by the
    // container.
    public abstract void setPk(Long pk);

    /**
     * Application Entity Title
     *
     * @ejb.interface-method
     * @ejb.persistence column-name="aet"
     */
    public abstract String getTitle();

    /**
     * @ejb.interface-method
     */
    public abstract void setTitle(String title);

    /**
     * @ejb.interface-method
     * @ejb.persistence column-name="hostname"
     */
    public abstract String getHostName();

    /**
     * @ejb.interface-method
     */
    public abstract void setHostName(String name);

    /**
     * @ejb.interface-method
     * @ejb.persistence column-name="port"
     */
    public abstract int getPort();

    /**
     * @ejb.interface-method
     */
    public abstract void setPort(int port);

    /**
     * @ejb.interface-method
     * @ejb.persistence column-name="cipher_suites"
     */
    public abstract String getCipherSuites();

    /**
     * @ejb.interface-method
     */
    public abstract void setCipherSuites(String cipherSuites);

    /**
     * @ejb.interface-method
     * @ejb.persistence column-name="pat_id_issuer"
     */
    public abstract String getIssuerOfPatientID();

    /**
     * @ejb.interface-method
     */
    public abstract void setIssuerOfPatientID(String issuer);

    /**
     * @ejb.interface-method
     * @ejb.persistence column-name="acc_no_issuer"
     */
    public abstract String getIssuerOfAccessionNumber();

    /**
     * @ejb.interface-method
     */
    public abstract void setIssuerOfAccessionNumber(String issuer);

    /**
     * @ejb.interface-method
     * @ejb.persistence column-name="station_name"
     */
    public abstract String getStationName();

    /**
     * @ejb.interface-method
     */
    public abstract void setStationName(String stationName);

    /**
     * @ejb.interface-method
     * @ejb.persistence column-name="institution"
     */
    public abstract String getInstitution();

    /**
     * @ejb.interface-method
     */
    public abstract void setInstitution(String institution);

    /**
     * @ejb.interface-method
     * @ejb.persistence column-name="department"
     */
    public abstract String getDepartment();

    /**
     * @ejb.interface-method
     */
    public abstract void setDepartment(String department);

    /**
     * @ejb.interface-method
     * @ejb.persistence column-name="installed"
     */
    public abstract boolean getInstalled();

    /**
     * @ejb.interface-method
     */
    public abstract void setInstalled(boolean installed);

    /**
     * @ejb.interface-method
     * @ejb.persistence column-name="user_id"
     */
    public abstract String getUserID();

    /**
     * @ejb.interface-method
     */
    public abstract void setUserID(String user);

    /**
     * @ejb.interface-method
     * @ejb.persistence column-name="passwd"
     */
    public abstract String getPassword();

    /**
     * @ejb.interface-method
     */
    public abstract void setPassword(String passwd);

    /**
     * @ejb.interface-method
     * @ejb.persistence column-name="fs_group_id"
     */
    public abstract String getFileSystemGroupID();

    /**
     * @ejb.interface-method
     */
    public abstract void setFileSystemGroupID(String id);

    /**
     * @ejb.interface-method
     * @ejb.persistence column-name="ae_group"
     */
    public abstract String getGroup();

    /**
     * @ejb.interface-method
     */
    public abstract void setGroup(String group);

    /**
     * @ejb.interface-method
     * @ejb.persistence column-name="ae_desc"
     */
    public abstract String getDescription();

    /**
     * @ejb.interface-method
     */
    public abstract void setDescription(String desc);

    /**
     * @ejb.interface-method
     * @ejb.persistence column-name="wado_url"
     */
    public abstract String getWadoURL();

    /**
     * @ejb.interface-method
     */
    public abstract void setWadoURL(String desc);

    /**
     * @ejb.interface-method
     * @ejb.persistence column-name="vendor_data"
     */
    public abstract byte[] getVendorData();

    /**
     * @ejb.interface-method
     */
    public abstract void setVendorData(byte[] vendorData);

    /**
     * Creates a new AE entity populated from the given DTO.
     * Returning null lets the container supply the auto-generated pk
     * (standard CMP convention for auto-increment keys).
     *
     * @ejb.create-method
     */
    public Long ejbCreate(AEDTO dto) throws CreateException {
        if (log.isDebugEnabled()) {
            log.debug("create AEBean(" + dto.getTitle() + ")");
        }
        update(dto);
        return null;
    }

    public void ejbPostCreate(AEDTO dto) throws CreateException {
    }

    /**
     * Copies every mutable field from the DTO into this entity.
     * The AE title is trimmed of surrounding whitespace; all other
     * values are stored as-is.
     *
     * @ejb.interface-method
     */
    public void update(AEDTO dto) {
        setTitle(dto.getTitle().trim());
        setHostName(dto.getHostName());
        setPort(dto.getPort());
        setCipherSuites(dto.getCipherSuitesAsString());
        setIssuerOfPatientID(dto.getIssuerOfPatientID());
        setIssuerOfAccessionNumber(dto.getIssuerOfAccessionNumberAsString());
        setUserID(dto.getUserID());
        setPassword(dto.getPassword());
        setFileSystemGroupID(dto.getFileSystemGroupID());
        setGroup(dto.getGroup());
        setDescription(dto.getDescription());
        setWadoURL(dto.getWadoURL());
        setStationName(dto.getStationName());
        setInstitution(dto.getInstitution());
        setDepartment(dto.getDepartment());
        setVendorData(dto.getVendorData());
        setInstalled(dto.isInstalled());
    }

    /**
     * Builds a detached DTO snapshot of this entity.
     * NOTE(review): getPk() is dereferenced unconditionally; it is assumed
     * to be non-null for a persisted entity — confirm if this can be called
     * before creation completes.
     *
     * @ejb.interface-method
     * @ejb.transaction type="Supports"
     */
    public AEDTO toDTO() {
        AEDTO dto = new AEDTO();
        dto.setPk(getPk().longValue());
        dto.setTitle(getTitle());
        dto.setHostName(getHostName());
        dto.setPort(getPort());
        dto.setCipherSuitesAsString(getCipherSuites());
        dto.setIssuerOfPatientID(getIssuerOfPatientID());
        dto.setIssuerOfAccessionNumberAsString(getIssuerOfAccessionNumber());
        dto.setUserID(getUserID());
        dto.setPassword(getPassword());
        dto.setFileSystemGroupID(getFileSystemGroupID());
        dto.setGroup(getGroup());
        dto.setDescription(getDescription());
        dto.setWadoURL(getWadoURL());
        dto.setStationName(getStationName());
        dto.setInstitution(getInstitution());
        dto.setDepartment(getDepartment());
        dto.setVendorData(getVendorData());
        dto.setInstalled(getInstalled());
        return dto;
    }

    /**
     * Renders this AE as "protocol://TITLE@host:port".
     *
     * @ejb.interface-method
     * @ejb.transaction type="Supports"
     */
    public String asString() {
        StringBuffer sb = new StringBuffer(64);
        sb.append(getProtocol()).append("://").append(getTitle()).append('@')
                .append(getHostName()).append(':').append(getPort());
        return sb.toString();
    }

    // Maps the configured cipher-suite string to the protocol scheme used in
    // asString(): empty/null -> plain "dicom", a few well-known suites map to
    // specific "dicom-tls.*" variants, anything else -> generic "dicom-tls".
    private String getProtocol() {
        String cipherSuites = getCipherSuites();
        if (cipherSuites == null || cipherSuites.length() == 0) {
            return "dicom";
        }
        if ("SSL_RSA_WITH_NULL_SHA".equals(cipherSuites)) {
            return "dicom-tls.nodes";
        }
        if ("SSL_RSA_WITH_3DES_EDE_CBC_SHA".equals(cipherSuites)) {
            return "dicom-tls.3des";
        }
        if ("TLS_RSA_WITH_AES_128_CBC_SHA,SSL_RSA_WITH_3DES_EDE_CBC_SHA"
                .equals(cipherSuites)) {
            return "dicom-tls.aes";
        }
        return "dicom-tls";
    }
}
| |
/**
 * AlertWindow is the class that loads the tasks that are due within x days
 * and displays a new window to the user if the user clicks on the dialog
 * when Geekdo launches.
 *
 * @author Steve Ng
 * @version 0.2
 * @since 2010-10-30
 *
 * Note to Developers: Edit this class to add more functionality, such as
 * completing a task or deleting a task from this dialog.
 *
 */
package GUI;
import java.util.Calendar;
import logic.Controller;
import objects.Result;
import org.eclipse.swt.events.MouseAdapter;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.events.MouseMoveListener;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.custom.ScrolledComposite;
import org.eclipse.swt.SWT;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.ToolBar;
import org.eclipse.swt.widgets.ToolItem;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Point;
import org.eclipse.wb.swt.SWTResourceManager;
public class AlertWindow extends Composite {
    // NOTE(review): this Shell is static but re-assigned by every constructor
    // call, so only the most recently created AlertWindow is reachable here.
    private static Shell alertWindowDialog;
    private static int noOfTasksInAlertWindow = 0;
    // variable that alertWindow checks before it opens itself
    private static boolean canDialogOpen = false;
    // variables for the length and height of the dialog
    private static final int X_LENGTH_OF_GUI = 310;
    private static final int Y_LENGTH_OF_GUI = 318;
    // variables for drag capabilities of the composite
    private boolean isMouseNotClicked = true;
    private Point locationOfBeforeClicking;
    // Final variables for RGB colors, yellow for tasks that are not overdue
    private static final int YELLOW_R = 255;
    private static final int YELLOW_G = 247;
    private static final int YELLOW_B = 153;
    // year stored in backend to indicate no year set
    private static final int NO_YEAR_SET = 3000;

    /**
     * Builds the alert dialog shell and its contents; the dialog is only
     * packed and opened when canDialogOpen was enabled via setSwitchToOpen().
     */
    public AlertWindow(Composite parent, int style) {
        super(parent, style);
        alertWindowDialog = new Shell(getDisplay(), SWT.NONE);
        alertWindowDialog.setSize(X_LENGTH_OF_GUI, Y_LENGTH_OF_GUI);
        alertWindowDialog.setBackgroundMode(SWT.INHERIT_DEFAULT);
        alertWindowDialog.setBackgroundImage(SWTResourceManager.getImage(UserInterface.class, "/GUI/Alertwindow_background.png"));
        createTopCompositeAndItsControls();
        createMidCompositeAndItsControls();
        if (canDialogOpen) {
            alertWindowDialog.pack();
            alertWindowDialog.open();
        }
    }

    // #############################################################################################
    // Second level abstraction
    // #############################################################################################

    /** Creates the draggable title bar with the close button and title label. */
    private void createTopCompositeAndItsControls() {
        // creation of topComposite
        Composite topComposite = new Composite(alertWindowDialog, SWT.NONE);
        topComposite.setBounds(0, 0, 308, 50);
        topComposite.setBackgroundMode(SWT.INHERIT_DEFAULT);
        createDragCapabilities(topComposite);
        // creation of close button
        ToolBar toolBarForCloseButton = new ToolBar(topComposite, SWT.FLAT);
        toolBarForCloseButton.setBackground(SWTResourceManager.getColor(SWT.TRANSPARENT));
        toolBarForCloseButton.setBounds(X_LENGTH_OF_GUI - 50, 7, 40, 40);
        ToolItem buttonClose = new ToolItem(toolBarForCloseButton, SWT.NONE);
        buttonClose.addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                alertWindowDialog.close();
            }
        });
        buttonClose.setImage(SWTResourceManager.getImage(UserInterface.class, "/GUI/button_close.png"));
        // creation of alert window dialog title
        Label lblTitle = new Label(topComposite, SWT.NONE);
        lblTitle.setFont(SWTResourceManager.getFont("Segoe UI Semibold", 13, SWT.NORMAL));
        lblTitle.setForeground(SWTResourceManager.getColor(255, 255, 0));
        lblTitle.setBounds(10, 14, 215, 25);
        lblTitle.setText("Tasks that are due in " + UserInterface.getNoOfDaysForAlertReminder() + " days");
        createDragCapabilities(lblTitle);
    }

    /** Creates the scrollable body listing each qualifying task and its end date. */
    private void createMidCompositeAndItsControls() {
        noOfTasksInAlertWindow = 0;
        // creation of the scrolledComposite
        ScrolledComposite midScrolledComposite = new ScrolledComposite(alertWindowDialog, SWT.V_SCROLL);
        midScrolledComposite.setBounds(0, 46, X_LENGTH_OF_GUI, Y_LENGTH_OF_GUI - 46);
        midScrolledComposite.setExpandHorizontal(true);
        midScrolledComposite.setExpandVertical(true);
        // creation of the main composite inside scrolled composite
        Composite midComposite = new Composite(midScrolledComposite, SWT.NONE);
        midComposite.setBackgroundImage(SWTResourceManager.getImage(UserInterface.class, "/GUI/GUI_background.png"));
        midComposite.setBackgroundMode(SWT.INHERIT_DEFAULT);
        // creation of the "Task Description" title
        Label lblTaskDescription = new Label(midComposite, SWT.NONE);
        lblTaskDescription.setFont(SWTResourceManager.getFont("Segoe UI Semibold", 11, SWT.NORMAL));
        lblTaskDescription.setForeground(SWTResourceManager.getColor(YELLOW_R, YELLOW_G, YELLOW_B));
        lblTaskDescription.setBounds(10, 13, 115, 20);
        lblTaskDescription.setText("Task Description");
        // creation of the "End Date" title
        Label lblEndDate = new Label(midComposite, SWT.NONE);
        lblEndDate.setFont(SWTResourceManager.getFont("Segoe UI Semibold", 11, SWT.NORMAL));
        lblEndDate.setForeground(SWTResourceManager.getColor(YELLOW_R, YELLOW_G, YELLOW_B));
        lblEndDate.setBounds(203, 13, 104, 20);
        lblEndDate.setText("End Date");
        loadAllAlertTasks(midComposite);
        midScrolledComposite.setContent(midComposite);
        midScrolledComposite.setMinSize(midComposite.computeSize(SWT.DEFAULT, SWT.DEFAULT));
    }

    // #############################################################################################
    // Third level abstraction
    // #############################################################################################

    /**
     * Method allow the composite to be with drag capabilities
     * @param composite
     */
    private void createDragCapabilities(final Control composite) {
        composite.addMouseListener(new MouseAdapter() {
            @Override
            public void mouseDown(MouseEvent e) {
                isMouseNotClicked = false;
                locationOfBeforeClicking = getDisplay().getCursorLocation();
            }

            @Override
            public void mouseUp(MouseEvent e) {
                isMouseNotClicked = true;
            }
        });
        composite.addMouseMoveListener(new MouseMoveListener() {
            @Override
            public void mouseMove(MouseEvent arg0) {
                if (isMouseNotClicked == false) {
                    // Move the active shell by the delta the cursor travelled
                    // since the last mouse event.
                    Point locationOfNewWindow = getDisplay().getCursorLocation();
                    Point shellLocation = getDisplay().getActiveShell().getLocation();
                    locationOfNewWindow = new Point(
                            shellLocation.x
                                    + (locationOfNewWindow.x - locationOfBeforeClicking.x),
                            shellLocation.y
                                    + (locationOfNewWindow.y - locationOfBeforeClicking.y));
                    getDisplay().getActiveShell().setLocation(locationOfNewWindow);
                    locationOfBeforeClicking = getDisplay().getCursorLocation();
                }
            }
        });
    }

    /** Fetches open tasks and populates the list with those due within range. */
    private void loadAllAlertTasks(Composite midComposite) {
        Result allOpenTasks = Controller.getAllOpenTasks();
        int userDefinedDaysForAlert = UserInterface.getNoOfDaysForAlertReminder();
        // Normalise "today" to local midnight so day comparisons ignore the
        // time-of-day component.
        Calendar todayDate = Calendar.getInstance();
        int todayDateMonth = todayDate.get(Calendar.MONTH);
        int todayDateDay = todayDate.get(Calendar.DATE);
        int todayDateYear = todayDate.get(Calendar.YEAR);
        todayDate.clear();
        todayDate.set(todayDateYear, todayDateMonth, todayDateDay);
        populateTaskList(midComposite, allOpenTasks,
                userDefinedDaysForAlert, todayDate, todayDateYear);
    }

    // #############################################################################################
    // Fourth level abstraction
    // #############################################################################################

    /**
     * Method loop through the task list and check for tasks with due date
     * within the range to be displayed into the alert window
     */
    private void populateTaskList(Composite midComposite,
            Result allOpenTasks, int userDefinedDaysForAlert,
            Calendar todayDate, int todayDateYear) {
        for (int i = 0; i < allOpenTasks.getData().size(); i++) {
            Calendar taskEndDate = allOpenTasks.getData().get(i).getEndDate();
            int taskEndDateMonth = taskEndDate.get(Calendar.MONTH);
            int taskEndDateDay = taskEndDate.get(Calendar.DATE);
            int taskEndDateYear = taskEndDate.get(Calendar.YEAR);
            // Tasks stored without a year use the NO_YEAR_SET sentinel;
            // treat them as due in the current year.
            if (taskEndDateYear == NO_YEAR_SET)
                taskEndDateYear = todayDateYear;
            taskEndDate.clear();
            taskEndDate.set(taskEndDateYear, taskEndDateMonth, taskEndDateDay);
            int dayBetween = daysBetween(todayDate, taskEndDate);
            // dayBetween > -1 keeps "due today" (0) and excludes overdue (-1).
            if (dayBetween <= userDefinedDaysForAlert && dayBetween > -1) {
                // creation of the task description label
                Label lblTaskDescription = new Label(midComposite, SWT.NONE);
                lblTaskDescription.setBounds(10, 40 + noOfTasksInAlertWindow * 30, 180, 20);
                lblTaskDescription.setFont(SWTResourceManager.getFont("Segoe UI", 10, SWT.NORMAL));
                lblTaskDescription.setForeground(SWTResourceManager.getColor(YELLOW_R, YELLOW_G, YELLOW_B));
                lblTaskDescription.setText(allOpenTasks.getData().get(i).getDetails());
                // creation of the end date label
                Label lblEndDate = new Label(midComposite, SWT.NONE);
                lblEndDate.setBounds(203, 40 + noOfTasksInAlertWindow * 30, 65, 20);
                lblEndDate.setFont(SWTResourceManager.getFont("Segoe UI", 10, SWT.NORMAL));
                lblEndDate.setForeground(SWTResourceManager.getColor(YELLOW_R, YELLOW_G, YELLOW_B));
                setEndDateLabelBasedOnDayBetween(lblEndDate,
                        taskEndDateMonth, taskEndDateDay, dayBetween);
                noOfTasksInAlertWindow++;
            }
        }
    }

    /**
     * Method check the day between for todayDate and the task end date
     * and set the end date label accordingly to the day between
     *
     */
    private void setEndDateLabelBasedOnDayBetween(Label lblEndDate,
            int taskEndDateMonth, int taskEndDateDay, int dayBetween) {
        if (dayBetween == 0)
            lblEndDate.setText("Today");
        else if (dayBetween == 1)
            lblEndDate.setText("Tomorrow");
        else if (dayBetween < 8)
            lblEndDate.setText("In " + dayBetween + " days");
        else
            lblEndDate.setText(taskEndDateDay + "/" + (taskEndDateMonth + 1));
    }

    /**
     * Method calculate the daysBetween 2 given date
     *
     * @return -1 if todayDate is after taskEndDate
     * @return else return the number of days differences between 2 given date
     *
     */
    private int daysBetween(Calendar todayDate, Calendar taskEndDate) {
        if (todayDate.after(taskEndDate)) {
            return -1;
        }
        long diffMillis = taskEndDate.getTime().getTime() - todayDate.getTime().getTime();
        // BUGFIX: round instead of truncating. Both calendars are normalised
        // to local midnight, but a DST transition makes one "day" 23 or 25
        // hours long; integer division by 24h would then be off by one.
        return (int) Math.round(diffMillis / (1000.0 * 60 * 60 * 24));
    }

    // #############################################################################################
    // Package GUI API
    // #############################################################################################

    /** Enables or disables opening of the dialog on construction. */
    public static void setSwitchToOpen(boolean setSwitchToOpen) {
        canDialogOpen = setSwitchToOpen;
    }

    /** @return the number of tasks rendered by the last populated window. */
    public static int getNoOfTasksInAlertWindow() {
        return noOfTasksInAlertWindow;
    }
}
| |
package pl.poznan.put.promethee.xmcda;
import org.xmcda.*;
import org.xmcda.Alternative;
import org.xmcda.CategoriesProfiles;
import org.xmcda.CategoriesValues;
import org.xmcda.Category;
import org.xmcda.CategoryProfile;
import org.xmcda.Criterion;
import org.xmcda.XMCDA;
import org.xmcda.utils.Coord;
import pl.poznan.put.promethee.exceptions.InputDataException;
import java.math.BigDecimal;
import java.util.*;
import java.util.stream.Collectors;
/**
* Created by Maciej Uniejewski on 2016-11-11.
*/
public class InputsHandler {
    // Private constructor: this class is a static utility and must not be instantiated.
    private InputsHandler() {
    }
    /**
     * Plain data holder for all validated program inputs extracted from the
     * XMCDA structures: alternatives, categories, central profiles, criteria
     * with their weights, the category ranking, both boolean parameters, and
     * the partial preference matrix.
     */
    public static class Inputs {
        private List<String> alternativesIds;
        private List<String> categoriesIds;
        private List<String> profilesIds;
        private List<String> criteriaIds;
        private Boolean assignToABetterClass;
        private Boolean useMarginalValue;
        private Map<String, BigDecimal> criteriaWeights;
        // category id -> rank (1 = best, N = number of categories)
        private Map<String, Integer> categoriesRanking;
        // sorted by rank in checkAndExtractProfilesIds
        private List<CategoryProfile> categoryProfiles;
        // first id -> second id -> criterion id -> partial preference value
        private Map<String, Map<String, Map<String, BigDecimal>>> partialPreferences;

        public List<String> getAlternativesIds() {
            return alternativesIds;
        }

        public void setAlternativesIds(List<String> alternativesIds) {
            this.alternativesIds = alternativesIds;
        }

        public List<String> getCategoriesIds() {
            return categoriesIds;
        }

        public void setCategoriesIds(List<String> categoriesIds) {
            this.categoriesIds = categoriesIds;
        }

        public List<String> getProfilesIds() {
            return profilesIds;
        }

        public void setProfilesIds(List<String> profilesIds) {
            this.profilesIds = profilesIds;
        }

        public Boolean getAssignToABetterClass() {
            return assignToABetterClass;
        }

        public void setAssignToABetterClass(Boolean assignToABetterClass) {
            this.assignToABetterClass = assignToABetterClass;
        }

        public Map<String, Integer> getCategoriesRanking() {
            return categoriesRanking;
        }

        public void setCategoriesRanking(Map<String, Integer> categoriesRanking) {
            this.categoriesRanking = categoriesRanking;
        }

        public List<CategoryProfile> getCategoryProfiles() {
            return categoryProfiles;
        }

        public void setCategoryProfiles(List<CategoryProfile> categoryProfiles) {
            this.categoryProfiles = categoryProfiles;
        }

        public Map<String, Map<String, Map<String, BigDecimal>>> getPartialPreferences() {
            return partialPreferences;
        }

        public void setPartialPreferences(Map<String, Map<String, Map<String, BigDecimal>>> partialPreferences) {
            this.partialPreferences = partialPreferences;
        }

        public List<String> getCriteriaIds() {
            return criteriaIds;
        }

        public void setCriteriaIds(List<String> criteriaIds) {
            this.criteriaIds = criteriaIds;
        }

        public Boolean getUseMarginalValue() {
            return useMarginalValue;
        }

        public void setUseMarginalValue(Boolean useMarginalValue) {
            this.useMarginalValue = useMarginalValue;
        }

        public Map<String, BigDecimal> getCriteriaWeights() {
            return criteriaWeights;
        }

        public void setCriteriaWeights(Map<String, BigDecimal> criteriaWeights) {
            this.criteriaWeights = criteriaWeights;
        }
    }
public static Inputs checkAndExtractInputs(XMCDA xmcda, ProgramExecutionResult xmcdaExecResults) {
Inputs inputsDict = checkInputs(xmcda, xmcdaExecResults);
if (xmcdaExecResults.isError())
return null;
return inputsDict;
}
    /**
     * Runs every validation/extraction step in order and returns the partially
     * or fully populated {@link Inputs}. Errors are accumulated in {@code errors};
     * the first failing step aborts the remaining ones via InputDataException.
     * NOTE: the call order matters — e.g. checkCategoriesRanking reads the
     * categories ids set by checkAndExtractCategories, and
     * checkAndExtractPartialPreferences reads both alternatives and profiles ids.
     */
    protected static Inputs checkInputs(XMCDA xmcda, ProgramExecutionResult errors) {
        Inputs inputs = new Inputs();
        try {
            checkAndExtractAlternatives(inputs, xmcda, errors);
            checkAndExtractParameters(inputs, xmcda, errors);
            checkAndExtractCategories(inputs, xmcda, errors);
            checkCategoriesRanking(inputs, xmcda, errors);
            checkAndExtractProfilesIds(inputs, xmcda, errors);
            checkAndExtractCriteria(inputs, xmcda, errors);
            checkAndExtractPartialPreferences(inputs, xmcda, errors);
        } catch (InputDataException exception) {
            // Intentionally swallowed: the error was already recorded in
            // 'errors'; the exception only short-circuits later steps.
            //Just catch the exceptions and skip other functions
        }
        return inputs;
    }
protected static void checkAndExtractAlternatives(Inputs inputs, XMCDA xmcda, ProgramExecutionResult errors) throws InputDataException {
if (xmcda.alternatives.isEmpty()) {
String errorMessage = "No alternatives list has been supplied.";
errors.addError(errorMessage);
throw new InputDataException(errorMessage);
}
List<String> alternativesIds = xmcda.alternatives.getActiveAlternatives().stream().filter(a -> "alternatives".equals(a.getMarker())).map(
Alternative::id).collect(Collectors.toList());
if (alternativesIds.isEmpty()) {
String errorMessage = "The alternatives list can not be empty.";
errors.addError(errorMessage);
throw new InputDataException(errorMessage);
}
inputs.setAlternativesIds(alternativesIds);
}
protected static void checkAndExtractParameters(Inputs inputs, XMCDA xmcda, ProgramExecutionResult errors) throws InputDataException {
if (xmcda.programParametersList.size() > 1) {
String errorMessage = "Only one programParameter is expected.";
errors.addError(errorMessage);
throw new InputDataException(errorMessage);
}
if (xmcda.programParametersList.isEmpty()) {
String errorMessage = "No programParameter found.";
errors.addError(errorMessage);
throw new InputDataException(errorMessage);
}
if (xmcda.programParametersList.get(0).size() != 2) {
String errorMessage = "Parameter's list must contain exactly two elements.";
errors.addError(errorMessage);
throw new InputDataException(errorMessage);
}
checkAndExtractAssignToABetterClass(inputs, xmcda, errors);
checkAndExtractUseMarginalValue(inputs, xmcda, errors);
}
protected static void checkAndExtractAssignToABetterClass(Inputs inputs, XMCDA xmcda, ProgramExecutionResult errors) throws InputDataException {
Boolean assignToABetterClass;
final ProgramParameter<?> prgParam1 = xmcda.programParametersList.get(0).get(0);
if (!"assignToABetterClass".equalsIgnoreCase(prgParam1.id())) {
String errorMessage = String.format("Invalid parameter w/ id '%s'", prgParam1.id());
errors.addError(errorMessage);
throw new InputDataException(errorMessage);
}
if (prgParam1.getValues() == null || (prgParam1.getValues() != null && prgParam1.getValues().size() != 1)) {
String errorMessage = "Parameter assignToABetterClass must have a single (boolean) value only";
errors.addError(errorMessage);
throw new InputDataException(errorMessage);
}
try {
assignToABetterClass = (Boolean) prgParam1.getValues().get(0).getValue();
if (assignToABetterClass == null) {
String errorMessage = "Invalid value for parameter assignToABetterClass, it must be true or false.";
errors.addError(errorMessage);
throw new InputDataException(errorMessage);
}
inputs.setAssignToABetterClass(assignToABetterClass);
} catch (InputDataException e) {
throw e;
} catch (Exception exception) {
String err = "Invalid value for parameter assignToABetterClass, it must be true or false.";
errors.addError(err);
throw new InputDataException(err);
}
}
protected static void checkAndExtractUseMarginalValue(Inputs inputs, XMCDA xmcda, ProgramExecutionResult errors) throws InputDataException {
Boolean useMarginalValue;
final ProgramParameter<?> prgParam = xmcda.programParametersList.get(0).get(1);
if (!"useMarginalValue".equalsIgnoreCase(prgParam.id())) {
String errorMessage = String.format("Invalid parameter w/ id '%s'", prgParam.id());
errors.addError(errorMessage);
throw new InputDataException(errorMessage);
}
if (prgParam.getValues() == null || (prgParam.getValues() != null && prgParam.getValues().size() != 1)) {
String errorMessage = "Parameter useMarginalValue must have a single (boolean) value only";
errors.addError(errorMessage);
throw new InputDataException(errorMessage);
}
try {
useMarginalValue = (Boolean) prgParam.getValues().get(0).getValue();
if (useMarginalValue == null) {
String errorMessage = "Invalid value for parameter useMarginalValue, it must be true or false.";
errors.addError(errorMessage);
throw new InputDataException(errorMessage);
}
inputs.setUseMarginalValue(useMarginalValue);
} catch (InputDataException e) {
throw e;
} catch (Exception exception) {
String err = "Invalid value for parameter useMarginalValue, it must be true or false.";
errors.addError(err);
throw new InputDataException(err);
}
}
protected static void checkAndExtractCategories(Inputs inputs, XMCDA xmcda, ProgramExecutionResult errors) throws InputDataException {
if (xmcda.categories.isEmpty()) {
String errorMessage = "No categories has been supplied.";
errors.addError(errorMessage);
throw new InputDataException(errorMessage);
}
if (xmcda.categories.size() == 1) {
String errorMessage = "You should supply at least 2 categories.";
errors.addError(errorMessage);
throw new InputDataException(errorMessage);
}
List<String> categories = xmcda.categories.getActiveCategories().stream().filter(a -> "categories".equals(a.getMarker())).map(
Category::id).collect(Collectors.toList());
inputs.setCategoriesIds(categories);
if (categories.isEmpty()) {
String errorMessage = "The category list can not be empty.";
errors.addError(errorMessage);
throw new InputDataException(errorMessage);
}
}
    /**
     * Validates the categories ranking and stores it as a category-id -> rank
     * map. Requirements enforced: exactly one numeric categories-values list;
     * ranks convertible to Integer; minimal rank 1; maximal rank equal to the
     * number of categories; no two categories sharing a rank.
     * Depends on checkAndExtractCategories having populated getCategoriesIds().
     */
    protected static void checkCategoriesRanking(Inputs inputs, XMCDA xmcda, ProgramExecutionResult errors) throws InputDataException {
        if (xmcda.categoriesValuesList.isEmpty()) {
            String errorMessage = "No categories values list has been supplied";
            errors.addError(errorMessage);
            throw new InputDataException(errorMessage);
        }
        if (xmcda.categoriesValuesList.size() > 1) {
            String errorMessage = "More than one categories values list has been supplied";
            errors.addError(errorMessage);
            throw new InputDataException(errorMessage);
        }
        CategoriesValues categoriesValuesList = xmcda.categoriesValuesList.get(0);
        if (!categoriesValuesList.isNumeric()) {
            String errorMessage = "Each of the categories ranks must be integer";
            errors.addError(errorMessage);
            throw new InputDataException(errorMessage);
        }
        Map<String, Integer> categoriesValues = new LinkedHashMap<>();
        try {
            // Convert to Integer-typed values; the converted list replaces the
            // original in the XMCDA structure so later consumers see the typed
            // version. convertTo may throw, handled by the generic catch below.
            CategoriesValues<Integer> categoriesValuesClass = categoriesValuesList.convertTo(Integer.class);
            xmcda.categoriesValuesList.set(0, categoriesValuesClass);
            // Track the rank range while copying (category id -> rank).
            int min = Integer.MAX_VALUE;
            int max = -1;
            for (Map.Entry<Category, LabelledQValues<Integer>> a : categoriesValuesClass.entrySet()) {
                if (a.getValue().get(0).getValue() < min) {
                    min = a.getValue().get(0).getValue();
                }
                if (a.getValue().get(0).getValue() > max) {
                    max = a.getValue().get(0).getValue();
                }
                categoriesValues.put(a.getKey().id(), a.getValue().get(0).getValue());
            }
            if (min != 1) {
                String errorMessage = "Minimal rank should be equal to 1.";
                errors.addError(errorMessage);
                throw new InputDataException(errorMessage);
            }
            if (max != inputs.getCategoriesIds().size()) {
                String errorMessage = "Maximal rank should be equal to number of categories.";
                errors.addError(errorMessage);
                throw new InputDataException(errorMessage);
            }
            // Pairwise duplicate-rank check (O(n^2); category counts are small).
            for (Map.Entry<String, Integer> categoryA : categoriesValues.entrySet()) {
                for (Map.Entry<String, Integer> categoryB : categoriesValues.entrySet()) {
                    if (categoryA.getValue().intValue() == categoryB.getValue() && !categoryA.getKey().equals(categoryB.getKey())) {
                        String errorMessage = "There can not be two categories with the same rank.";
                        errors.addError(errorMessage);
                        throw new InputDataException(errorMessage);
                    }
                }
            }
            inputs.setCategoriesRanking(categoriesValues);
        } catch (InputDataException e) {
            // Re-throw our own exception unchanged so it is not double-reported.
            throw e;
        } catch (Exception e) {
            String errorMessage = "An error occurred: " + e + ". Remember that each rank has to be integer.";
            errors.addError(errorMessage);
            throw new InputDataException(errorMessage);
        }
    }
protected static void checkAndExtractProfilesIds(Inputs inputs, XMCDA xmcda, ProgramExecutionResult errors) throws InputDataException {
inputs.setProfilesIds(new ArrayList<>());
if (xmcda.categoriesProfilesList.isEmpty()) {
String errorMessage = "No categories profiles list has been supplied";
errors.addError(errorMessage);
throw new InputDataException(errorMessage);
}
if (xmcda.categoriesProfilesList.size() > 1) {
String errorMessage = "You can not supply more then 1 categories profiles list";
errors.addError(errorMessage);
throw new InputDataException(errorMessage);
}
inputs.setCategoryProfiles(new ArrayList<>());
CategoriesProfiles categoriesProfiles = xmcda.categoriesProfilesList.get(0);
if (inputs.getCategoriesRanking().size() != categoriesProfiles.size()) {
String errorMessage = "There is a problem with categories rank list or categories profiles list. Each category has to be added to categories profiles list.";
errors.addError(errorMessage);
throw new InputDataException(errorMessage);
}
for (Object profile : categoriesProfiles) {
CategoryProfile tmpProfile = (CategoryProfile) profile;
if (!"central".equalsIgnoreCase(tmpProfile.getType().name())) {
String errorMessage = "There is a problem with categories rank list or categories profiles list. You need to provide central profiles for categories.";
errors.addError(errorMessage);
throw new InputDataException(errorMessage);
}
inputs.getCategoryProfiles().add(tmpProfile);
}
Collections.sort(inputs.getCategoryProfiles(), (left, right) -> Integer.compare(inputs.getCategoriesRanking().get(left.getCategory().id()), inputs.getCategoriesRanking().get(right.getCategory().id())));
inputs.setProfilesIds(new ArrayList<>());
checkAndExtractCentralProfilesIds(errors, inputs);
}
protected static void checkAndExtractCentralProfilesIds(ProgramExecutionResult errors, InputsHandler.Inputs inputs) throws InputDataException {
for (int j = 0; j < inputs.getCategoryProfiles().size(); j++) {
if (inputs.getCategoryProfiles().get(j).getCentralProfile() != null) {
inputs.getProfilesIds().add(inputs.getCategoryProfiles().get(j).getCentralProfile().getAlternative().id());
} else {
String errorMessage = "There is a problem with categories profiles. You need to provide central for categories.";
errors.addError(errorMessage);
throw new InputDataException(errorMessage);
}
}
}
protected static void checkAndExtractCriteria(Inputs inputs, XMCDA xmcda, ProgramExecutionResult errors) throws InputDataException {
if (xmcda.criteria.getActiveCriteria().isEmpty()) {
String errorMessage = "You need to provide a not empty criteria list.";
errors.addError(errorMessage);
throw new InputDataException(errorMessage);
}
inputs.setCriteriaIds(xmcda.criteria.getActiveCriteria().stream().filter(a -> "criteria".equals(a.getMarker())).map(
Criterion::id).collect(Collectors.toList()));
checkAndExtractCriteriaWeights(inputs, xmcda, errors);
}
protected static void checkAndExtractCriteriaWeights(Inputs inputs, XMCDA xmcda, ProgramExecutionResult errors) throws InputDataException {
if (xmcda.criteriaValuesList.size() != 1) {
String errorMessage = "You need to provide 1 alternatives values list for criteria weights.";
errors.addError(errorMessage);
throw new InputDataException(errorMessage);
}
checkAndExtractCriteriaFromList(inputs, xmcda, errors);
}
    /**
     * Reads the single criteria-values list as the weights map
     * (criterion id -> BigDecimal). Requires numeric values; any conversion
     * failure is reported and rethrown. Ends by checking every known criterion
     * has a weight.
     */
    protected static void checkAndExtractCriteriaFromList(Inputs inputs, XMCDA xmcda, ProgramExecutionResult errors) throws InputDataException {
        inputs.setCriteriaWeights(new LinkedHashMap<>());
        // Raw type on purpose here; the unchecked cast to the Double-valued map
        // below is guarded by the isNumeric() check and the try/catch.
        org.xmcda.CriteriaValues criteriaWeights = xmcda.criteriaValuesList.get(0);
        if (!criteriaWeights.isNumeric()) {
            String errorMessage = "Each criterion weight must have numeric type.";
            errors.addError(errorMessage);
            throw new InputDataException(errorMessage);
        }
        try {
            for (Map.Entry<Criterion, LabelledQValues<Double>> weight : ((Map<Criterion, LabelledQValues<Double>>) criteriaWeights).entrySet()) {
                // Go through double to normalise whatever numeric type was supplied.
                Double tmpValue = weight.getValue().get(0).convertToDouble().getValue();
                BigDecimal bigDecimalValue = BigDecimal.valueOf(tmpValue);
                inputs.getCriteriaWeights().put(weight.getKey().id(), bigDecimalValue);
            }
        } catch (Exception exception) {
            String errorMessage = "An error occurred: " + exception + ". Each flow must have numeric type.";
            errors.addError(errorMessage);
            throw new InputDataException(errorMessage);
        }
        checkMissingValuesInCriteriaWeights(inputs, errors);
    }
/**
 * Verifies that every criterion id collected earlier has a corresponding weight entry.
 *
 * @throws InputDataException if any criterion is missing from the weights map
 */
protected static void checkMissingValuesInCriteriaWeights(Inputs inputs, ProgramExecutionResult errors) throws InputDataException {
    for (String criterionId : inputs.getCriteriaIds()) {
        if (!inputs.getCriteriaWeights().containsKey(criterionId)) {
            String errorMessage = "There are some missing values in criteria weights.";
            errors.addError(errorMessage);
            throw new InputDataException(errorMessage);
        }
    }
}
/**
 * Validates that exactly one alternatives matrix (the partial preferences) is present in the
 * XMCDA input, then delegates extraction of the preference values.
 *
 * @throws InputDataException if the number of alternatives matrices is not exactly 1
 */
protected static void checkAndExtractPartialPreferences(Inputs inputs, XMCDA xmcda, ProgramExecutionResult errors) throws InputDataException {
    if (xmcda.alternativesMatricesList.size() != 1) {
        // BUGFIX: the message previously said "alternatives values list", but the input read
        // here is an alternatives matrix.
        String errorMessage = "You need to provide 1 alternatives matrix for partial preferences.";
        errors.addError(errorMessage);
        throw new InputDataException(errorMessage);
    }
    checkAndExtractPreferences(inputs, xmcda, errors);
}
/**
 * Copies partial preference values from the single alternatives matrix into
 * {@code inputs.partialPreferences}: every (alternative, profile) pair plus every
 * (profile, alternative-or-profile) pair is looked up.
 *
 * @throws InputDataException if a required preference entry is missing or malformed
 */
protected static void checkAndExtractPreferences(Inputs inputs, XMCDA xmcda, ProgramExecutionResult errors) throws InputDataException {
    inputs.partialPreferences = new LinkedHashMap<>();
    @SuppressWarnings("unchecked")
    AlternativesMatrix<Double> matrix = (AlternativesMatrix<Double>) xmcda.alternativesMatricesList.get(0);
    // Profiles are compared against alternatives and also against the other profiles.
    List<String> alternativesAndProfiles = new ArrayList<>(inputs.getAlternativesIds());
    alternativesAndProfiles.addAll(inputs.getProfilesIds());
    // alternative-vs-profile entries
    for (String alternative : inputs.getAlternativesIds()) {
        for (String profile : inputs.getProfilesIds()) {
            putPreferencesIntoMap(inputs, errors, matrix, alternative, profile);
        }
    }
    // profile-vs-(alternative or profile) entries
    for (String profile : inputs.getProfilesIds()) {
        for (String other : alternativesAndProfiles) {
            putPreferencesIntoMap(inputs, errors, matrix, profile, other);
        }
    }
}
/**
 * Looks up the preference values for the pair ({@code first}, {@code second}) in the matrix and
 * stores them per criterion id as {@link BigDecimal} in {@code inputs.partialPreferences}.
 * A missing diagonal entry (first equals second) is tolerated and simply skipped.
 *
 * @throws InputDataException if an off-diagonal entry is missing, the criteria count is wrong,
 *         or a criterion appears twice for the same pair
 */
private static void putPreferencesIntoMap(Inputs inputs, ProgramExecutionResult errors,
    AlternativesMatrix<Double> matrix, String first, String second) throws InputDataException {
    // Ensure the nested map slots for (first, second) exist before any lookup.
    inputs.partialPreferences.putIfAbsent(first, new LinkedHashMap<>());
    inputs.partialPreferences.get(first).putIfAbsent(second, new LinkedHashMap<>());
    Coord<Alternative, Alternative> coord = new Coord<>(new Alternative(first), new Alternative(second));
    // IDIOM: get() replaces the roundabout getOrDefault(coord, null) — identical result.
    QualifiedValues<Double> values = matrix.get(coord);
    if (values == null) {
        if (!first.equals(second)) {
            String errorMessage = "List of partial preferences does not contain value for coord (" + first + "," + second + ")";
            errors.addError(errorMessage);
            throw new InputDataException(errorMessage);
        }
        return;
    }
    if (values.size() != inputs.criteriaIds.size()) {
        String errorMessage = "List of partial preferences does not contain correct criteria list";
        errors.addError(errorMessage);
        throw new InputDataException(errorMessage);
    }
    for (QualifiedValue<Double> value : values) {
        // NOTE(review): values whose id is not a known criterion are silently skipped even though
        // the size check above passed — confirm this is intended rather than an input error.
        if (inputs.criteriaIds.contains(value.id())) {
            if (inputs.partialPreferences.get(first).get(second).containsKey(value.id())) {
                String errorMessage = "List of partial preferences contains duplicates of criteria";
                errors.addError(errorMessage);
                throw new InputDataException(errorMessage);
            }
            BigDecimal bigDecimalValue = BigDecimal.valueOf(value.getValue());
            inputs.partialPreferences.get(first).get(second).put(value.id(), bigDecimalValue);
        }
    }
}
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.elasticmapreduce.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
 * <p>
 * The list of supported product configurations which allow user-supplied arguments. EMR accepts these arguments and
 * forwards them to the corresponding installation script as bootstrap action arguments.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/SupportedProductConfig"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class SupportedProductConfig implements Serializable, Cloneable, StructuredPojo {

    /** The name of the product configuration. */
    private String name;

    /** The user-supplied arguments forwarded to the installation script; lazily initialized. */
    private com.amazonaws.internal.SdkInternalList<String> args;

    /**
     * Sets the name of the product configuration.
     *
     * @param name
     *        The name of the product configuration.
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * Returns the name of the product configuration.
     *
     * @return The name of the product configuration.
     */
    public String getName() {
        return this.name;
    }

    /**
     * Sets the name of the product configuration.
     *
     * @param name
     *        The name of the product configuration.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SupportedProductConfig withName(String name) {
        this.name = name;
        return this;
    }

    /**
     * Returns the list of user-supplied arguments, creating an empty list on first access.
     *
     * @return The list of user-supplied arguments.
     */
    public java.util.List<String> getArgs() {
        if (this.args == null) {
            this.args = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return this.args;
    }

    /**
     * Replaces the list of user-supplied arguments with a copy of the given collection.
     *
     * @param args
     *        The list of user-supplied arguments.
     */
    public void setArgs(java.util.Collection<String> args) {
        this.args = (args == null) ? null : new com.amazonaws.internal.SdkInternalList<String>(args);
    }

    /**
     * Appends the given values to the list of user-supplied arguments.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setArgs(java.util.Collection)} or {@link #withArgs(java.util.Collection)} if you want to override the
     * existing values.
     * </p>
     *
     * @param args
     *        The list of user-supplied arguments.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SupportedProductConfig withArgs(String... args) {
        if (this.args == null) {
            this.args = new com.amazonaws.internal.SdkInternalList<String>(args.length);
        }
        java.util.Collections.addAll(this.args, args);
        return this;
    }

    /**
     * Replaces the list of user-supplied arguments.
     *
     * @param args
     *        The list of user-supplied arguments.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SupportedProductConfig withArgs(java.util.Collection<String> args) {
        setArgs(args);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getName() != null) {
            sb.append("Name: ").append(getName()).append(",");
        }
        if (getArgs() != null) {
            sb.append("Args: ").append(getArgs());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof SupportedProductConfig)) {
            return false;
        }
        SupportedProductConfig other = (SupportedProductConfig) obj;
        return java.util.Objects.equals(getName(), other.getName())
                && java.util.Objects.equals(getArgs(), other.getArgs());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation as the previous manual loop,
        // so existing hash values are preserved.
        return java.util.Objects.hash(getName(), getArgs());
    }

    @Override
    public SupportedProductConfig clone() {
        try {
            return (SupportedProductConfig) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.elasticmapreduce.model.transform.SupportedProductConfigMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.