gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.server.common.dao.service;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.kaaproject.kaa.common.dto.ApplicationDto;
import org.kaaproject.kaa.common.dto.HasId;
import org.kaaproject.kaa.common.dto.TenantDto;
import org.kaaproject.kaa.common.dto.ctl.CTLSchemaDto;
import org.kaaproject.kaa.common.dto.ctl.CTLSchemaMetaInfoDto;
import org.kaaproject.kaa.server.common.dao.AbstractTest;
import org.kaaproject.kaa.server.common.dao.exception.DatabaseProcessingException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
@Ignore("This test should be extended and initialized with proper context in each NoSQL submodule")
public class CTLServiceImplTest extends AbstractTest {

    /** Worker pool driving the concurrent-save test; 10 threads are enough to provoke races. */
    private final ExecutorService executorService = Executors.newFixedThreadPool(10);

    // Shared fixture graph, rebuilt in before() for every test.
    private TenantDto tenant;
    private ApplicationDto appDto;
    private ApplicationDto appDto2;
    private CTLSchemaDto firstSchema;
    private CTLSchemaDto secondSchema;
    private CTLSchemaDto thirdSchema;
    private CTLSchemaDto fourthSchema;
    private CTLSchemaDto mainSchema;          // depends on firstSchema..fourthSchema
    private CTLSchemaDto defaultSystemSchema; // pre-existing system-scope schema found in the DB
    private CTLSchemaDto systemSchema;        // system scope: no tenant, no application
    private CTLSchemaDto tenantSchema;        // tenant scope, FQN+7, version 77
    private CTLSchemaDto tenantSchema2;       // same FQN as tenantSchema, version 78 (the "latest")
    private CTLSchemaDto appSchema;           // app scope (appDto), FQN+8, version 80
    private CTLSchemaDto app2Schema;          // same FQN as appSchema but owned by appDto2
    private CTLSchemaDto appSchema2;          // same FQN as appSchema, version 81 (the "latest")
    private CTLSchemaDto appSchema3;          // FQN+9, only version in its FQN

    private static final String TEST_CTL_SCHEMA_ALPHA = "dao/ctl/alpha.json";
    private static final String TEST_CTL_SCHEMA_ALPHA_FLAT = "dao/ctl/alphaFlat.json";
    private static final String TEST_CTL_SCHEMA_BETA = "dao/ctl/beta.json";
    private static final String TEST_CTL_SCHEMA_GAMMA = "dao/ctl/gamma.json";

    // alpha -> beta -> gamma dependency chain exercised by the export tests.
    private CTLSchemaDto alpha;
    private CTLSchemaDto beta;
    private CTLSchemaDto gamma;

    /**
     * Rebuilds the full CTL fixture: a tenant with two applications, a schema with four
     * dependencies, schemas at system/tenant/application scope (including same-FQN version
     * pairs), and the alpha->beta->gamma chain used by the export tests.
     */
    @Before
    public void before() throws Exception {
        clearDBData();
        if (tenant == null) {
            tenant = userService.findTenantByName(SUPER_TENANT);
            if (tenant == null) {
                // NOTE(review): the applications and defaultSystemSchema are only initialized
                // when the tenant had to be created here; if findTenantByName() returns an
                // existing tenant, appDto/appDto2/defaultSystemSchema remain null. Confirm this
                // is intended for the NoSQL submodule contexts this test is meant to run in.
                TenantDto tn = new TenantDto();
                tn.setName(SUPER_TENANT);
                tenant = userService.saveTenant(tn);
                appDto = generateApplicationDto(tenant.getId(), "The app 1");
                appDto2 = generateApplicationDto(tenant.getId(), "The app 2");
                List<CTLSchemaDto> ctlSchemas = ctlService.findSystemCTLSchemas();
                defaultSystemSchema = ctlSchemas.get(0);
            }
        }
        // mainSchema depends on four independent tenant-scope schemas.
        Set<CTLSchemaDto> dependency = new HashSet<>();
        firstSchema = ctlService.saveCTLSchema(generateCTLSchemaDto(DEFAULT_FQN + 1, tenant.getId(), null, 1));
        dependency.add(firstSchema);
        secondSchema = ctlService.saveCTLSchema(generateCTLSchemaDto(DEFAULT_FQN + 2, tenant.getId(), null, 2));
        dependency.add(secondSchema);
        thirdSchema = ctlService.saveCTLSchema(generateCTLSchemaDto(DEFAULT_FQN + 3, tenant.getId(), null, 3));
        dependency.add(thirdSchema);
        fourthSchema = ctlService.saveCTLSchema(generateCTLSchemaDto(DEFAULT_FQN + 4, tenant.getId(), null, 4));
        dependency.add(fourthSchema);
        mainSchema = generateCTLSchemaDto(DEFAULT_FQN + 5, tenant.getId(), null, 7);
        mainSchema.setDependencySet(dependency);
        mainSchema = ctlService.saveCTLSchema(mainSchema);
        // One schema at each scope, plus same-FQN version pairs for "latest" lookups.
        systemSchema = ctlService.saveCTLSchema(generateCTLSchemaDto(DEFAULT_FQN + 6, null, null, 50));
        tenantSchema = ctlService.saveCTLSchema(generateCTLSchemaDto(DEFAULT_FQN + 7, tenant.getId(), null, 77));
        tenantSchema2 = ctlService.saveCTLSchema(generateCTLSchemaDto(DEFAULT_FQN + 7, tenant.getId(), null, 78));
        CTLSchemaDto unsaved = generateCTLSchemaDto(DEFAULT_FQN + 8, tenant.getId(), appDto.getId(), 80);
        appSchema = ctlService.saveCTLSchema(unsaved);
        unsaved = generateCTLSchemaDto(DEFAULT_FQN + 8, tenant.getId(), appDto.getId(), 81);
        appSchema2 = ctlService.saveCTLSchema(unsaved);
        unsaved = generateCTLSchemaDto(DEFAULT_FQN + 9, tenant.getId(), appDto.getId(), 2);
        appSchema3 = ctlService.saveCTLSchema(unsaved);
        unsaved = generateCTLSchemaDto(DEFAULT_FQN + 8, tenant.getId(), appDto2.getId(), 11);
        app2Schema = ctlService.saveCTLSchema(unsaved);
        // Build the alpha -> beta -> gamma chain bottom-up so each dependency exists
        // before the schema that references it; re-read each saved schema by id so the
        // fixture fields hold exactly what the store returns.
        gamma = new CTLSchemaDto();
        CTLSchemaMetaInfoDto gammaMetaInfo = new CTLSchemaMetaInfoDto("org.kaaproject.kaa.Gamma", tenant.getId());
        gamma.setMetaInfo(gammaMetaInfo);
        gamma.setVersion(1);
        gamma.setBody(readSchemaFileAsString(TEST_CTL_SCHEMA_GAMMA));
        gamma = ctlService.saveCTLSchema(gamma);
        gamma = ctlService.findCTLSchemaById(gamma.getId());
        beta = new CTLSchemaDto();
        CTLSchemaMetaInfoDto betaMetaInfo = new CTLSchemaMetaInfoDto("org.kaaproject.kaa.Beta", tenant.getId());
        beta.setMetaInfo(betaMetaInfo);
        beta.setVersion(1);
        Set<CTLSchemaDto> betaDependencies = new HashSet<>();
        betaDependencies.add(gamma);
        beta.setDependencySet(betaDependencies);
        beta.setBody(readSchemaFileAsString(TEST_CTL_SCHEMA_BETA));
        beta = ctlService.saveCTLSchema(beta);
        beta = ctlService.findCTLSchemaById(beta.getId());
        alpha = new CTLSchemaDto();
        CTLSchemaMetaInfoDto alphaMetaInfo = new CTLSchemaMetaInfoDto("org.kaaproject.kaa.Alpha", tenant.getId());
        alpha.setMetaInfo(alphaMetaInfo);
        alpha.setVersion(1);
        Set<CTLSchemaDto> alphaDependencies = new HashSet<>();
        alphaDependencies.add(beta);
        alpha.setDependencySet(alphaDependencies);
        alpha.setBody(readSchemaFileAsString(TEST_CTL_SCHEMA_ALPHA));
        alpha = ctlService.saveCTLSchema(alpha);
        alpha = ctlService.findCTLSchemaById(alpha.getId());
    }

    /** Removing a tenant-scope schema by its FQN/version/tenant/app coordinates deletes it. */
    @Test
    public void testRemoveCTLSchemaByFqnAndVerAndTenantIdAndApplicationId() {
        String schemaId = tenantSchema.getId();
        ctlService.removeCTLSchemaByFqnAndVerAndTenantIdAndApplicationId(tenantSchema.getMetaInfo().getFqn(), tenantSchema.getVersion(),
                tenantSchema.getMetaInfo().getTenantId(), tenantSchema.getMetaInfo().getApplicationId());
        Assert.assertNull(ctlService.findCTLSchemaById(schemaId));
    }

    /** Removal also works for system-scope schemas (null tenant and application ids). */
    @Test
    public void testRemoveCTLSchemaByFqnAndVerAndWithoutTenantId() {
        String schemaId = systemSchema.getId();
        ctlService.removeCTLSchemaByFqnAndVerAndTenantIdAndApplicationId(systemSchema.getMetaInfo().getFqn(), systemSchema.getVersion(),
                systemSchema.getMetaInfo().getTenantId(), systemSchema.getMetaInfo().getApplicationId());
        Assert.assertNull(ctlService.findCTLSchemaById(schemaId));
    }

    /** Lookup by FQN/version/tenant/app returns the exact saved schema. */
    @Test
    public void testFindCTLSchemaByFqnAndVerAndTenantIdAndApplicationId() {
        CTLSchemaMetaInfoDto metaInfo = firstSchema.getMetaInfo();
        CTLSchemaDto found = ctlService.findCTLSchemaByFqnAndVerAndTenantIdAndApplicationId(metaInfo.getFqn(),
                firstSchema.getVersion(), metaInfo.getTenantId(), metaInfo.getApplicationId());
        Assert.assertEquals(firstSchema, found);
    }

    /** Lookup by id returns the exact saved schema. */
    @Test
    public void testFindCTLSchemaById() {
        CTLSchemaDto found = ctlService.findCTLSchemaById(firstSchema.getId());
        Assert.assertEquals(firstSchema, found);
    }

    /** System scope contains exactly the pre-existing default schema plus the one saved in before(). */
    @Test
    public void testFindSystemCTLSchemas() {
        List<CTLSchemaDto> appSchemas = ctlService.findSystemCTLSchemas();
        Assert.assertEquals(getIdsDto(Arrays.asList(defaultSystemSchema, systemSchema)), getIdsDto(appSchemas));
    }

    /** Meta-info view of system-scope schemas matches the fixture (order-insensitive). */
    @Test
    public void testFindSystemCTLSchemasMetaInfo() {
        List<CTLSchemaMetaInfoDto> appSchemas = ctlService.findSystemCTLSchemasMetaInfo();
        // Sort both sides by id so the comparison does not depend on store ordering.
        Comparator<HasId> comparator = new Comparator<HasId>() {
            @Override
            public int compare(HasId o1, HasId o2) {
                return o1.getId().compareTo(o2.getId());
            }
        };
        Collections.sort(appSchemas, comparator);
        List<CTLSchemaMetaInfoDto> expectedSchemas = Arrays.asList(defaultSystemSchema.getMetaInfo(), systemSchema.getMetaInfo());
        Collections.sort(expectedSchemas, comparator);
        Assert.assertEquals(expectedSchemas, appSchemas);
    }

    /** "Latest" lookup picks the highest version among same-FQN schemas, per scope. */
    @Test
    public void testFindLatestCTLSchemaByFqn() {
        CTLSchemaDto latestTenantScope = ctlService.findLatestCTLSchemaByFqnAndTenantIdAndApplicationId(DEFAULT_FQN + 7, tenant.getId(), null);
        Assert.assertEquals(Integer.valueOf(78), latestTenantScope.getVersion());
        CTLSchemaDto latestAppScope = ctlService.findLatestCTLSchemaByFqnAndTenantIdAndApplicationId(DEFAULT_FQN + 8, tenant.getId(), appDto.getId());
        Assert.assertEquals(Integer.valueOf(81), latestAppScope.getVersion());
    }

    /** Promoting an application-scope schema to tenant scope succeeds when its FQN is unique. */
    @Test
    public void testScopeUpdate() {
        CTLSchemaMetaInfoDto metaInfo = appSchema3.getMetaInfo();
        metaInfo.setApplicationId(null);
        ctlService.updateCTLSchemaMetaInfoScope(metaInfo);
        CTLSchemaDto found = ctlService.findCTLSchemaByFqnAndVerAndTenantIdAndApplicationId(metaInfo.getFqn(), appSchema3.getVersion(), metaInfo.getTenantId(), null);
        Assert.assertEquals(appSchema3, found);
    }

    /** Promotion must fail when another application already uses the same FQN (app2Schema). */
    @Test(expected = DatabaseProcessingException.class)
    public void testScopeUpdateForbidden() {
        CTLSchemaMetaInfoDto metaInfo = appSchema.getMetaInfo();
        metaInfo.setApplicationId(null);
        ctlService.updateCTLSchemaMetaInfoScope(metaInfo);
    }

    /** Siblings are same-FQN schemas owned by other applications: only app2Schema here. */
    @Test
    public void testFindSiblingsFqns() {
        List<CTLSchemaMetaInfoDto> siblingSchemas =
                ctlService.findSiblingsByFqnTenantIdAndApplicationId(appSchema.getMetaInfo().getFqn(), appSchema.getMetaInfo().getTenantId(), appSchema.getMetaInfo().getApplicationId());
        Assert.assertNotNull(siblingSchemas);
        Assert.assertEquals(1, siblingSchemas.size());
        Assert.assertEquals(app2Schema.getMetaInfo(), siblingSchemas.get(0));
    }

    /** Each of the four dependencies reports mainSchema as its only dependent; mainSchema has none. */
    @Test
    public void testFindCTLSchemaDependentsByFqnVersionTenantId() {
        List<CTLSchemaDto> appSchemas = ctlService.findCTLSchemaDependents(firstSchema.getMetaInfo().getFqn(), firstSchema.getVersion(),
                tenant.getId(), null);
        Assert.assertEquals(Arrays.asList(mainSchema), appSchemas);
        appSchemas = ctlService.findCTLSchemaDependents(secondSchema.getMetaInfo().getFqn(), secondSchema.getVersion(), tenant.getId(), null);
        Assert.assertEquals(Arrays.asList(mainSchema), appSchemas);
        appSchemas = ctlService.findCTLSchemaDependents(thirdSchema.getMetaInfo().getFqn(), thirdSchema.getVersion(), tenant.getId(), null);
        Assert.assertEquals(Arrays.asList(mainSchema), appSchemas);
        appSchemas = ctlService.findCTLSchemaDependents(fourthSchema.getMetaInfo().getFqn(), fourthSchema.getVersion(), tenant.getId(), null);
        Assert.assertEquals(Arrays.asList(mainSchema), appSchemas);
        appSchemas = ctlService.findCTLSchemaDependents(mainSchema.getMetaInfo().getFqn(), mainSchema.getVersion(), tenant.getId(), null);
        Assert.assertTrue(appSchemas.isEmpty());
    }

    /**
     * Saves 100 schemas concurrently (each under its own generated tenant) and verifies every
     * one is retrievable afterwards.
     */
    @Test
    public void multiThreadCTLSchemaSaveTest() throws InterruptedException, ExecutionException {
        List<Future<CTLSchemaDto>> futures = new ArrayList<>();
        for (int i = 0; i < 100; i++) {
            futures.add(executorService.submit(new Callable<CTLSchemaDto>() {
                @Override
                public CTLSchemaDto call() {
                    // Any failure propagates through the Future as an ExecutionException.
                    return ctlService.saveCTLSchema(generateCTLSchemaDto(generateTenantDto().getId()));
                }
            }));
        }
        List<CTLSchemaDto> schemas = new ArrayList<>();
        for (Future<CTLSchemaDto> future : futures) {
            // Future.get() blocks until the task completes; the previous busy-wait on
            // isDone() only burned CPU without adding any guarantee.
            schemas.add(future.get());
        }
        Assert.assertEquals(100, schemas.size());
        for (CTLSchemaDto schema : schemas) {
            CTLSchemaDto savedSchema = ctlService.findCTLSchemaByFqnAndVerAndTenantIdAndApplicationId(DEFAULT_FQN, 100, schema.getMetaInfo().getTenantId(), null);
            Assert.assertNotNull(savedSchema);
            Assert.assertEquals(schema, savedSchema);
        }
    }

    /** Shallow export must reproduce the original alpha schema body (dependencies referenced, not inlined). */
    @Test
    public void testShallowExport() throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode expected = mapper.readTree(readSchemaFileAsString(TEST_CTL_SCHEMA_ALPHA));
        JsonNode actual = mapper.readTree(ctlService.shallowExport(alpha).getFileData());
        Assert.assertEquals(expected, actual);
    }

    /** Flat export must match the reference file with beta/gamma inlined into alpha. */
    @Test
    public void testFlatExport() throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode expected = mapper.readTree(readSchemaFileAsString(TEST_CTL_SCHEMA_ALPHA_FLAT));
        JsonNode actual = mapper.readTree(ctlService.flatExport(alpha).getFileData());
        Assert.assertEquals(expected, actual);
    }
}
| |
package org.zstack.network.l2;
import org.springframework.beans.factory.annotation.Autowired;
import org.zstack.core.Platform;
import org.zstack.core.cloudbus.CloudBus;
import org.zstack.core.cloudbus.CloudBusListCallBack;
import org.zstack.core.cloudbus.MessageSafe;
import org.zstack.core.componentloader.PluginRegistry;
import org.zstack.core.db.DatabaseFacade;
import org.zstack.core.db.DbEntityLister;
import org.zstack.core.db.SimpleQuery;
import org.zstack.core.errorcode.ErrorFacade;
import org.zstack.header.errorcode.SysErrors;
import org.zstack.header.AbstractService;
import org.zstack.header.core.Completion;
import org.zstack.header.errorcode.ErrorCode;
import org.zstack.header.exception.CloudRuntimeException;
import org.zstack.header.host.HostAddExtensionPoint;
import org.zstack.header.host.HostInventory;
import org.zstack.header.host.HypervisorType;
import org.zstack.header.message.APIMessage;
import org.zstack.header.message.Message;
import org.zstack.header.message.MessageReply;
import org.zstack.header.network.*;
import org.zstack.header.network.l2.*;
import org.zstack.query.QueryFacade;
import org.zstack.search.GetQuery;
import org.zstack.search.SearchQuery;
import org.zstack.tag.TagManager;
import org.zstack.utils.ObjectUtils;
import org.zstack.utils.Utils;
import org.zstack.utils.logging.CLogger;
import java.util.*;
/**
 * Default implementation of {@link L2NetworkManager}. Routes L2-network API and local
 * messages from the cloud bus, creates L2 networks through pluggable
 * {@link L2NetworkFactory} instances, and prepares existing L2 networks on hosts that
 * are newly added to a cluster.
 */
public class L2NetworkManagerImpl extends AbstractService implements L2NetworkManager, HostAddExtensionPoint {
    private static final CLogger logger = Utils.getLogger(L2NetworkManagerImpl.class);

    @Autowired
    private CloudBus bus;
    @Autowired
    private DatabaseFacade dbf;
    @Autowired
    private PluginRegistry pluginRgty;
    @Autowired
    private DbEntityLister dl;
    @Autowired
    private TagManager tagMgr;
    @Autowired
    private ErrorFacade errf;

    // Factories keyed by L2NetworkType name; populated once in populateExtensions().
    private Map<String, L2NetworkFactory> l2NetworkFactories = Collections.synchronizedMap(new HashMap<String, L2NetworkFactory>());
    // Realization extension points keyed first by L2 network type, then by hypervisor type.
    private Map<L2NetworkType, Map<HypervisorType, L2NetworkRealizationExtensionPoint>> realizationExts = new HashMap<L2NetworkType, Map<HypervisorType, L2NetworkRealizationExtensionPoint>>();
    private List<L2NetworkCreateExtensionPoint> createExtensions = new ArrayList<L2NetworkCreateExtensionPoint>();

    // Message classes that may still be routed to an L2 network after its VO row is gone;
    // for these, passThrough() falls back to the soft-deleted EO record.
    private static final Set<Class> allowedMessageAfterSoftDeletion = new HashSet<Class>();
    static {
        allowedMessageAfterSoftDeletion.add(L2NetworkDeletionMsg.class);
    }

    /** Bus entry point: splits traffic into API and local message handling. */
    @Override
    @MessageSafe
    public void handleMessage(Message msg) {
        if (msg instanceof APIMessage) {
            handleApiMessage((APIMessage)msg);
        } else {
            handleLocalMessage(msg);
        }
    }

    // Local (non-API) messages addressed to a specific L2 network are forwarded to it.
    private void handleLocalMessage(Message msg) {
        if (msg instanceof L2NetworkMessage) {
            passThrough((L2NetworkMessage)msg);
        } else {
            bus.dealWithUnknownMessage(msg);
        }
    }

    // Dispatch table for API messages; anything not handled here but still targeting an
    // L2 network is passed through to the network object itself.
    private void handleApiMessage(APIMessage msg) {
        if (msg instanceof APICreateL2NetworkMsg) {
            handle((APICreateL2NetworkMsg)msg);
        } else if (msg instanceof APIListL2NetworkMsg) {
            handle((APIListL2NetworkMsg) msg);
        } else if (msg instanceof APISearchL2NetworkMsg) {
            handle((APISearchL2NetworkMsg)msg);
        } else if (msg instanceof APISearchL2VlanNetworkMsg) {
            handle((APISearchL2VlanNetworkMsg)msg);
        } else if (msg instanceof APIGetL2NetworkMsg) {
            handle((APIGetL2NetworkMsg) msg);
        } else if (msg instanceof APIGetL2VlanNetworkMsg) {
            handle((APIGetL2VlanNetworkMsg)msg);
        } else if (msg instanceof APIGetL2NetworkTypesMsg) {
            handle((APIGetL2NetworkTypesMsg) msg);
        } else if (msg instanceof L2NetworkMessage) {
            passThrough((L2NetworkMessage) msg);
        } else {
            bus.dealWithUnknownMessage(msg);
        }
    }

    // Replies with the names of all registered L2 network types.
    private void handle(APIGetL2NetworkTypesMsg msg) {
        List<String> types = new ArrayList<String>();
        types.addAll(L2NetworkType.getAllTypeNames());
        APIGetL2NetworkTypesReply reply = new APIGetL2NetworkTypesReply();
        reply.setL2NetworkTypes(types);
        bus.reply(msg, reply);
    }

    // Replies with a single L2 VLAN network inventory serialized as a string.
    private void handle(APIGetL2VlanNetworkMsg msg) {
        GetQuery q = new GetQuery();
        String res = q.getAsString(msg, L2VlanNetworkInventory.class);
        APIGetL2VlanNetworkReply reply = new APIGetL2VlanNetworkReply();
        reply.setInventory(res);
        bus.reply(msg, reply);
    }

    // Replies with search results over L2 VLAN networks, serialized as a string.
    private void handle(APISearchL2VlanNetworkMsg msg) {
        SearchQuery<L2VlanNetworkInventory> sq = SearchQuery.create(msg, L2VlanNetworkInventory.class);
        String content = sq.listAsString();
        APISearchL2VlanNetworkReply reply = new APISearchL2VlanNetworkReply();
        reply.setContent(content);
        bus.reply(msg, reply);
    }

    // Replies with a single L2 network inventory serialized as a string.
    private void handle(APIGetL2NetworkMsg msg) {
        GetQuery q = new GetQuery();
        String res = q.getAsString(msg, L2NetworkInventory.class);
        APIGetL2NetworkReply reply = new APIGetL2NetworkReply();
        reply.setInventory(res);
        bus.reply(msg, reply);
    }

    // Replies with search results over L2 networks, serialized as a string.
    private void handle(APISearchL2NetworkMsg msg) {
        SearchQuery<L2NetworkInventory> sq = SearchQuery.create(msg, L2NetworkInventory.class);
        String content = sq.listAsString();
        APISearchL2NetworkReply reply = new APISearchL2NetworkReply();
        reply.setContent(content);
        bus.reply(msg, reply);
    }

    /**
     * Forwards a message to the concrete L2 network object it targets. If the VO is gone
     * but the message class is whitelisted (e.g. deletion), the soft-deleted EO record is
     * copied back into a VO so the message can still complete; otherwise an error reply
     * is sent.
     */
    private void passThrough(L2NetworkMessage msg) {
        Message amsg = (Message) msg;
        L2NetworkVO vo = dbf.findByUuid(msg.getL2NetworkUuid(), L2NetworkVO.class);
        if (vo == null && allowedMessageAfterSoftDeletion.contains(msg.getClass())) {
            L2NetworkEO eo = dbf.findByUuid(msg.getL2NetworkUuid(), L2NetworkEO.class);
            vo = ObjectUtils.newAndCopy(eo, L2NetworkVO.class);
        }
        if (vo == null) {
            ErrorCode errCode = errf.instantiateErrorCode(SysErrors.RESOURCE_NOT_FOUND, String.format("unable to find L2Network[uuid:%s], it may have been deleted", msg.getL2NetworkUuid()));
            bus.replyErrorByMessageType((Message)msg, errCode);
            return;
        }
        L2NetworkFactory factory = getL2NetworkFactory(L2NetworkType.valueOf(vo.getType()));
        L2Network nw = factory.getL2Network(vo);
        nw.handleMessage(amsg);
    }

    // Lists L2 networks matching the API message's conditions.
    private void handle(APIListL2NetworkMsg msg) {
        List<L2NetworkVO> vos = dl.listByApiMessage(msg, L2NetworkVO.class);
        List<L2NetworkInventory> invs = L2NetworkInventory.valueOf(vos);
        APIListL2NetworkReply reply = new APIListL2NetworkReply();
        reply.setInventories(invs);
        bus.reply(msg, reply);
    }

    /**
     * Creates an L2 network: runs pre-create extensions (any NetworkException aborts the
     * creation and publishes an error event), delegates the actual creation to the
     * type-specific factory, attaches tags, then runs post-create extensions
     * (whose failures are logged but not fatal) and publishes the success event.
     */
    private void handle(APICreateL2NetworkMsg msg) {
        for (L2NetworkCreateExtensionPoint extp : createExtensions) {
            try {
                extp.beforeCreateL2Network(msg);
            } catch (NetworkException e) {
                String err = String.format("unable to create l2network[name:%s, type:%s], %s", msg.getName(), msg.getType(), e.getMessage());
                logger.warn(err, e);
                APICreateL2NetworkEvent evt = new APICreateL2NetworkEvent(msg.getId());
                evt.setErrorCode(errf.instantiateErrorCode(SysErrors.CREATE_RESOURCE_ERROR, err));
                bus.publish(evt);
                return;
            }
        }
        L2NetworkType type = L2NetworkType.valueOf(msg.getType());
        L2NetworkFactory factory = getL2NetworkFactory(type);
        L2NetworkVO vo = new L2NetworkVO();
        // Honor a caller-supplied resource UUID; otherwise generate one.
        if (msg.getResourceUuid() != null) {
            vo.setUuid(msg.getResourceUuid());
        } else {
            vo.setUuid(Platform.getUuid());
        }
        vo.setDescription(msg.getDescription());
        vo.setName(msg.getName());
        vo.setPhysicalInterface(msg.getPhysicalInterface());
        vo.setType(type.toString());
        vo.setZoneUuid(msg.getZoneUuid());
        L2NetworkInventory inv = factory.createL2Network(vo, msg);
        tagMgr.createTagsFromAPICreateMessage(msg, inv.getUuid(), L2NetworkVO.class.getSimpleName());
        for (L2NetworkCreateExtensionPoint extp : createExtensions) {
            try {
                extp.afterCreateL2Network(inv);
            } catch (Exception e) {
                // Post-create hooks are best-effort: log and continue with the next one.
                logger.warn(String.format("unhandled exception happened when calling %s", extp.getClass().getName()), e);
            }
        }
        APICreateL2NetworkEvent evt = new APICreateL2NetworkEvent(msg.getId());
        evt.setInventory(inv);
        bus.publish(evt);
    }

    /** Service id this manager is registered under on the cloud bus. */
    @Override
    public String getId() {
        return bus.makeLocalServiceId(L2NetworkConstant.SERVICE_ID);
    }

    /** Collects factory/extension-point plugins at service start. */
    @Override
    public boolean start() {
        populateExtensions();
        return true;
    }

    @Override
    public boolean stop() {
        // NOTE(review): returns false where sibling services conventionally return true
        // from stop(); confirm whether the framework acts on this return value.
        return false;
    }

    /**
     * Looks up the factory registered for the given L2 network type.
     * @throws CloudRuntimeException if no factory was registered for the type
     */
    @Override
    public L2NetworkFactory getL2NetworkFactory(L2NetworkType type) {
        L2NetworkFactory factory = l2NetworkFactories.get(type.toString());
        if (factory == null) {
            throw new CloudRuntimeException(String.format("Cannot find L2NetworkFactory for type(%s)", type));
        }
        return factory;
    }

    /**
     * Looks up the realization extension point for a (network type, hypervisor type) pair.
     * @throws IllegalArgumentException if either level of the lookup has no registration
     */
    @Override
    public L2NetworkRealizationExtensionPoint getRealizationExtension(L2NetworkType l2Type, HypervisorType hvType) {
        Map<HypervisorType, L2NetworkRealizationExtensionPoint> map = realizationExts.get(l2Type);
        if (map == null) {
            throw new IllegalArgumentException(String.format("Cannot find L2NetworkRealizationExtensionPoint supporting L2NetworkType[%s]", l2Type));
        }
        L2NetworkRealizationExtensionPoint extp = map.get(hvType);
        if (extp == null) {
            throw new IllegalArgumentException(String.format("Cannot find L2NetworkRealizationExtensionPoint for L2NetworkType[%s] supporting hypervisor[%s]", l2Type, hvType));
        }
        return extp;
    }

    // Indexes plugin-provided factories and extension points; duplicate factories for the
    // same network type are a deployment error and abort startup.
    private void populateExtensions() {
        for (L2NetworkFactory f : pluginRgty.getExtensionList(L2NetworkFactory.class)) {
            L2NetworkFactory old = l2NetworkFactories.get(f.getType().toString());
            if (old != null) {
                throw new CloudRuntimeException(String.format("duplicate L2NetworkFactory[%s, %s] for type[%s]",
                        f.getClass().getName(), old.getClass().getName(), f.getType()));
            }
            l2NetworkFactories.put(f.getType().toString(), f);
        }
        for (L2NetworkRealizationExtensionPoint extp : pluginRgty.getExtensionList(L2NetworkRealizationExtensionPoint.class)) {
            Map<HypervisorType, L2NetworkRealizationExtensionPoint> map = realizationExts.get(extp.getSupportedL2NetworkType());
            if (map == null) {
                map = new HashMap<HypervisorType, L2NetworkRealizationExtensionPoint>(1);
                realizationExts.put(extp.getSupportedL2NetworkType(), map);
            }
            map.put(extp.getSupportedHypervisorType(), extp);
        }
        createExtensions = pluginRgty.getExtensionList(L2NetworkCreateExtensionPoint.class);
    }

    /** Nothing to do before a host is added; succeed immediately. */
    @Override
    public void beforeAddHost(HostInventory host, Completion completion) {
        completion.success();
    }

    /**
     * After a host joins a cluster, asks every L2 network attached to that cluster to
     * prepare itself on the host. Fails the completion on the first failed reply.
     */
    @Override
    public void afterAddHost(HostInventory host, final Completion completion) {
        SimpleQuery<L2NetworkClusterRefVO> q = dbf.createQuery(L2NetworkClusterRefVO.class);
        q.select(L2NetworkClusterRefVO_.l2NetworkUuid);
        q.add(L2NetworkClusterRefVO_.clusterUuid, SimpleQuery.Op.EQ, host.getClusterUuid());
        List<String> l2uuids = q.listValue();
        if (l2uuids.isEmpty()) {
            completion.success();
            return;
        }
        List<PrepareL2NetworkOnHostMsg> msgs = new ArrayList<PrepareL2NetworkOnHostMsg>();
        for (String l2uuid : l2uuids) {
            PrepareL2NetworkOnHostMsg msg = new PrepareL2NetworkOnHostMsg();
            msg.setL2NetworkUuid(l2uuid);
            msg.setHost(host);
            bus.makeTargetServiceIdByResourceUuid(msg, L2NetworkConstant.SERVICE_ID, l2uuid);
            msgs.add(msg);
        }
        bus.send(msgs, new CloudBusListCallBack(completion) {
            @Override
            public void run(List<MessageReply> replies) {
                for (MessageReply reply : replies) {
                    if (!reply.isSuccess()) {
                        completion.fail(reply.getError());
                        return;
                    }
                }
                completion.success();
            }
        });
    }
}
| |
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package org.eclipse.bpel4chor.model.pbd.impl;
import org.eclipse.bpel4chor.model.pbd.OpaqueBoolean;
import org.eclipse.bpel4chor.model.pbd.PbdPackage;
import org.eclipse.bpel4chor.model.pbd.Query;
import org.eclipse.bpel4chor.model.pbd.To;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>To</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link org.eclipse.bpel4chor.model.pbd.impl.ToImpl#getQuery <em>Query</em>}</li>
* <li>{@link org.eclipse.bpel4chor.model.pbd.impl.ToImpl#getOpaque <em>Opaque</em>}</li>
* <li>{@link org.eclipse.bpel4chor.model.pbd.impl.ToImpl#getExpressionLanguage <em>Expression Language</em>}</li>
* <li>{@link org.eclipse.bpel4chor.model.pbd.impl.ToImpl#getVariable <em>Variable</em>}</li>
* <li>{@link org.eclipse.bpel4chor.model.pbd.impl.ToImpl#getPart <em>Part</em>}</li>
* <li>{@link org.eclipse.bpel4chor.model.pbd.impl.ToImpl#getProperty <em>Property</em>}</li>
* <li>{@link org.eclipse.bpel4chor.model.pbd.impl.ToImpl#getPartnerLink <em>Partner Link</em>}</li>
* <li>{@link org.eclipse.bpel4chor.model.pbd.impl.ToImpl#getExpression <em>Expression</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class ToImpl extends ExtensibleElementsImpl implements To {
    /**
     * The cached value of the '{@link #getQuery() <em>Query</em>}' containment reference.
     * <!-- begin-user-doc -->
     * Contained query object; {@code null} when unset.
     * <!-- end-user-doc -->
     * @see #getQuery()
     * @generated
     * @ordered
     */
    protected Query query;
    /**
     * The default value of the '{@link #getOpaque() <em>Opaque</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getOpaque()
     * @generated
     * @ordered
     */
    protected static final OpaqueBoolean OPAQUE_EDEFAULT = OpaqueBoolean.NO;
    /**
     * The cached value of the '{@link #getOpaque() <em>Opaque</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getOpaque()
     * @generated
     * @ordered
     */
    protected OpaqueBoolean opaque = OPAQUE_EDEFAULT;
    /**
     * The default value of the '{@link #getExpressionLanguage() <em>Expression Language</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getExpressionLanguage()
     * @generated
     * @ordered
     */
    protected static final String EXPRESSION_LANGUAGE_EDEFAULT = null;
    /**
     * The cached value of the '{@link #getExpressionLanguage() <em>Expression Language</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getExpressionLanguage()
     * @generated
     * @ordered
     */
    protected String expressionLanguage = EXPRESSION_LANGUAGE_EDEFAULT;
    /**
     * The default value of the '{@link #getVariable() <em>Variable</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getVariable()
     * @generated
     * @ordered
     */
    protected static final String VARIABLE_EDEFAULT = null;
    /**
     * The cached value of the '{@link #getVariable() <em>Variable</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getVariable()
     * @generated
     * @ordered
     */
    protected String variable = VARIABLE_EDEFAULT;
    /**
     * The default value of the '{@link #getPart() <em>Part</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getPart()
     * @generated
     * @ordered
     */
    protected static final String PART_EDEFAULT = null;
    /**
     * The cached value of the '{@link #getPart() <em>Part</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getPart()
     * @generated
     * @ordered
     */
    protected String part = PART_EDEFAULT;
    /**
     * The default value of the '{@link #getProperty() <em>Property</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getProperty()
     * @generated
     * @ordered
     */
    protected static final String PROPERTY_EDEFAULT = null;
    /**
     * The cached value of the '{@link #getProperty() <em>Property</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getProperty()
     * @generated
     * @ordered
     */
    protected String property = PROPERTY_EDEFAULT;
    /**
     * The default value of the '{@link #getPartnerLink() <em>Partner Link</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getPartnerLink()
     * @generated
     * @ordered
     */
    protected static final String PARTNER_LINK_EDEFAULT = null;
    /**
     * The cached value of the '{@link #getPartnerLink() <em>Partner Link</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getPartnerLink()
     * @generated
     * @ordered
     */
    protected String partnerLink = PARTNER_LINK_EDEFAULT;
    /**
     * The default value of the '{@link #getExpression() <em>Expression</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getExpression()
     * @generated
     * @ordered
     */
    protected static final String EXPRESSION_EDEFAULT = null;
    /**
     * The cached value of the '{@link #getExpression() <em>Expression</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getExpression()
     * @generated
     * @ordered
     */
    protected String expression = EXPRESSION_EDEFAULT;
    /**
     * <!-- begin-user-doc -->
     * Creates an empty {@code To} instance; all features start at their defaults.
     * <!-- end-user-doc -->
     * @generated
     */
    protected ToImpl() {
        super();
    }
    /**
     * <!-- begin-user-doc -->
     * Returns the static EMF metaclass for this model object.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return PbdPackage.Literals.TO;
    }
    /**
     * <!-- begin-user-doc -->
     * Returns the contained query, or {@code null} if none is set.
     * <!-- end-user-doc -->
     * @generated
     */
    public Query getQuery() {
        return query;
    }
    /**
     * <!-- begin-user-doc -->
     * Replaces the contained query without inverse-reference bookkeeping, appending a
     * SET notification to {@code msgs} when listeners are attached.
     * <!-- end-user-doc -->
     * @param newQuery the new contained query, may be {@code null}
     * @param msgs the chain to append notifications to, may be {@code null}
     * @return the (possibly newly created) notification chain
     * @generated
     */
    public NotificationChain basicSetQuery(Query newQuery, NotificationChain msgs) {
        Query oldQuery = query;
        query = newQuery;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, PbdPackage.TO__QUERY, oldQuery, newQuery);
            if (msgs == null) msgs = notification; else msgs.add(notification);
        }
        return msgs;
    }
    /**
     * <!-- begin-user-doc -->
     * Sets the contained query, detaching the previous containment and attaching the new
     * one before dispatching the accumulated notifications. If the value is unchanged,
     * only a touch notification is emitted (when required).
     * <!-- end-user-doc -->
     * @generated
     */
    public void setQuery(Query newQuery) {
        if (newQuery != query) {
            NotificationChain msgs = null;
            if (query != null)
                msgs = ((InternalEObject)query).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - PbdPackage.TO__QUERY, null, msgs);
            if (newQuery != null)
                msgs = ((InternalEObject)newQuery).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - PbdPackage.TO__QUERY, null, msgs);
            msgs = basicSetQuery(newQuery, msgs);
            if (msgs != null) msgs.dispatch();
        }
        else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, PbdPackage.TO__QUERY, newQuery, newQuery));
    }
    /**
     * <!-- begin-user-doc -->
     * Returns the opaque flag; defaults to {@link OpaqueBoolean#NO}.
     * <!-- end-user-doc -->
     * @generated
     */
    public OpaqueBoolean getOpaque() {
        return opaque;
    }
    /**
     * <!-- begin-user-doc -->
     * Sets the opaque flag; {@code null} is coerced to the default ({@code NO}).
     * <!-- end-user-doc -->
     * @generated
     */
    public void setOpaque(OpaqueBoolean newOpaque) {
        OpaqueBoolean oldOpaque = opaque;
        opaque = newOpaque == null ? OPAQUE_EDEFAULT : newOpaque;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, PbdPackage.TO__OPAQUE, oldOpaque, opaque));
    }
    /**
     * <!-- begin-user-doc -->
     * Returns the expression language, or {@code null} if unset.
     * <!-- end-user-doc -->
     * @generated
     */
    public String getExpressionLanguage() {
        return expressionLanguage;
    }
    /**
     * <!-- begin-user-doc -->
     * Sets the expression language, notifying attached listeners of the change.
     * <!-- end-user-doc -->
     * @generated
     */
    public void setExpressionLanguage(String newExpressionLanguage) {
        String oldExpressionLanguage = expressionLanguage;
        expressionLanguage = newExpressionLanguage;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, PbdPackage.TO__EXPRESSION_LANGUAGE, oldExpressionLanguage, expressionLanguage));
    }
    /**
     * <!-- begin-user-doc -->
     * Returns the variable name, or {@code null} if unset.
     * <!-- end-user-doc -->
     * @generated
     */
    public String getVariable() {
        return variable;
    }
    /**
     * <!-- begin-user-doc -->
     * Sets the variable name, notifying attached listeners of the change.
     * <!-- end-user-doc -->
     * @generated
     */
    public void setVariable(String newVariable) {
        String oldVariable = variable;
        variable = newVariable;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, PbdPackage.TO__VARIABLE, oldVariable, variable));
    }
    /**
     * <!-- begin-user-doc -->
     * Returns the message part name, or {@code null} if unset.
     * <!-- end-user-doc -->
     * @generated
     */
    public String getPart() {
        return part;
    }
    /**
     * <!-- begin-user-doc -->
     * Sets the message part name, notifying attached listeners of the change.
     * <!-- end-user-doc -->
     * @generated
     */
    public void setPart(String newPart) {
        String oldPart = part;
        part = newPart;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, PbdPackage.TO__PART, oldPart, part));
    }
    /**
     * <!-- begin-user-doc -->
     * Returns the property name, or {@code null} if unset.
     * <!-- end-user-doc -->
     * @generated
     */
    public String getProperty() {
        return property;
    }
    /**
     * <!-- begin-user-doc -->
     * Sets the property name, notifying attached listeners of the change.
     * <!-- end-user-doc -->
     * @generated
     */
    public void setProperty(String newProperty) {
        String oldProperty = property;
        property = newProperty;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, PbdPackage.TO__PROPERTY, oldProperty, property));
    }
    /**
     * <!-- begin-user-doc -->
     * Returns the partner link name, or {@code null} if unset.
     * <!-- end-user-doc -->
     * @generated
     */
    public String getPartnerLink() {
        return partnerLink;
    }
/**
 * <!-- begin-user-doc -->
 * Updates the {@code partnerLink} attribute, notifying registered adapters
 * of the change (old and new values included in the notification).
 * <!-- end-user-doc -->
 * @generated NOT
 */
public void setPartnerLink(String newPartnerLink) {
    final String previousValue = partnerLink;
    partnerLink = newPartnerLink;
    if (eNotificationRequired()) {
        eNotify(new ENotificationImpl(this, Notification.SET, PbdPackage.TO__PARTNER_LINK, previousValue, partnerLink));
    }
}
/**
 * <!-- begin-user-doc -->
 * Returns the current value of the {@code expression} attribute.
 * <!-- end-user-doc -->
 * @generated
 */
public String getExpression() {
    return expression;
}
/**
 * <!-- begin-user-doc -->
 * Updates the {@code expression} attribute, notifying registered adapters of
 * the change (old and new values included in the notification).
 * <!-- end-user-doc -->
 * @generated NOT
 */
public void setExpression(String newExpression) {
    final String previousValue = expression;
    expression = newExpression;
    if (eNotificationRequired()) {
        eNotify(new ENotificationImpl(this, Notification.SET, PbdPackage.TO__EXPRESSION, previousValue, expression));
    }
}
/**
 * <!-- begin-user-doc -->
 * Reflective inverse-remove hook: only the {@code query} feature is handled
 * here (it is cleared via {@code basicSetQuery(null, msgs)}, bypassing the
 * public setter); every other feature is delegated to the superclass.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
    switch (featureID) {
        case PbdPackage.TO__QUERY:
            return basicSetQuery(null, msgs);
    }
    return super.eInverseRemove(otherEnd, featureID, msgs);
}
/**
 * <!-- begin-user-doc -->
 * Reflective getter: returns the value of the feature identified by
 * {@code featureID} via the corresponding accessor; unknown feature IDs are
 * delegated to the superclass.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
    switch (featureID) {
        case PbdPackage.TO__QUERY:
            return getQuery();
        case PbdPackage.TO__OPAQUE:
            return getOpaque();
        case PbdPackage.TO__EXPRESSION_LANGUAGE:
            return getExpressionLanguage();
        case PbdPackage.TO__VARIABLE:
            return getVariable();
        case PbdPackage.TO__PART:
            return getPart();
        case PbdPackage.TO__PROPERTY:
            return getProperty();
        case PbdPackage.TO__PARTNER_LINK:
            return getPartnerLink();
        case PbdPackage.TO__EXPRESSION:
            return getExpression();
    }
    return super.eGet(featureID, resolve, coreType);
}
/**
 * <!-- begin-user-doc -->
 * Reflective setter: casts {@code newValue} to the feature's declared type
 * and forwards to the corresponding setter (which fires the change
 * notification); unknown feature IDs are delegated to the superclass.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void eSet(int featureID, Object newValue) {
    switch (featureID) {
        case PbdPackage.TO__QUERY:
            setQuery((Query)newValue);
            return;
        case PbdPackage.TO__OPAQUE:
            setOpaque((OpaqueBoolean)newValue);
            return;
        case PbdPackage.TO__EXPRESSION_LANGUAGE:
            setExpressionLanguage((String)newValue);
            return;
        case PbdPackage.TO__VARIABLE:
            setVariable((String)newValue);
            return;
        case PbdPackage.TO__PART:
            setPart((String)newValue);
            return;
        case PbdPackage.TO__PROPERTY:
            setProperty((String)newValue);
            return;
        case PbdPackage.TO__PARTNER_LINK:
            setPartnerLink((String)newValue);
            return;
        case PbdPackage.TO__EXPRESSION:
            setExpression((String)newValue);
            return;
    }
    super.eSet(featureID, newValue);
}
/**
 * <!-- begin-user-doc -->
 * Reflective unset: restores the feature identified by {@code featureID} to
 * its generated default (the {@code *_EDEFAULT} constant, or {@code null}
 * for the {@code query} reference); unknown feature IDs are delegated to the
 * superclass.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void eUnset(int featureID) {
    switch (featureID) {
        case PbdPackage.TO__QUERY:
            setQuery((Query)null);
            return;
        case PbdPackage.TO__OPAQUE:
            setOpaque(OPAQUE_EDEFAULT);
            return;
        case PbdPackage.TO__EXPRESSION_LANGUAGE:
            setExpressionLanguage(EXPRESSION_LANGUAGE_EDEFAULT);
            return;
        case PbdPackage.TO__VARIABLE:
            setVariable(VARIABLE_EDEFAULT);
            return;
        case PbdPackage.TO__PART:
            setPart(PART_EDEFAULT);
            return;
        case PbdPackage.TO__PROPERTY:
            setProperty(PROPERTY_EDEFAULT);
            return;
        case PbdPackage.TO__PARTNER_LINK:
            setPartnerLink(PARTNER_LINK_EDEFAULT);
            return;
        case PbdPackage.TO__EXPRESSION:
            setExpression(EXPRESSION_EDEFAULT);
            return;
    }
    super.eUnset(featureID);
}
/**
 * <!-- begin-user-doc -->
 * Reflective is-set check: reports whether the feature identified by
 * {@code featureID} currently differs from its generated default value;
 * unknown feature IDs are delegated to the superclass. String-typed
 * features use the null-safe {@code EDEFAULT}-vs-field comparison emitted
 * by the EMF code generator.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public boolean eIsSet(int featureID) {
    switch (featureID) {
        case PbdPackage.TO__QUERY:
            return query != null;
        case PbdPackage.TO__OPAQUE:
            return opaque != OPAQUE_EDEFAULT;
        case PbdPackage.TO__EXPRESSION_LANGUAGE:
            return EXPRESSION_LANGUAGE_EDEFAULT == null ? expressionLanguage != null : !EXPRESSION_LANGUAGE_EDEFAULT.equals(expressionLanguage);
        case PbdPackage.TO__VARIABLE:
            return VARIABLE_EDEFAULT == null ? variable != null : !VARIABLE_EDEFAULT.equals(variable);
        case PbdPackage.TO__PART:
            return PART_EDEFAULT == null ? part != null : !PART_EDEFAULT.equals(part);
        case PbdPackage.TO__PROPERTY:
            return PROPERTY_EDEFAULT == null ? property != null : !PROPERTY_EDEFAULT.equals(property);
        case PbdPackage.TO__PARTNER_LINK:
            return PARTNER_LINK_EDEFAULT == null ? partnerLink != null : !PARTNER_LINK_EDEFAULT.equals(partnerLink);
        case PbdPackage.TO__EXPRESSION:
            return EXPRESSION_EDEFAULT == null ? expression != null : !EXPRESSION_EDEFAULT.equals(expression);
    }
    return super.eIsSet(featureID);
}
/**
 * <!-- begin-user-doc -->
 * Renders this object's attribute values for debugging, appended to the
 * superclass representation. Proxies delegate entirely to the superclass.
 * <!-- end-user-doc -->
 * @generated NOT
 */
@Override
public String toString() {
    if (eIsProxy()) return super.toString();
    // StringBuilder produces identical output to the generated StringBuffer
    // version without per-call synchronization overhead.
    StringBuilder result = new StringBuilder(super.toString());
    result.append(" (opaque: ");
    result.append(opaque);
    result.append(", expressionLanguage: ");
    result.append(expressionLanguage);
    result.append(", variable: ");
    result.append(variable);
    result.append(", part: ");
    result.append(part);
    result.append(", property: ");
    result.append(property);
    result.append(", partnerLink: ");
    result.append(partnerLink);
    result.append(", expression: ");
    result.append(expression);
    result.append(')');
    return result.toString();
}
} //ToImpl
| |
package edu.ucdenver.ccp.datasource.fileparsers.obo;
/*
* #%L
* Colorado Computational Pharmacology's datasource
* project
* %%
* Copyright (C) 2012 - 2016 Regents of the University of Colorado
* %%
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of the Regents of the University of Colorado nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
import java.io.File;
import java.io.IOException;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import org.apache.log4j.Logger;
import org.apache.tools.ant.util.StringUtils;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLAnnotation;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.semanticweb.owlapi.model.OWLProperty;
import owltools.graph.OWLGraphWrapper;
/**
 * Utility wrapper around an OWL ontology loaded from a file, providing
 * convenience accessors for is-a hierarchy traversal and for OBO-style
 * labels, namespaces, and synonyms.
 */
public class OntologyUtil {

    private static final Logger logger = Logger.getLogger(OntologyUtil.class);

    /**
     * oboInOwl namespace with the invalid "OWL" capitalization produced by the
     * OWL API OBO parser; see {@link #getAnnotationPropertyUri(OWLAnnotation)}.
     */
    private static final String INVALID_OBO_IN_OWL_NAMESPACE = "http://www.geneontology.org/formats/oboInOWL#";

    /* Annotation property URIs under which OBO namespaces and synonyms appear. */
    private static final String NAMESPACE_PROP = "<http://purl.obolibrary.org/obo/namespace>";
    private static final String NAMESPACE_PROP_ALT = "<http://www.geneontology.org/formats/oboInOwl#hasOBONamespace>";
    private static final String EXACT_SYN_PROP = "<http://www.geneontology.org/formats/oboInOwl#hasExactSynonym>";
    private static final String EXACT_SYN_PROP_ALT = "<http://purl.obolibrary.org/obo/exact_synonym>";
    private static final String RELATED_SYN_PROP = "<http://www.geneontology.org/formats/oboInOwl#hasRelatedSynonym>";
    private static final String RELATED_SYN_PROP_ALT = "<http://purl.obolibrary.org/obo/related_synonym>";
    private static final String NARROW_SYN_PROP = "<http://www.geneontology.org/formats/oboInOwl#hasNarrowSynonym>";
    private static final String NARROW_SYN_PROP_ALT = "<http://purl.obolibrary.org/obo/narrow_synonym>";
    private static final String BROAD_SYN_PROP = "<http://www.geneontology.org/formats/oboInOwl#hasBroadSynonym>";
    private static final String BROAD_SYN_PROP_ALT = "<http://purl.obolibrary.org/obo/broad_synonym>";

    private final OWLGraphWrapper graph;
    private OWLOntology ont;

    /** OBO synonym scopes; {@link #ALL} matches every scope. */
    public enum SynonymType {
        RELATED(0), EXACT(1), NARROW(2), BROAD(3), ALL(-1);

        private final int scope;

        private SynonymType(int scope) {
            this.scope = scope;
        }

        /** @return the numeric scope identifier for this synonym type */
        public int scope() {
            return scope;
        }

        /**
         * @param scope numeric scope identifier
         * @return the {@link SynonymType} whose scope equals the argument
         * @throws IllegalArgumentException if no type has the given scope
         */
        public static SynonymType getTypeFromScope(int scope) {
            for (SynonymType type : SynonymType.values()) {
                if (type.scope() == scope) {
                    return type;
                }
            }
            throw new IllegalArgumentException("Unknown scope ID: " + scope);
        }
    }

    /**
     * Loads the ontology document and wraps it for graph traversal.
     *
     * @param ontologyFile the ontology document to load
     * @throws OWLOntologyCreationException if the document cannot be parsed
     */
    public OntologyUtil(File ontologyFile) throws OWLOntologyCreationException {
        OWLOntologyManager inputOntologyManager = OWLManager.createOWLOntologyManager();
        ont = inputOntologyManager.loadOntologyFromOntologyDocument(ontologyFile);
        graph = new OWLGraphWrapper(ont);
    }

    /** @return all ancestors of {@code cls} reachable through is-a links */
    public Set<OWLClass> getAncestors(OWLClass cls) {
        return graph.getAncestorsThroughIsA(cls);
    }

    /** @return all descendants of {@code cls} reachable through is-a links */
    public Set<OWLClass> getDescendents(OWLClass cls) {
        return graph.getDescendantsThroughIsA(cls);
    }

    /** @return true if {@code possibleChild} is an is-a descendant of {@code possibleParent} */
    public boolean isDescendent(OWLClass possibleChild, OWLClass possibleParent) {
        return getDescendents(possibleParent).contains(possibleChild);
    }

    /**
     * @return an iterator of all classes in the ontology (does not include
     *         obsolete classes).
     */
    public Iterator<OWLClass> getClassIterator() {
        return graph.getAllOWLClasses().iterator();
    }

    /** Releases resources held by the underlying graph wrapper. */
    public void close() throws IOException {
        graph.close();
    }

    /** @return the {@link OWLClass} for the given identifier, resolved via the graph wrapper */
    public OWLClass getOWLClassFromId(String id) {
        return graph.getOWLClassByIdentifier(id);
    }

    /** @return true if {@code cls} carries a deprecated-IRI (obsolete) annotation */
    public boolean isObsolete(OWLClass cls) {
        for (OWLAnnotation annotation : cls.getAnnotations(ont)) {
            if (annotation.isDeprecatedIRIAnnotation()) {
                return true;
            }
        }
        return false;
    }

    /**
     * @return the label of {@code cls} with literal quoting removed, or
     *         {@code null} if the class has no label annotation
     */
    public String getLabel(OWLClass cls) {
        for (OWLAnnotation annotation : cls.getAnnotations(ont)) {
            if (annotation.getProperty().isLabel()) {
                return stripLiteralDecoration(annotation.getValue().toString());
            }
        }
        return null;
    }

    /**
     * This method was composed in response to the following issue:
     * https://github.com/UCDenver-ccp/datasource/issues/5
     *
     * The user uncovered an inconsistency in the oboInOwl namespace returned by
     * the OWL API OBO parser. The inconsistency involves the capitalization of
     * "OWL" in oboInOWL. The OBO parser uses
     * http://www.geneontology.org/formats/oboInOWL# whereas the namespace
     * appears as http://www.geneontology.org/formats/oboInOwl# in OWL files in
     * the wild. This method swaps out the oboInOWL for oboInOwl when it is
     * observed.
     *
     * @param annotation
     * @return the {@link OWLProperty} IRI for the input {@link OWLAnnotation}.
     *         If the invalid version of the oboInOwl namespace is detected
     *         (used by the OWL API OBO parser), it is replaced with the valid
     *         version which differs only in capitalization.
     */
    public static String getAnnotationPropertyUri(OWLAnnotation annotation) {
        String propertyUri = annotation.getProperty().toString();
        if (propertyUri.startsWith("<" + INVALID_OBO_IN_OWL_NAMESPACE)) {
            propertyUri = propertyUri.replaceFirst("oboInOWL", "oboInOwl");
        }
        return propertyUri;
    }

    /**
     * Collects synonyms of {@code cls} for the requested scope.
     *
     * @param cls the class whose synonym annotations are examined
     * @param synType the synonym scope to collect; {@link SynonymType#ALL}
     *            collects synonyms of every scope
     * @return the matching synonym strings with literal decoration removed
     */
    public Set<String> getSynonyms(OWLClass cls, SynonymType synType) {
        Set<String> synonyms = new HashSet<String>();
        for (OWLAnnotation annotation : cls.getAnnotations(ont)) {
            String property = getAnnotationPropertyUri(annotation);
            SynonymType propertyType = synonymTypeForProperty(property);
            if (propertyType != null) {
                if (synType == SynonymType.ALL || synType == propertyType) {
                    synonyms.add(stripSynonymDecoration(annotation.getValue().toString()));
                }
            } else if (property.contains("ynonym")) {
                // A synonym-like property we do not recognize: surface it
                // instead of silently dropping the synonym.
                logger.error("Unhandled synonym type: " + annotation.getProperty());
            }
        }
        return synonyms;
    }

    /**
     * Maps a synonym annotation property URI to its {@link SynonymType}.
     *
     * @return the matching type, or {@code null} if the property is not one of
     *         the recognized synonym property URIs
     */
    private static SynonymType synonymTypeForProperty(String property) {
        if (property.equals(EXACT_SYN_PROP) || property.equals(EXACT_SYN_PROP_ALT)) {
            return SynonymType.EXACT;
        }
        if (property.equals(RELATED_SYN_PROP) || property.equals(RELATED_SYN_PROP_ALT)) {
            return SynonymType.RELATED;
        }
        if (property.equals(BROAD_SYN_PROP) || property.equals(BROAD_SYN_PROP_ALT)) {
            return SynonymType.BROAD;
        }
        if (property.equals(NARROW_SYN_PROP) || property.equals(NARROW_SYN_PROP_ALT)) {
            return SynonymType.NARROW;
        }
        return null;
    }

    /** Strips the surrounding quote and {@code ^^xsd:string} typing from an annotation literal. */
    private static String stripLiteralDecoration(String value) {
        String s = StringUtils.removePrefix(value, "\"");
        return StringUtils.removeSuffix(s, "\"^^xsd:string");
    }

    /** Strips literal decoration plus the escaped-quote/OBO trailer observed on synonym values. */
    private static String stripSynonymDecoration(String value) {
        String s = stripLiteralDecoration(value);
        s = StringUtils.removePrefix(s, "\\\"");
        return StringUtils.removeSuffix(s, "\\\" []");
    }

    /**
     * @return the OBO namespace of {@code cls} with literal decoration removed,
     *         or {@code null} if the class carries no namespace annotation
     */
    public String getNamespace(OWLClass cls) {
        for (OWLAnnotation annotation : cls.getAnnotations(ont)) {
            String propertyUri = getAnnotationPropertyUri(annotation);
            if (propertyUri.equals(NAMESPACE_PROP_ALT) || propertyUri.equals(NAMESPACE_PROP)) {
                return stripLiteralDecoration(annotation.getValue().toString());
            }
        }
        return null;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.jobhistory;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.mapred.TaskAttemptID;
import org.apache.hadoop.mapred.TaskID;
import org.apache.hadoop.mapred.TaskStatus;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.JobStatus;
import org.apache.hadoop.mapreduce.TaskType;
import org.junit.Assert;
import org.junit.Test;
import org.skyscreamer.jsonassert.JSONAssert;
import org.skyscreamer.jsonassert.JSONCompareMode;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.util.HashMap;
import java.util.TimeZone;
public class TestHistoryViewerPrinter {
// Shared logger for these history-viewer printer tests.
private static final Log LOG = LogFactory.getLog(
    TestHistoryViewerPrinter.class);
/**
 * Verifies the human-readable report for the fixed synthetic job built by
 * createJobInfo() in summary mode (printAll == false). The expected output is
 * pinned byte-for-byte — including tabs, column padding, and section
 * separators — so any formatting change in HumanReadableHistoryViewerPrinter
 * fails here. Times are rendered in the GMT time zone to keep the expectation
 * machine-independent.
 */
@Test
public void testHumanPrinter() throws Exception {
    JobHistoryParser.JobInfo job = createJobInfo();
    HumanReadableHistoryViewerPrinter printer =
        new HumanReadableHistoryViewerPrinter(job, false, "http://",
            TimeZone.getTimeZone("GMT"));
    String outStr = run(printer);
    Assert.assertEquals("\n" +
        "Hadoop job: job_1317928501754_0001\n" +
        "=====================================\n" +
        "User: rkanter\n" +
        "JobName: my job\n" +
        "JobConf: /tmp/job.xml\n" +
        "Submitted At: 6-Oct-2011 19:15:01\n" +
        "Launched At: 6-Oct-2011 19:15:02 (1sec)\n" +
        "Finished At: 6-Oct-2011 19:15:16 (14sec)\n" +
        "Status: SUCCEEDED\n" +
        "Counters: \n" +
        "\n" +
        "|Group Name |Counter name |Map Value |Reduce Value|Total Value|\n" +
        "---------------------------------------------------------------------------------------\n" +
        "|group1 |counter1 |5 |5 |5 \n" +
        "|group1 |counter2 |10 |10 |10 \n" +
        "|group2 |counter1 |15 |15 |15 \n" +
        "\n" +
        "=====================================\n" +
        "\n" +
        "Task Summary\n" +
        "============================\n" +
        "Kind\tTotal\tSuccessful\tFailed\tKilled\tStartTime\tFinishTime\n" +
        "\n" +
        "Setup\t1\t1\t\t0\t0\t6-Oct-2011 19:15:03\t6-Oct-2011 19:15:04 (1sec)\n" +
        "Map\t6\t5\t\t1\t0\t6-Oct-2011 19:15:04\t6-Oct-2011 19:15:16 (12sec)\n" +
        "Reduce\t1\t1\t\t0\t0\t6-Oct-2011 19:15:10\t6-Oct-2011 19:15:18 (8sec)\n" +
        "Cleanup\t1\t1\t\t0\t0\t6-Oct-2011 19:15:11\t6-Oct-2011 19:15:20 (9sec)\n" +
        "============================\n" +
        "\n" +
        "\n" +
        "Analysis\n" +
        "=========\n" +
        "\n" +
        "Time taken by best performing map task task_1317928501754_0001_m_000003: 3sec\n" +
        "Average time taken by map tasks: 5sec\n" +
        "Worse performing map tasks: \n" +
        "TaskId\t\tTimetaken\n" +
        "task_1317928501754_0001_m_000007 7sec\n" +
        "task_1317928501754_0001_m_000006 6sec\n" +
        "task_1317928501754_0001_m_000005 5sec\n" +
        "task_1317928501754_0001_m_000004 4sec\n" +
        "task_1317928501754_0001_m_000003 3sec\n" +
        "The last map task task_1317928501754_0001_m_000007 finished at (relative to the Job launch time): 6-Oct-2011 19:15:16 (14sec)\n" +
        "\n" +
        "Time taken by best performing shuffle task task_1317928501754_0001_r_000008: 8sec\n" +
        "Average time taken by shuffle tasks: 8sec\n" +
        "Worse performing shuffle tasks: \n" +
        "TaskId\t\tTimetaken\n" +
        "task_1317928501754_0001_r_000008 8sec\n" +
        "The last shuffle task task_1317928501754_0001_r_000008 finished at (relative to the Job launch time): 6-Oct-2011 19:15:18 (16sec)\n" +
        "\n" +
        "Time taken by best performing reduce task task_1317928501754_0001_r_000008: 0sec\n" +
        "Average time taken by reduce tasks: 0sec\n" +
        "Worse performing reduce tasks: \n" +
        "TaskId\t\tTimetaken\n" +
        "task_1317928501754_0001_r_000008 0sec\n" +
        "The last reduce task task_1317928501754_0001_r_000008 finished at (relative to the Job launch time): 6-Oct-2011 19:15:18 (16sec)\n" +
        "=========\n" +
        "\n" +
        "FAILED MAP task list for job_1317928501754_0001\n" +
        "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" +
        "====================================================\n" +
        "task_1317928501754_0001_m_000002\t6-Oct-2011 19:15:04\t6-Oct-2011 19:15:06 (2sec)\t\t\n" +
        "\n" +
        "FAILED task attempts by nodes\n" +
        "Hostname\tFailedTasks\n" +
        "===============================\n" +
        "localhost\ttask_1317928501754_0001_m_000002, \n", outStr);
}
/**
 * Same as testHumanPrinter() but in verbose mode (printAll == true), which
 * additionally lists per-task and per-attempt sections. Two expectations are
 * maintained, chosen by the running JVM's "java.version": the branches differ
 * only in the ordering of the per-task/per-attempt sections — presumably a
 * collection-iteration-order difference between JDK 1.7 and later runtimes
 * (NOTE(review): confirm against the printer's internal map types).
 */
@Test
public void testHumanPrinterAll() throws Exception {
    JobHistoryParser.JobInfo job = createJobInfo();
    HumanReadableHistoryViewerPrinter printer =
        new HumanReadableHistoryViewerPrinter(job, true, "http://",
            TimeZone.getTimeZone("GMT"));
    String outStr = run(printer);
    // Expected section ordering observed on JDK 1.7.
    if (System.getProperty("java.version").startsWith("1.7")) {
        Assert.assertEquals("\n" +
            "Hadoop job: job_1317928501754_0001\n" +
            "=====================================\n" +
            "User: rkanter\n" +
            "JobName: my job\n" +
            "JobConf: /tmp/job.xml\n" +
            "Submitted At: 6-Oct-2011 19:15:01\n" +
            "Launched At: 6-Oct-2011 19:15:02 (1sec)\n" +
            "Finished At: 6-Oct-2011 19:15:16 (14sec)\n" +
            "Status: SUCCEEDED\n" +
            "Counters: \n" +
            "\n" +
            "|Group Name |Counter name |Map Value |Reduce Value|Total Value|\n" +
            "---------------------------------------------------------------------------------------\n" +
            "|group1 |counter1 |5 |5 |5 \n" +
            "|group1 |counter2 |10 |10 |10 \n" +
            "|group2 |counter1 |15 |15 |15 \n" +
            "\n" +
            "=====================================\n" +
            "\n" +
            "Task Summary\n" +
            "============================\n" +
            "Kind\tTotal\tSuccessful\tFailed\tKilled\tStartTime\tFinishTime\n" +
            "\n" +
            "Setup\t1\t1\t\t0\t0\t6-Oct-2011 19:15:03\t6-Oct-2011 19:15:04 (1sec)\n" +
            "Map\t6\t5\t\t1\t0\t6-Oct-2011 19:15:04\t6-Oct-2011 19:15:16 (12sec)\n" +
            "Reduce\t1\t1\t\t0\t0\t6-Oct-2011 19:15:10\t6-Oct-2011 19:15:18 (8sec)\n" +
            "Cleanup\t1\t1\t\t0\t0\t6-Oct-2011 19:15:11\t6-Oct-2011 19:15:20 (9sec)\n" +
            "============================\n" +
            "\n" +
            "\n" +
            "Analysis\n" +
            "=========\n" +
            "\n" +
            "Time taken by best performing map task task_1317928501754_0001_m_000003: 3sec\n" +
            "Average time taken by map tasks: 5sec\n" +
            "Worse performing map tasks: \n" +
            "TaskId\t\tTimetaken\n" +
            "task_1317928501754_0001_m_000007 7sec\n" +
            "task_1317928501754_0001_m_000006 6sec\n" +
            "task_1317928501754_0001_m_000005 5sec\n" +
            "task_1317928501754_0001_m_000004 4sec\n" +
            "task_1317928501754_0001_m_000003 3sec\n" +
            "The last map task task_1317928501754_0001_m_000007 finished at (relative to the Job launch time): 6-Oct-2011 19:15:16 (14sec)\n" +
            "\n" +
            "Time taken by best performing shuffle task task_1317928501754_0001_r_000008: 8sec\n" +
            "Average time taken by shuffle tasks: 8sec\n" +
            "Worse performing shuffle tasks: \n" +
            "TaskId\t\tTimetaken\n" +
            "task_1317928501754_0001_r_000008 8sec\n" +
            "The last shuffle task task_1317928501754_0001_r_000008 finished at (relative to the Job launch time): 6-Oct-2011 19:15:18 (16sec)\n" +
            "\n" +
            "Time taken by best performing reduce task task_1317928501754_0001_r_000008: 0sec\n" +
            "Average time taken by reduce tasks: 0sec\n" +
            "Worse performing reduce tasks: \n" +
            "TaskId\t\tTimetaken\n" +
            "task_1317928501754_0001_r_000008 0sec\n" +
            "The last reduce task task_1317928501754_0001_r_000008 finished at (relative to the Job launch time): 6-Oct-2011 19:15:18 (16sec)\n" +
            "=========\n" +
            "\n" +
            "FAILED MAP task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" +
            "====================================================\n" +
            "task_1317928501754_0001_m_000002\t6-Oct-2011 19:15:04\t6-Oct-2011 19:15:06 (2sec)\t\t\n" +
            "\n" +
            "SUCCEEDED JOB_SETUP task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tFinishTime\tError\n" +
            "====================================================\n" +
            "task_1317928501754_0001_s_000001\t6-Oct-2011 19:15:03\t6-Oct-2011 19:15:04 (1sec)\t\n" +
            "\n" +
            "SUCCEEDED MAP task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" +
            "====================================================\n" +
            "task_1317928501754_0001_m_000006\t6-Oct-2011 19:15:08\t6-Oct-2011 19:15:14 (6sec)\t\t\n" +
            "\n" +
            "SUCCEEDED MAP task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" +
            "====================================================\n" +
            "task_1317928501754_0001_m_000005\t6-Oct-2011 19:15:07\t6-Oct-2011 19:15:12 (5sec)\t\t\n" +
            "\n" +
            "SUCCEEDED MAP task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" +
            "====================================================\n" +
            "task_1317928501754_0001_m_000004\t6-Oct-2011 19:15:06\t6-Oct-2011 19:15:10 (4sec)\t\t\n" +
            "\n" +
            "SUCCEEDED MAP task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" +
            "====================================================\n" +
            "task_1317928501754_0001_m_000003\t6-Oct-2011 19:15:05\t6-Oct-2011 19:15:08 (3sec)\t\t\n" +
            "\n" +
            "SUCCEEDED MAP task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" +
            "====================================================\n" +
            "task_1317928501754_0001_m_000007\t6-Oct-2011 19:15:09\t6-Oct-2011 19:15:16 (7sec)\t\t\n" +
            "\n" +
            "SUCCEEDED REDUCE task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tFinishTime\tError\n" +
            "====================================================\n" +
            "task_1317928501754_0001_r_000008\t6-Oct-2011 19:15:10\t6-Oct-2011 19:15:18 (8sec)\t\n" +
            "\n" +
            "SUCCEEDED JOB_CLEANUP task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tFinishTime\tError\n" +
            "====================================================\n" +
            "task_1317928501754_0001_c_000009\t6-Oct-2011 19:15:11\t6-Oct-2011 19:15:20 (9sec)\t\n" +
            "\n" +
            "JOB_SETUP task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tFinishTime\tHostName\tError\tTaskLogs\n" +
            "====================================================\n" +
            "attempt_1317928501754_0001_s_000001_1\t6-Oct-2011 19:15:03\t6-Oct-2011 19:15:04 (1sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_s_000001_1\n" +
            "\n" +
            "MAP task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tFinishTime\tHostName\tError\tTaskLogs\n" +
            "====================================================\n" +
            "attempt_1317928501754_0001_m_000002_1\t6-Oct-2011 19:15:04\t6-Oct-2011 19:15:06 (2sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000002_1\n" +
            "attempt_1317928501754_0001_m_000006_1\t6-Oct-2011 19:15:08\t6-Oct-2011 19:15:14 (6sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000006_1\n" +
            "attempt_1317928501754_0001_m_000005_1\t6-Oct-2011 19:15:07\t6-Oct-2011 19:15:12 (5sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000005_1\n" +
            "attempt_1317928501754_0001_m_000004_1\t6-Oct-2011 19:15:06\t6-Oct-2011 19:15:10 (4sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000004_1\n" +
            "attempt_1317928501754_0001_m_000003_1\t6-Oct-2011 19:15:05\t6-Oct-2011 19:15:08 (3sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000003_1\n" +
            "attempt_1317928501754_0001_m_000007_1\t6-Oct-2011 19:15:09\t6-Oct-2011 19:15:16 (7sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000007_1\n" +
            "\n" +
            "REDUCE task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tShuffleFinished\tSortFinished\tFinishTime\tHostName\tError\tTaskLogs\n" +
            "====================================================\n" +
            "attempt_1317928501754_0001_r_000008_1\t6-Oct-2011 19:15:10\t6-Oct-2011 19:15:18 (8sec)\t6-Oct-2011 19:15:18 (0sec)6-Oct-2011 19:15:18 (8sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_r_000008_1\n" +
            "\n" +
            "JOB_CLEANUP task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tFinishTime\tHostName\tError\tTaskLogs\n" +
            "====================================================\n" +
            "attempt_1317928501754_0001_c_000009_1\t6-Oct-2011 19:15:11\t6-Oct-2011 19:15:20 (9sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_c_000009_1\n" +
            "\n" +
            "FAILED task attempts by nodes\n" +
            "Hostname\tFailedTasks\n" +
            "===============================\n" +
            "localhost\ttask_1317928501754_0001_m_000002, \n", outStr);
    } else {
        // Expected section ordering observed on later JDKs: identical content,
        // different listing order of the per-task / per-attempt sections.
        Assert.assertEquals("\n" +
            "Hadoop job: job_1317928501754_0001\n" +
            "=====================================\n" +
            "User: rkanter\n" +
            "JobName: my job\n" +
            "JobConf: /tmp/job.xml\n" +
            "Submitted At: 6-Oct-2011 19:15:01\n" +
            "Launched At: 6-Oct-2011 19:15:02 (1sec)\n" +
            "Finished At: 6-Oct-2011 19:15:16 (14sec)\n" +
            "Status: SUCCEEDED\n" +
            "Counters: \n" +
            "\n" +
            "|Group Name |Counter name |Map Value |Reduce Value|Total Value|\n" +
            "---------------------------------------------------------------------------------------\n" +
            "|group1 |counter1 |5 |5 |5 \n" +
            "|group1 |counter2 |10 |10 |10 \n" +
            "|group2 |counter1 |15 |15 |15 \n" +
            "\n" +
            "=====================================\n" +
            "\n" +
            "Task Summary\n" +
            "============================\n" +
            "Kind\tTotal\tSuccessful\tFailed\tKilled\tStartTime\tFinishTime\n" +
            "\n" +
            "Setup\t1\t1\t\t0\t0\t6-Oct-2011 19:15:03\t6-Oct-2011 19:15:04 (1sec)\n" +
            "Map\t6\t5\t\t1\t0\t6-Oct-2011 19:15:04\t6-Oct-2011 19:15:16 (12sec)\n" +
            "Reduce\t1\t1\t\t0\t0\t6-Oct-2011 19:15:10\t6-Oct-2011 19:15:18 (8sec)\n" +
            "Cleanup\t1\t1\t\t0\t0\t6-Oct-2011 19:15:11\t6-Oct-2011 19:15:20 (9sec)\n" +
            "============================\n" +
            "\n" +
            "\n" +
            "Analysis\n" +
            "=========\n" +
            "\n" +
            "Time taken by best performing map task task_1317928501754_0001_m_000003: 3sec\n" +
            "Average time taken by map tasks: 5sec\n" +
            "Worse performing map tasks: \n" +
            "TaskId\t\tTimetaken\n" +
            "task_1317928501754_0001_m_000007 7sec\n" +
            "task_1317928501754_0001_m_000006 6sec\n" +
            "task_1317928501754_0001_m_000005 5sec\n" +
            "task_1317928501754_0001_m_000004 4sec\n" +
            "task_1317928501754_0001_m_000003 3sec\n" +
            "The last map task task_1317928501754_0001_m_000007 finished at (relative to the Job launch time): 6-Oct-2011 19:15:16 (14sec)\n" +
            "\n" +
            "Time taken by best performing shuffle task task_1317928501754_0001_r_000008: 8sec\n" +
            "Average time taken by shuffle tasks: 8sec\n" +
            "Worse performing shuffle tasks: \n" +
            "TaskId\t\tTimetaken\n" +
            "task_1317928501754_0001_r_000008 8sec\n" +
            "The last shuffle task task_1317928501754_0001_r_000008 finished at (relative to the Job launch time): 6-Oct-2011 19:15:18 (16sec)\n" +
            "\n" +
            "Time taken by best performing reduce task task_1317928501754_0001_r_000008: 0sec\n" +
            "Average time taken by reduce tasks: 0sec\n" +
            "Worse performing reduce tasks: \n" +
            "TaskId\t\tTimetaken\n" +
            "task_1317928501754_0001_r_000008 0sec\n" +
            "The last reduce task task_1317928501754_0001_r_000008 finished at (relative to the Job launch time): 6-Oct-2011 19:15:18 (16sec)\n" +
            "=========\n" +
            "\n" +
            "FAILED MAP task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" +
            "====================================================\n" +
            "task_1317928501754_0001_m_000002\t6-Oct-2011 19:15:04\t6-Oct-2011 19:15:06 (2sec)\t\t\n" +
            "\n" +
            "SUCCEEDED JOB_SETUP task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tFinishTime\tError\n" +
            "====================================================\n" +
            "task_1317928501754_0001_s_000001\t6-Oct-2011 19:15:03\t6-Oct-2011 19:15:04 (1sec)\t\n" +
            "\n" +
            "SUCCEEDED MAP task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" +
            "====================================================\n" +
            "task_1317928501754_0001_m_000007\t6-Oct-2011 19:15:09\t6-Oct-2011 19:15:16 (7sec)\t\t\n" +
            "\n" +
            "SUCCEEDED MAP task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" +
            "====================================================\n" +
            "task_1317928501754_0001_m_000006\t6-Oct-2011 19:15:08\t6-Oct-2011 19:15:14 (6sec)\t\t\n" +
            "\n" +
            "SUCCEEDED MAP task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" +
            "====================================================\n" +
            "task_1317928501754_0001_m_000005\t6-Oct-2011 19:15:07\t6-Oct-2011 19:15:12 (5sec)\t\t\n" +
            "\n" +
            "SUCCEEDED MAP task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" +
            "====================================================\n" +
            "task_1317928501754_0001_m_000004\t6-Oct-2011 19:15:06\t6-Oct-2011 19:15:10 (4sec)\t\t\n" +
            "\n" +
            "SUCCEEDED MAP task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tFinishTime\tError\tInputSplits\n" +
            "====================================================\n" +
            "task_1317928501754_0001_m_000003\t6-Oct-2011 19:15:05\t6-Oct-2011 19:15:08 (3sec)\t\t\n" +
            "\n" +
            "SUCCEEDED REDUCE task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tFinishTime\tError\n" +
            "====================================================\n" +
            "task_1317928501754_0001_r_000008\t6-Oct-2011 19:15:10\t6-Oct-2011 19:15:18 (8sec)\t\n" +
            "\n" +
            "SUCCEEDED JOB_CLEANUP task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tFinishTime\tError\n" +
            "====================================================\n" +
            "task_1317928501754_0001_c_000009\t6-Oct-2011 19:15:11\t6-Oct-2011 19:15:20 (9sec)\t\n" +
            "\n" +
            "JOB_SETUP task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tFinishTime\tHostName\tError\tTaskLogs\n" +
            "====================================================\n" +
            "attempt_1317928501754_0001_s_000001_1\t6-Oct-2011 19:15:03\t6-Oct-2011 19:15:04 (1sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_s_000001_1\n" +
            "\n" +
            "MAP task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tFinishTime\tHostName\tError\tTaskLogs\n" +
            "====================================================\n" +
            "attempt_1317928501754_0001_m_000007_1\t6-Oct-2011 19:15:09\t6-Oct-2011 19:15:16 (7sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000007_1\n" +
            "attempt_1317928501754_0001_m_000002_1\t6-Oct-2011 19:15:04\t6-Oct-2011 19:15:06 (2sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000002_1\n" +
            "attempt_1317928501754_0001_m_000006_1\t6-Oct-2011 19:15:08\t6-Oct-2011 19:15:14 (6sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000006_1\n" +
            "attempt_1317928501754_0001_m_000005_1\t6-Oct-2011 19:15:07\t6-Oct-2011 19:15:12 (5sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000005_1\n" +
            "attempt_1317928501754_0001_m_000004_1\t6-Oct-2011 19:15:06\t6-Oct-2011 19:15:10 (4sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000004_1\n" +
            "attempt_1317928501754_0001_m_000003_1\t6-Oct-2011 19:15:05\t6-Oct-2011 19:15:08 (3sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000003_1\n" +
            "\n" +
            "REDUCE task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tShuffleFinished\tSortFinished\tFinishTime\tHostName\tError\tTaskLogs\n" +
            "====================================================\n" +
            "attempt_1317928501754_0001_r_000008_1\t6-Oct-2011 19:15:10\t6-Oct-2011 19:15:18 (8sec)\t6-Oct-2011 19:15:18 (0sec)6-Oct-2011 19:15:18 (8sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_r_000008_1\n" +
            "\n" +
            "JOB_CLEANUP task list for job_1317928501754_0001\n" +
            "TaskId\t\tStartTime\tFinishTime\tHostName\tError\tTaskLogs\n" +
            "====================================================\n" +
            "attempt_1317928501754_0001_c_000009_1\t6-Oct-2011 19:15:11\t6-Oct-2011 19:15:20 (9sec)\tlocalhost\thttp://t:1234/tasklog?attemptid=attempt_1317928501754_0001_c_000009_1\n" +
            "\n" +
            "FAILED task attempts by nodes\n" +
            "Hostname\tFailedTasks\n" +
            "===============================\n" +
            "localhost\ttask_1317928501754_0001_m_000002, \n", outStr);
    }
}
// Verifies JSONHistoryViewerPrinter in summary mode (printAll == false):
// the JSON contains job-level info, aggregated counters, the per-task-type
// summary, and only the FAILED task; successful tasks/attempts are omitted.
@Test
public void testJSONPrinter() throws Exception {
JobHistoryParser.JobInfo job = createJobInfo();
JSONHistoryViewerPrinter printer =
new JSONHistoryViewerPrinter(job, false, "http://");
String outStr = run(printer);
// NON_EXTENSIBLE: the actual output must contain exactly these fields,
// no extras allowed (field ordering is ignored by JSONAssert).
JSONAssert.assertEquals("{\n" +
"  \"counters\": {\n" +
"    \"group1\": [\n" +
"      {\n" +
"        \"counterName\": \"counter1\",\n" +
"        \"mapValue\": 5,\n" +
"        \"reduceValue\": 5,\n" +
"        \"totalValue\": 5\n" +
"      },\n" +
"      {\n" +
"        \"counterName\": \"counter2\",\n" +
"        \"mapValue\": 10,\n" +
"        \"reduceValue\": 10,\n" +
"        \"totalValue\": 10\n" +
"      }\n" +
"    ],\n" +
"    \"group2\": [\n" +
"      {\n" +
"        \"counterName\": \"counter1\",\n" +
"        \"mapValue\": 15,\n" +
"        \"reduceValue\": 15,\n" +
"        \"totalValue\": 15\n" +
"      }\n" +
"    ]\n" +
"  },\n" +
"  \"finishedAt\": 1317928516754,\n" +
"  \"hadoopJob\": \"job_1317928501754_0001\",\n" +
"  \"jobConf\": \"/tmp/job.xml\",\n" +
"  \"jobName\": \"my job\",\n" +
"  \"launchedAt\": 1317928502754,\n" +
"  \"status\": \"SUCCEEDED\",\n" +
"  \"submittedAt\": 1317928501754,\n" +
"  \"taskSummary\": {\n" +
"    \"cleanup\": {\n" +
"      \"failed\": 0,\n" +
"      \"finishTime\": 1317928520754,\n" +
"      \"killed\": 0,\n" +
"      \"startTime\": 1317928511754,\n" +
"      \"successful\": 1,\n" +
"      \"total\": 1\n" +
"    },\n" +
"    \"map\": {\n" +
"      \"failed\": 1,\n" +
"      \"finishTime\": 1317928516754,\n" +
"      \"killed\": 0,\n" +
"      \"startTime\": 1317928504754,\n" +
"      \"successful\": 5,\n" +
"      \"total\": 6\n" +
"    },\n" +
"    \"reduce\": {\n" +
"      \"failed\": 0,\n" +
"      \"finishTime\": 1317928518754,\n" +
"      \"killed\": 0,\n" +
"      \"startTime\": 1317928510754,\n" +
"      \"successful\": 1,\n" +
"      \"total\": 1\n" +
"    },\n" +
"    \"setup\": {\n" +
"      \"failed\": 0,\n" +
"      \"finishTime\": 1317928504754,\n" +
"      \"killed\": 0,\n" +
"      \"startTime\": 1317928503754,\n" +
"      \"successful\": 1,\n" +
"      \"total\": 1\n" +
"    }\n" +
"  },\n" +
// Only the single FAILED map task appears here in summary mode.
"  \"tasks\": [\n" +
"    {\n" +
"      \"finishTime\": 1317928506754,\n" +
"      \"inputSplits\": \"\",\n" +
"      \"startTime\": 1317928504754,\n" +
"      \"status\": \"FAILED\",\n" +
"      \"taskId\": \"task_1317928501754_0001_m_000002\",\n" +
"      \"type\": \"MAP\"\n" +
"    }\n" +
"  ],\n" +
"  \"user\": \"rkanter\"\n" +
"}\n", outStr, JSONCompareMode.NON_EXTENSIBLE);
}
// Verifies JSONHistoryViewerPrinter in full mode (printAll == true):
// every task (all states) is emitted, each with its attempts and per-task
// counters, in addition to the job-level info and task summary.
@Test
public void testJSONPrinterAll() throws Exception {
JobHistoryParser.JobInfo job = createJobInfo();
JSONHistoryViewerPrinter printer =
new JSONHistoryViewerPrinter(job, true, "http://");
String outStr = run(printer);
// NON_EXTENSIBLE: the actual output must contain exactly these fields,
// no extras allowed (field ordering is ignored by JSONAssert).
JSONAssert.assertEquals("{\n" +
"  \"counters\": {\n" +
"    \"group1\": [\n" +
"      {\n" +
"        \"counterName\": \"counter1\",\n" +
"        \"mapValue\": 5,\n" +
"        \"reduceValue\": 5,\n" +
"        \"totalValue\": 5\n" +
"      },\n" +
"      {\n" +
"        \"counterName\": \"counter2\",\n" +
"        \"mapValue\": 10,\n" +
"        \"reduceValue\": 10,\n" +
"        \"totalValue\": 10\n" +
"      }\n" +
"    ],\n" +
"    \"group2\": [\n" +
"      {\n" +
"        \"counterName\": \"counter1\",\n" +
"        \"mapValue\": 15,\n" +
"        \"reduceValue\": 15,\n" +
"        \"totalValue\": 15\n" +
"      }\n" +
"    ]\n" +
"  },\n" +
"  \"finishedAt\": 1317928516754,\n" +
"  \"hadoopJob\": \"job_1317928501754_0001\",\n" +
"  \"jobConf\": \"/tmp/job.xml\",\n" +
"  \"jobName\": \"my job\",\n" +
"  \"launchedAt\": 1317928502754,\n" +
"  \"status\": \"SUCCEEDED\",\n" +
"  \"submittedAt\": 1317928501754,\n" +
"  \"taskSummary\": {\n" +
"    \"cleanup\": {\n" +
"      \"failed\": 0,\n" +
"      \"finishTime\": 1317928520754,\n" +
"      \"killed\": 0,\n" +
"      \"startTime\": 1317928511754,\n" +
"      \"successful\": 1,\n" +
"      \"total\": 1\n" +
"    },\n" +
"    \"map\": {\n" +
"      \"failed\": 1,\n" +
"      \"finishTime\": 1317928516754,\n" +
"      \"killed\": 0,\n" +
"      \"startTime\": 1317928504754,\n" +
"      \"successful\": 5,\n" +
"      \"total\": 6\n" +
"    },\n" +
"    \"reduce\": {\n" +
"      \"failed\": 0,\n" +
"      \"finishTime\": 1317928518754,\n" +
"      \"killed\": 0,\n" +
"      \"startTime\": 1317928510754,\n" +
"      \"successful\": 1,\n" +
"      \"total\": 1\n" +
"    },\n" +
"    \"setup\": {\n" +
"      \"failed\": 0,\n" +
"      \"finishTime\": 1317928504754,\n" +
"      \"killed\": 0,\n" +
"      \"startTime\": 1317928503754,\n" +
"      \"successful\": 1,\n" +
"      \"total\": 1\n" +
"    }\n" +
"  },\n" +
// All nine tasks appear in full mode, each with attempt details and
// its own counters (single-value form, not map/reduce/total).
"  \"tasks\": [\n" +
"    {\n" +
"      \"attempts\": {\n" +
"        \"attemptId\": \"attempt_1317928501754_0001_m_000002_1\",\n" +
"        \"finishTime\": 1317928506754,\n" +
"        \"hostName\": \"localhost\",\n" +
"        \"startTime\": 1317928504754,\n" +
"        \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000002_1\"\n" +
"      },\n" +
"      \"counters\": {\n" +
"        \"group1\": [\n" +
"          {\n" +
"            \"counterName\": \"counter1\",\n" +
"            \"value\": 5\n" +
"          },\n" +
"          {\n" +
"            \"counterName\": \"counter2\",\n" +
"            \"value\": 10\n" +
"          }\n" +
"        ],\n" +
"        \"group2\": [\n" +
"          {\n" +
"            \"counterName\": \"counter1\",\n" +
"            \"value\": 15\n" +
"          }\n" +
"        ]\n" +
"      },\n" +
"      \"finishTime\": 1317928506754,\n" +
"      \"inputSplits\": \"\",\n" +
"      \"startTime\": 1317928504754,\n" +
"      \"status\": \"FAILED\",\n" +
"      \"taskId\": \"task_1317928501754_0001_m_000002\",\n" +
"      \"type\": \"MAP\"\n" +
"    },\n" +
"    {\n" +
"      \"attempts\": {\n" +
"        \"attemptId\": \"attempt_1317928501754_0001_s_000001_1\",\n" +
"        \"finishTime\": 1317928504754,\n" +
"        \"hostName\": \"localhost\",\n" +
"        \"startTime\": 1317928503754,\n" +
"        \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_s_000001_1\"\n" +
"      },\n" +
"      \"counters\": {\n" +
"        \"group1\": [\n" +
"          {\n" +
"            \"counterName\": \"counter1\",\n" +
"            \"value\": 5\n" +
"          },\n" +
"          {\n" +
"            \"counterName\": \"counter2\",\n" +
"            \"value\": 10\n" +
"          }\n" +
"        ],\n" +
"        \"group2\": [\n" +
"          {\n" +
"            \"counterName\": \"counter1\",\n" +
"            \"value\": 15\n" +
"          }\n" +
"        ]\n" +
"      },\n" +
"      \"finishTime\": 1317928504754,\n" +
"      \"startTime\": 1317928503754,\n" +
"      \"status\": \"SUCCEEDED\",\n" +
"      \"taskId\": \"task_1317928501754_0001_s_000001\",\n" +
"      \"type\": \"JOB_SETUP\"\n" +
"    },\n" +
"    {\n" +
"      \"attempts\": {\n" +
"        \"attemptId\": \"attempt_1317928501754_0001_m_000006_1\",\n" +
"        \"finishTime\": 1317928514754,\n" +
"        \"hostName\": \"localhost\",\n" +
"        \"startTime\": 1317928508754,\n" +
"        \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000006_1\"\n" +
"      },\n" +
"      \"counters\": {\n" +
"        \"group1\": [\n" +
"          {\n" +
"            \"counterName\": \"counter1\",\n" +
"            \"value\": 5\n" +
"          },\n" +
"          {\n" +
"            \"counterName\": \"counter2\",\n" +
"            \"value\": 10\n" +
"          }\n" +
"        ],\n" +
"        \"group2\": [\n" +
"          {\n" +
"            \"counterName\": \"counter1\",\n" +
"            \"value\": 15\n" +
"          }\n" +
"        ]\n" +
"      },\n" +
"      \"finishTime\": 1317928514754,\n" +
"      \"inputSplits\": \"\",\n" +
"      \"startTime\": 1317928508754,\n" +
"      \"status\": \"SUCCEEDED\",\n" +
"      \"taskId\": \"task_1317928501754_0001_m_000006\",\n" +
"      \"type\": \"MAP\"\n" +
"    },\n" +
"    {\n" +
"      \"attempts\": {\n" +
"        \"attemptId\": \"attempt_1317928501754_0001_m_000005_1\",\n" +
"        \"finishTime\": 1317928512754,\n" +
"        \"hostName\": \"localhost\",\n" +
"        \"startTime\": 1317928507754,\n" +
"        \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000005_1\"\n" +
"      },\n" +
"      \"counters\": {\n" +
"        \"group1\": [\n" +
"          {\n" +
"            \"counterName\": \"counter1\",\n" +
"            \"value\": 5\n" +
"          },\n" +
"          {\n" +
"            \"counterName\": \"counter2\",\n" +
"            \"value\": 10\n" +
"          }\n" +
"        ],\n" +
"        \"group2\": [\n" +
"          {\n" +
"            \"counterName\": \"counter1\",\n" +
"            \"value\": 15\n" +
"          }\n" +
"        ]\n" +
"      },\n" +
"      \"finishTime\": 1317928512754,\n" +
"      \"inputSplits\": \"\",\n" +
"      \"startTime\": 1317928507754,\n" +
"      \"status\": \"SUCCEEDED\",\n" +
"      \"taskId\": \"task_1317928501754_0001_m_000005\",\n" +
"      \"type\": \"MAP\"\n" +
"    },\n" +
"    {\n" +
"      \"attempts\": {\n" +
"        \"attemptId\": \"attempt_1317928501754_0001_m_000004_1\",\n" +
"        \"finishTime\": 1317928510754,\n" +
"        \"hostName\": \"localhost\",\n" +
"        \"startTime\": 1317928506754,\n" +
"        \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000004_1\"\n" +
"      },\n" +
"      \"counters\": {\n" +
"        \"group1\": [\n" +
"          {\n" +
"            \"counterName\": \"counter1\",\n" +
"            \"value\": 5\n" +
"          },\n" +
"          {\n" +
"            \"counterName\": \"counter2\",\n" +
"            \"value\": 10\n" +
"          }\n" +
"        ],\n" +
"        \"group2\": [\n" +
"          {\n" +
"            \"counterName\": \"counter1\",\n" +
"            \"value\": 15\n" +
"          }\n" +
"        ]\n" +
"      },\n" +
"      \"finishTime\": 1317928510754,\n" +
"      \"inputSplits\": \"\",\n" +
"      \"startTime\": 1317928506754,\n" +
"      \"status\": \"SUCCEEDED\",\n" +
"      \"taskId\": \"task_1317928501754_0001_m_000004\",\n" +
"      \"type\": \"MAP\"\n" +
"    },\n" +
"    {\n" +
"      \"attempts\": {\n" +
"        \"attemptId\": \"attempt_1317928501754_0001_m_000003_1\",\n" +
"        \"finishTime\": 1317928508754,\n" +
"        \"hostName\": \"localhost\",\n" +
"        \"startTime\": 1317928505754,\n" +
"        \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000003_1\"\n" +
"      },\n" +
"      \"counters\": {\n" +
"        \"group1\": [\n" +
"          {\n" +
"            \"counterName\": \"counter1\",\n" +
"            \"value\": 5\n" +
"          },\n" +
"          {\n" +
"            \"counterName\": \"counter2\",\n" +
"            \"value\": 10\n" +
"          }\n" +
"        ],\n" +
"        \"group2\": [\n" +
"          {\n" +
"            \"counterName\": \"counter1\",\n" +
"            \"value\": 15\n" +
"          }\n" +
"        ]\n" +
"      },\n" +
"      \"finishTime\": 1317928508754,\n" +
"      \"inputSplits\": \"\",\n" +
"      \"startTime\": 1317928505754,\n" +
"      \"status\": \"SUCCEEDED\",\n" +
"      \"taskId\": \"task_1317928501754_0001_m_000003\",\n" +
"      \"type\": \"MAP\"\n" +
"    },\n" +
"    {\n" +
"      \"attempts\": {\n" +
"        \"attemptId\": \"attempt_1317928501754_0001_c_000009_1\",\n" +
"        \"finishTime\": 1317928520754,\n" +
"        \"hostName\": \"localhost\",\n" +
"        \"startTime\": 1317928511754,\n" +
"        \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_c_000009_1\"\n" +
"      },\n" +
"      \"counters\": {\n" +
"        \"group1\": [\n" +
"          {\n" +
"            \"counterName\": \"counter1\",\n" +
"            \"value\": 5\n" +
"          },\n" +
"          {\n" +
"            \"counterName\": \"counter2\",\n" +
"            \"value\": 10\n" +
"          }\n" +
"        ],\n" +
"        \"group2\": [\n" +
"          {\n" +
"            \"counterName\": \"counter1\",\n" +
"            \"value\": 15\n" +
"          }\n" +
"        ]\n" +
"      },\n" +
"      \"finishTime\": 1317928520754,\n" +
"      \"startTime\": 1317928511754,\n" +
"      \"status\": \"SUCCEEDED\",\n" +
"      \"taskId\": \"task_1317928501754_0001_c_000009\",\n" +
"      \"type\": \"JOB_CLEANUP\"\n" +
"    },\n" +
"    {\n" +
"      \"attempts\": {\n" +
"        \"attemptId\": \"attempt_1317928501754_0001_m_000007_1\",\n" +
"        \"finishTime\": 1317928516754,\n" +
"        \"hostName\": \"localhost\",\n" +
"        \"startTime\": 1317928509754,\n" +
"        \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_m_000007_1\"\n" +
"      },\n" +
"      \"counters\": {\n" +
"        \"group1\": [\n" +
"          {\n" +
"            \"counterName\": \"counter1\",\n" +
"            \"value\": 5\n" +
"          },\n" +
"          {\n" +
"            \"counterName\": \"counter2\",\n" +
"            \"value\": 10\n" +
"          }\n" +
"        ],\n" +
"        \"group2\": [\n" +
"          {\n" +
"            \"counterName\": \"counter1\",\n" +
"            \"value\": 15\n" +
"          }\n" +
"        ]\n" +
"      },\n" +
"      \"finishTime\": 1317928516754,\n" +
"      \"inputSplits\": \"\",\n" +
"      \"startTime\": 1317928509754,\n" +
"      \"status\": \"SUCCEEDED\",\n" +
"      \"taskId\": \"task_1317928501754_0001_m_000007\",\n" +
"      \"type\": \"MAP\"\n" +
"    },\n" +
"    {\n" +
"      \"attempts\": {\n" +
"        \"attemptId\": \"attempt_1317928501754_0001_r_000008_1\",\n" +
"        \"finishTime\": 1317928518754,\n" +
"        \"hostName\": \"localhost\",\n" +
"        \"shuffleFinished\": 1317928518754,\n" +
"        \"sortFinished\": 1317928518754,\n" +
"        \"startTime\": 1317928510754,\n" +
"        \"taskLogs\": \"http://t:1234/tasklog?attemptid=attempt_1317928501754_0001_r_000008_1\"\n" +
"      },\n" +
"      \"counters\": {\n" +
"        \"group1\": [\n" +
"          {\n" +
"            \"counterName\": \"counter1\",\n" +
"            \"value\": 5\n" +
"          },\n" +
"          {\n" +
"            \"counterName\": \"counter2\",\n" +
"            \"value\": 10\n" +
"          }\n" +
"        ],\n" +
"        \"group2\": [\n" +
"          {\n" +
"            \"counterName\": \"counter1\",\n" +
"            \"value\": 15\n" +
"          }\n" +
"        ]\n" +
"      },\n" +
"      \"finishTime\": 1317928518754,\n" +
"      \"startTime\": 1317928510754,\n" +
"      \"status\": \"SUCCEEDED\",\n" +
"      \"taskId\": \"task_1317928501754_0001_r_000008\",\n" +
"      \"type\": \"REDUCE\"\n" +
"    }\n" +
"  ],\n" +
"  \"user\": \"rkanter\"\n" +
"}\n", outStr, JSONCompareMode.NON_EXTENSIBLE);
}
/**
 * Executes the given printer and captures everything it writes to its
 * PrintStream as a UTF-8 string. The captured output is also logged so
 * failing assertions in callers are easier to diagnose.
 *
 * @param printer the history printer under test
 * @return the complete textual output produced by {@code printer}
 * @throws Exception if printing or UTF-8 decoding fails
 */
private String run(HistoryViewerPrinter printer) throws Exception {
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  PrintStream sink = new PrintStream(buffer, true);
  printer.print(sink);
  sink.close();
  String captured = buffer.toString("UTF-8");
  LOG.info("out = " + captured);
  return captured;
}
/**
 * Builds the JobInfo fixture shared by the printer tests: a SUCCEEDED job
 * with six maps (one FAILED), one reduce, plus setup and cleanup tasks.
 * Task/attempt timestamps are derived from the submit time so the expected
 * output strings in the tests stay stable.
 *
 * @return a fully populated job history fixture
 */
private static JobHistoryParser.JobInfo createJobInfo() {
  // All timestamps in the fixture are offsets from this base submit time.
  final long baseTime = 1317928501754L;
  JobHistoryParser.JobInfo info = new JobHistoryParser.JobInfo();
  info.submitTime = baseTime;
  info.launchTime = baseTime + 1000;
  info.finishTime = baseTime + 15000;
  info.jobid = JobID.forName("job_1317928501754_0001");
  info.username = "rkanter";
  info.jobname = "my job";
  info.jobQueueName = "my queue";
  info.jobConfPath = "/tmp/job.xml";
  info.totalMaps = 5;
  info.totalReduces = 1;
  info.failedMaps = 1;
  info.failedReduces = 0;
  info.finishedMaps = 5;
  info.finishedReduces = 1;
  info.jobStatus = JobStatus.State.SUCCEEDED.name();
  info.totalCounters = createCounters();
  info.mapCounters = createCounters();
  info.reduceCounters = createCounters();
  info.tasksMap = new HashMap<>();
  // One task per id; ids also drive each task's start/finish offsets.
  addTaskInfo(info, TaskType.JOB_SETUP, 1, TaskStatus.State.SUCCEEDED);
  addTaskInfo(info, TaskType.MAP, 2, TaskStatus.State.FAILED);
  addTaskInfo(info, TaskType.MAP, 3, TaskStatus.State.SUCCEEDED);
  addTaskInfo(info, TaskType.MAP, 4, TaskStatus.State.SUCCEEDED);
  addTaskInfo(info, TaskType.MAP, 5, TaskStatus.State.SUCCEEDED);
  addTaskInfo(info, TaskType.MAP, 6, TaskStatus.State.SUCCEEDED);
  addTaskInfo(info, TaskType.MAP, 7, TaskStatus.State.SUCCEEDED);
  addTaskInfo(info, TaskType.REDUCE, 8, TaskStatus.State.SUCCEEDED);
  addTaskInfo(info, TaskType.JOB_CLEANUP, 9, TaskStatus.State.SUCCEEDED);
  return info;
}
/**
 * Creates the counters fixture used for the job and for every task:
 * "group1" with counter1=5 and counter2=10, and "group2" with counter1=15.
 *
 * @return a freshly built Counters instance
 */
private static Counters createCounters() {
  final Counters result = new Counters();
  result.findCounter("group1", "counter1").setValue(5);
  result.findCounter("group1", "counter2").setValue(10);
  result.findCounter("group2", "counter1").setValue(15);
  return result;
}
/**
 * Adds one task (with a single attempt) to the job fixture. The task's
 * start time is launchTime + id seconds and its duration is id seconds,
 * so each id yields distinct, predictable timestamps.
 *
 * @param job the job fixture to extend
 * @param type the task type (MAP, REDUCE, JOB_SETUP, JOB_CLEANUP)
 * @param id the numeric task id; also drives the time offsets
 * @param status the final state recorded for the task
 */
private static void addTaskInfo(JobHistoryParser.JobInfo job,
    TaskType type, int id, TaskStatus.State status) {
  // id seconds: used both as the start offset and the task duration.
  final long offset = id * 1000L;
  JobHistoryParser.TaskInfo info = new JobHistoryParser.TaskInfo();
  info.taskId = new TaskID(job.getJobId(), type, id);
  info.startTime = job.getLaunchTime() + offset;
  info.finishTime = info.startTime + offset;
  info.taskType = type;
  info.counters = createCounters();
  info.status = status.name();
  info.attemptsMap = new HashMap<>();
  addTaskAttemptInfo(info, 1);
  job.tasksMap.put(info.getTaskId(), info);
}
/**
 * Adds a single attempt to the given task fixture. The attempt mirrors
 * the task's own start/finish times; shuffle, sort, and map finish times
 * are all set to the task's finish time.
 *
 * @param task the task fixture to attach the attempt to
 * @param id the attempt number
 */
private static void addTaskAttemptInfo(
    JobHistoryParser.TaskInfo task, int id) {
  final long finish = task.getFinishTime();
  JobHistoryParser.TaskAttemptInfo info =
      new JobHistoryParser.TaskAttemptInfo();
  info.attemptId = new TaskAttemptID(
      TaskID.downgrade(task.getTaskId()), id);
  info.startTime = task.getStartTime();
  info.finishTime = finish;
  info.shuffleFinishTime = finish;
  info.sortFinishTime = finish;
  info.mapFinishTime = finish;
  info.status = task.getTaskStatus();
  info.taskType = task.getTaskType();
  info.trackerName = "localhost";
  info.httpPort = 1234;
  info.hostname = "localhost";
  task.attemptsMap.put(info.getAttemptId(), info);
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.client;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.delete.DeleteRequestBuilder;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.explain.ExplainRequest;
import org.elasticsearch.action.explain.ExplainRequestBuilder;
import org.elasticsearch.action.explain.ExplainResponse;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequestBuilder;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetRequestBuilder;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.get.MultiGetRequestBuilder;
import org.elasticsearch.action.get.MultiGetResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.ClearScrollRequest;
import org.elasticsearch.action.search.ClearScrollRequestBuilder;
import org.elasticsearch.action.search.ClearScrollResponse;
import org.elasticsearch.action.search.MultiSearchRequest;
import org.elasticsearch.action.search.MultiSearchRequestBuilder;
import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.action.search.SearchScrollRequestBuilder;
import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
import org.elasticsearch.action.termvectors.MultiTermVectorsRequestBuilder;
import org.elasticsearch.action.termvectors.MultiTermVectorsResponse;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsRequestBuilder;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateRequestBuilder;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.Releasable;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import java.util.Map;
/**
* A client provides a one stop interface for performing actions/operations against the cluster.
* <p>
* All operations performed are asynchronous by nature. Each action/operation has two flavors, the first
* simply returns an {@link org.elasticsearch.action.ActionFuture}, while the second accepts an
* {@link org.elasticsearch.action.ActionListener}.
* <p>
* A client can be retrieved from a started {@link org.elasticsearch.node.Node}.
*
* @see org.elasticsearch.node.Node#client()
*/
public interface Client extends ElasticsearchClient, Releasable {
/**
 * Node-scope setting describing the client type. Only the values
 * {@code "node"} and {@code "transport"} are accepted; any other value
 * fails validation with an {@link IllegalArgumentException}.
 */
Setting<String> CLIENT_TYPE_SETTING_S = new Setting<>("client.type", "node", (s) -> {
switch (s) {
case "node":
case "transport":
return s;
default:
throw new IllegalArgumentException("Can't parse [client.type] must be one of [node, transport]");
}
}, Property.NodeScope);
/**
 * The admin client that can be used to perform administrative operations.
 */
AdminClient admin();
/**
 * Index a JSON source associated with a given index.
 * <p>
 * The id is optional, if it is not provided, one will be generated automatically.
 *
 * @param request The index request
 * @return The result future
 * @see Requests#indexRequest(String)
 */
ActionFuture<IndexResponse> index(IndexRequest request);
/**
 * Index a document associated with a given index.
 * <p>
 * The id is optional, if it is not provided, one will be generated automatically.
 *
 * @param request The index request
 * @param listener A listener to be notified with a result
 * @see Requests#indexRequest(String)
 */
void index(IndexRequest request, ActionListener<IndexResponse> listener);
/**
 * Index a document associated with a given index.
 * <p>
 * The id is optional, if it is not provided, one will be generated automatically.
 */
IndexRequestBuilder prepareIndex();
/**
 * Index a document associated with a given index.
 * <p>
 * The id is optional, if it is not provided, one will be generated automatically.
 *
 * @param index The index to index the document to
 */
IndexRequestBuilder prepareIndex(String index);
/**
 * Updates a document based on a script.
 *
 * @param request The update request
 * @return The result future
 */
ActionFuture<UpdateResponse> update(UpdateRequest request);
/**
 * Updates a document based on a script.
 *
 * @param request The update request
 * @param listener A listener to be notified with a result
 */
void update(UpdateRequest request, ActionListener<UpdateResponse> listener);
/**
 * Updates a document based on a script.
 */
UpdateRequestBuilder prepareUpdate();
/**
 * Updates a document based on a script.
 */
UpdateRequestBuilder prepareUpdate(String index, String id);
/**
 * Deletes a document from the index based on the index and id.
 *
 * @param request The delete request
 * @return The result future
 * @see Requests#deleteRequest(String)
 */
ActionFuture<DeleteResponse> delete(DeleteRequest request);
/**
 * Deletes a document from the index based on the index and id.
 *
 * @param request The delete request
 * @param listener A listener to be notified with a result
 * @see Requests#deleteRequest(String)
 */
void delete(DeleteRequest request, ActionListener<DeleteResponse> listener);
/**
 * Deletes a document from the index based on the index and id.
 */
DeleteRequestBuilder prepareDelete();
/**
 * Deletes a document from the index based on the index and id.
 *
 * @param index The index to delete the document from
 * @param id The id of the document to delete
 */
DeleteRequestBuilder prepareDelete(String index, String id);
/**
 * Executes a bulk of index / delete operations.
 *
 * @param request The bulk request
 * @return The result future
 * @see org.elasticsearch.client.Requests#bulkRequest()
 */
ActionFuture<BulkResponse> bulk(BulkRequest request);
/**
 * Executes a bulk of index / delete operations.
 *
 * @param request The bulk request
 * @param listener A listener to be notified with a result
 * @see org.elasticsearch.client.Requests#bulkRequest()
 */
void bulk(BulkRequest request, ActionListener<BulkResponse> listener);
/**
 * Executes a bulk of index / delete operations.
 */
BulkRequestBuilder prepareBulk();
/**
 * Executes a bulk of index / delete operations with default index
 */
BulkRequestBuilder prepareBulk(@Nullable String globalIndex);
/**
 * Gets the document that was indexed from an index with an id.
 *
 * @param request The get request
 * @return The result future
 * @see Requests#getRequest(String)
 */
ActionFuture<GetResponse> get(GetRequest request);
/**
 * Gets the document that was indexed from an index with an id.
 *
 * @param request The get request
 * @param listener A listener to be notified with a result
 * @see Requests#getRequest(String)
 */
void get(GetRequest request, ActionListener<GetResponse> listener);
/**
 * Gets the document that was indexed from an index with an id.
 */
GetRequestBuilder prepareGet();
/**
 * Gets the document that was indexed from an index with an id.
 */
GetRequestBuilder prepareGet(String index, String id);
/**
 * Multi get documents.
 */
ActionFuture<MultiGetResponse> multiGet(MultiGetRequest request);
/**
 * Multi get documents.
 */
void multiGet(MultiGetRequest request, ActionListener<MultiGetResponse> listener);
/**
 * Multi get documents.
 */
MultiGetRequestBuilder prepareMultiGet();
/**
 * Search across one or more indices with a query.
 *
 * @param request The search request
 * @return The result future
 * @see Requests#searchRequest(String...)
 */
ActionFuture<SearchResponse> search(SearchRequest request);
/**
 * Search across one or more indices with a query.
 *
 * @param request The search request
 * @param listener A listener to be notified of the result
 * @see Requests#searchRequest(String...)
 */
void search(SearchRequest request, ActionListener<SearchResponse> listener);
/**
 * Search across one or more indices with a query.
 */
SearchRequestBuilder prepareSearch(String... indices);
/**
 * A search scroll request to continue searching a previous scrollable search request.
 *
 * @param request The search scroll request
 * @return The result future
 * @see Requests#searchScrollRequest(String)
 */
ActionFuture<SearchResponse> searchScroll(SearchScrollRequest request);
/**
 * A search scroll request to continue searching a previous scrollable search request.
 *
 * @param request The search scroll request
 * @param listener A listener to be notified of the result
 * @see Requests#searchScrollRequest(String)
 */
void searchScroll(SearchScrollRequest request, ActionListener<SearchResponse> listener);
/**
 * A search scroll request to continue searching a previous scrollable search request.
 */
SearchScrollRequestBuilder prepareSearchScroll(String scrollId);
/**
 * Performs multiple search requests.
 */
ActionFuture<MultiSearchResponse> multiSearch(MultiSearchRequest request);
/**
 * Performs multiple search requests.
 */
void multiSearch(MultiSearchRequest request, ActionListener<MultiSearchResponse> listener);
/**
 * Performs multiple search requests.
 */
MultiSearchRequestBuilder prepareMultiSearch();
/**
 * An action that returns the term vectors for a specific document.
 *
 * @param request The term vector request
 * @return The response future
 */
ActionFuture<TermVectorsResponse> termVectors(TermVectorsRequest request);
/**
 * An action that returns the term vectors for a specific document.
 *
 * @param request The term vector request
 * @param listener A listener to be notified with the term vectors response
 */
void termVectors(TermVectorsRequest request, ActionListener<TermVectorsResponse> listener);
/**
 * Builder for the term vector request.
 */
TermVectorsRequestBuilder prepareTermVectors();
/**
 * Builder for the term vector request.
 *
 * @param index The index to load the document from
 * @param id The id of the document
 */
TermVectorsRequestBuilder prepareTermVectors(String index, String id);
/**
 * Multi get term vectors.
 */
ActionFuture<MultiTermVectorsResponse> multiTermVectors(MultiTermVectorsRequest request);
/**
 * Multi get term vectors.
 */
void multiTermVectors(MultiTermVectorsRequest request, ActionListener<MultiTermVectorsResponse> listener);
/**
 * Multi get term vectors.
 */
MultiTermVectorsRequestBuilder prepareMultiTermVectors();
/**
 * Computes a score explanation for the specified request.
 *
 * @param index The index this explain is targeted for
 * @param id The document identifier this explain is targeted for
 */
ExplainRequestBuilder prepareExplain(String index, String id);
/**
 * Computes a score explanation for the specified request.
 *
 * @param request The request encapsulating the query and document identifier to compute a score explanation for
 */
ActionFuture<ExplainResponse> explain(ExplainRequest request);
/**
 * Computes a score explanation for the specified request.
 *
 * @param request The request encapsulating the query and document identifier to compute a score explanation for
 * @param listener A listener to be notified of the result
 */
void explain(ExplainRequest request, ActionListener<ExplainResponse> listener);
/**
 * Clears the search contexts associated with specified scroll ids.
 */
ClearScrollRequestBuilder prepareClearScroll();
/**
 * Clears the search contexts associated with specified scroll ids.
 */
ActionFuture<ClearScrollResponse> clearScroll(ClearScrollRequest request);
/**
 * Clears the search contexts associated with specified scroll ids.
 */
void clearScroll(ClearScrollRequest request, ActionListener<ClearScrollResponse> listener);
/**
 * Builder for the field capabilities request.
 */
FieldCapabilitiesRequestBuilder prepareFieldCaps(String... indices);
/**
 * An action that returns the field capabilities from the provided request
 */
ActionFuture<FieldCapabilitiesResponse> fieldCaps(FieldCapabilitiesRequest request);
/**
 * An action that returns the field capabilities from the provided request
 */
void fieldCaps(FieldCapabilitiesRequest request, ActionListener<FieldCapabilitiesResponse> listener);
/**
 * Returns this clients settings
 */
Settings settings();
/**
 * Returns a new lightweight Client that applies all given headers to each of the requests
 * issued from it.
 */
Client filterWithHeader(Map<String, String> headers);
/**
 * Returns a client to a remote cluster with the given cluster alias.
 *
 * @throws IllegalArgumentException if the given clusterAlias doesn't exist
 * @throws UnsupportedOperationException if this functionality is not available on this client.
 */
default Client getRemoteClusterClient(String clusterAlias) {
throw new UnsupportedOperationException("this client doesn't support remote cluster connections");
}
}
| |
/*
* Copyright (c) 2008-2016, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.config;
import com.hazelcast.nio.serialization.ClassDefinition;
import com.hazelcast.nio.serialization.DataSerializableFactory;
import com.hazelcast.nio.serialization.PortableFactory;
import java.nio.ByteOrder;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;
import static com.hazelcast.util.Preconditions.isNotNull;
/**
* Contains the serialization configuration of {@link com.hazelcast.core.HazelcastInstance}.
*/
public class SerializationConfig {
// Version applied to Portable classes; see getPortableVersion().
private int portableVersion;
// Factory id -> DataSerializableFactory class name.
private Map<Integer, String> dataSerializableFactoryClasses;
// Factory id -> DataSerializableFactory instance.
private Map<Integer, DataSerializableFactory> dataSerializableFactories;
// Factory id -> PortableFactory class name.
private Map<Integer, String> portableFactoryClasses;
// Factory id -> PortableFactory instance.
private Map<Integer, PortableFactory> portableFactories;
// Serializer used when no other serializer is applicable.
private GlobalSerializerConfig globalSerializerConfig;
// Per-type serializer configurations (created lazily by the getter).
private Collection<SerializerConfig> serializerConfigs;
// Whether to check for Portable class-definition errors (true by default).
private boolean checkClassDefErrors = true;
// Byte-order configuration: either the platform's native order, or the
// explicitly configured byteOrder (big-endian by default).
private boolean useNativeByteOrder;
private ByteOrder byteOrder = ByteOrder.BIG_ENDIAN;
// Optional serialization features.
private boolean enableCompression;
private boolean enableSharedObject = true;
private boolean allowUnsafe;
// Explicitly registered Portable class definitions.
private Set<ClassDefinition> classDefinitions;
/**
 * Creates an empty configuration; collection-valued properties are
 * created lazily by their getters.
 */
public SerializationConfig() {
}
/**
 * @return the global serializer configuration, or {@code null} if none was set
 * @see com.hazelcast.config.GlobalSerializerConfig
 */
public GlobalSerializerConfig getGlobalSerializerConfig() {
return globalSerializerConfig;
}
/**
 * @param globalSerializerConfig configuration of serializer that will be used
 * if no other serializer is applicable
 * @return configured {@link com.hazelcast.config.SerializationConfig} for chaining
 */
public SerializationConfig setGlobalSerializerConfig(GlobalSerializerConfig globalSerializerConfig) {
this.globalSerializerConfig = globalSerializerConfig;
return this;
}
/**
 * Returns the registered serializer configurations, creating the backing
 * collection lazily on first access (never returns {@code null}).
 *
 * @return list of {@link com.hazelcast.config.SerializerConfig}s
 */
public Collection<SerializerConfig> getSerializerConfigs() {
if (serializerConfigs == null) {
serializerConfigs = new LinkedList<SerializerConfig>();
}
return serializerConfigs;
}
/**
 * @param serializerConfig serializer configuration of a class type
 * @return configured {@link com.hazelcast.config.SerializationConfig} for chaining
 */
public SerializationConfig addSerializerConfig(SerializerConfig serializerConfig) {
getSerializerConfigs().add(serializerConfig);
return this;
}
/**
 * @param serializerConfigs lists of serializer configs that will be registered
 * @return configured {@link com.hazelcast.config.SerializationConfig} for chaining
 */
public SerializationConfig setSerializerConfigs(Collection<SerializerConfig> serializerConfigs) {
this.serializerConfigs = serializerConfigs;
return this;
}
/**
 * Portable version will be used to differentiate two versions of the same class that have changes on the class,
 * like adding/removing a field or changing a type of a field.
 *
 * @return version of portable classes
 */
public int getPortableVersion() {
return portableVersion;
}
/**
 * Sets the version used to differentiate incompatible revisions of the same
 * portable class.
 *
 * @param portableVersion int value for the version of portable classes
 * @return this {@code SerializationConfig} for chaining
 * @throws IllegalArgumentException if {@code portableVersion} is negative
 */
public SerializationConfig setPortableVersion(int portableVersion) {
    if (portableVersion < 0) {
        throw new IllegalArgumentException("Portable version cannot be negative!");
    }
    this.portableVersion = portableVersion;
    return this;
}
/**
 * Returns the map of factory id to factory class name, lazily creating an
 * empty map on first access. The returned map is live.
 *
 * @return map of factory id and corresponding factory class names
 * @see com.hazelcast.nio.serialization.DataSerializableFactory
 */
public Map<Integer, String> getDataSerializableFactoryClasses() {
    if (dataSerializableFactoryClasses == null) {
        dataSerializableFactoryClasses = new HashMap<Integer, String>();
    }
    return dataSerializableFactoryClasses;
}
/**
 * Replaces the registered factory id to factory class name mappings.
 *
 * @param dataSerializableFactoryClasses map of factory id and corresponding factory class names
 * @return this {@code SerializationConfig} for chaining
 * @see com.hazelcast.nio.serialization.DataSerializableFactory
 */
public SerializationConfig setDataSerializableFactoryClasses(Map<Integer, String> dataSerializableFactoryClasses) {
    this.dataSerializableFactoryClasses = dataSerializableFactoryClasses;
    return this;
}
/**
 * Registers a DataSerializableFactory by class name.
 *
 * @param factoryId factory id of dataSerializableFactory to be registered
 * @param dataSerializableFactoryClass name of dataSerializableFactory class to be registered
 * @return this {@code SerializationConfig} for chaining
 * @see com.hazelcast.nio.serialization.DataSerializableFactory
 */
public SerializationConfig addDataSerializableFactoryClass(int factoryId, String dataSerializableFactoryClass) {
    getDataSerializableFactoryClasses().put(factoryId, dataSerializableFactoryClass);
    return this;
}
/**
 * Registers a DataSerializableFactory by class object; delegates to the
 * String-based overload using the class name.
 *
 * @param factoryId factory id of dataSerializableFactory to be registered
 * @param dataSerializableFactoryClass dataSerializableFactory class to be registered, must not be null
 * @return this {@code SerializationConfig} for chaining
 * @see com.hazelcast.nio.serialization.DataSerializableFactory
 */
public SerializationConfig addDataSerializableFactoryClass(int factoryId, Class<?
        extends DataSerializableFactory> dataSerializableFactoryClass) {
    String factoryClassName = isNotNull(dataSerializableFactoryClass, "dataSerializableFactoryClass").getName();
    return addDataSerializableFactoryClass(factoryId, factoryClassName);
}
/**
 * Returns the map of factory id to factory instance, lazily creating an
 * empty map on first access. The returned map is live.
 *
 * @return map of factory id and corresponding dataSerializable factories
 * @see com.hazelcast.nio.serialization.DataSerializableFactory
 */
public Map<Integer, DataSerializableFactory> getDataSerializableFactories() {
    if (dataSerializableFactories == null) {
        dataSerializableFactories = new HashMap<Integer, DataSerializableFactory>();
    }
    return dataSerializableFactories;
}
/**
 * Replaces the registered factory id to factory instance mappings.
 *
 * @param dataSerializableFactories map of factory id and corresponding dataSerializable objects
 * @return this {@code SerializationConfig} for chaining
 * @see com.hazelcast.nio.serialization.DataSerializableFactory
 */
public SerializationConfig setDataSerializableFactories(Map<Integer, DataSerializableFactory> dataSerializableFactories) {
    this.dataSerializableFactories = dataSerializableFactories;
    return this;
}
/**
 * Registers a DataSerializableFactory instance under the given factory id.
 *
 * @param factoryId factory id of DataSerializableFactory to be registered
 * @param dataSerializableFactory DataSerializableFactory object to be registered
 * @return this {@code SerializationConfig} for chaining
 * @see com.hazelcast.nio.serialization.DataSerializableFactory
 */
public SerializationConfig addDataSerializableFactory(int factoryId, DataSerializableFactory dataSerializableFactory) {
    getDataSerializableFactories().put(factoryId, dataSerializableFactory);
    return this;
}
/**
 * Returns the map of factory id to portable factory class name, lazily
 * creating an empty map on first access. The returned map is live.
 *
 * @return map of factory id and corresponding portable factory names
 * @see com.hazelcast.nio.serialization.PortableFactory
 */
public Map<Integer, String> getPortableFactoryClasses() {
    if (portableFactoryClasses == null) {
        portableFactoryClasses = new HashMap<Integer, String>();
    }
    return portableFactoryClasses;
}
/**
 * Replaces the registered factory id to portable factory class name mappings.
 *
 * @param portableFactoryClasses map of factory id and corresponding factory class names
 * @return this {@code SerializationConfig} for chaining
 * @see com.hazelcast.nio.serialization.PortableFactory
 */
public SerializationConfig setPortableFactoryClasses(Map<Integer, String> portableFactoryClasses) {
    this.portableFactoryClasses = portableFactoryClasses;
    return this;
}
/**
 * Registers a PortableFactory by class object; delegates to the String-based
 * overload using the class name.
 *
 * @param factoryId factory id of portableFactory to be registered
 * @param portableFactoryClass portableFactory class to be registered, must not be null
 * @return this {@code SerializationConfig} for chaining
 * @see com.hazelcast.nio.serialization.PortableFactory
 */
public SerializationConfig addPortableFactoryClass(int factoryId, Class<? extends PortableFactory> portableFactoryClass) {
    String portableFactoryClassName = isNotNull(portableFactoryClass, "portableFactoryClass").getName();
    return addPortableFactoryClass(factoryId, portableFactoryClassName);
}
/**
 * Registers a PortableFactory by class name.
 *
 * @param factoryId factory id of portableFactory to be registered
 * @param portableFactoryClass name of the portableFactory class to be registered
 * @return this {@code SerializationConfig} for chaining
 * @see com.hazelcast.nio.serialization.PortableFactory
 */
public SerializationConfig addPortableFactoryClass(int factoryId, String portableFactoryClass) {
    getPortableFactoryClasses().put(factoryId, portableFactoryClass);
    return this;
}
/**
 * Returns the map of factory id to portable factory instance, lazily
 * creating an empty map on first access. The returned map is live.
 *
 * @return map of factory id and corresponding portable factories
 * @see com.hazelcast.nio.serialization.PortableFactory
 */
public Map<Integer, PortableFactory> getPortableFactories() {
    if (portableFactories == null) {
        portableFactories = new HashMap<Integer, PortableFactory>();
    }
    return portableFactories;
}
/**
 * Replaces the registered factory id to portable factory instance mappings.
 *
 * @param portableFactories map of factory id and corresponding factory objects
 * @return this {@code SerializationConfig} for chaining
 * @see com.hazelcast.nio.serialization.PortableFactory
 */
public SerializationConfig setPortableFactories(Map<Integer, PortableFactory> portableFactories) {
    this.portableFactories = portableFactories;
    return this;
}
/**
 * Registers a PortableFactory instance under the given factory id.
 *
 * @param factoryId factory id of portableFactory to be registered
 * @param portableFactory portableFactory object to be registered
 * @return this {@code SerializationConfig} for chaining
 * @see com.hazelcast.nio.serialization.PortableFactory
 */
public SerializationConfig addPortableFactory(int factoryId, PortableFactory portableFactory) {
    getPortableFactories().put(factoryId, portableFactory);
    return this;
}
/**
 * Returns the registered class definitions of portable classes, lazily
 * creating an empty set on first access. The returned set is live.
 *
 * @return registered class definitions of portable classes
 * @see ClassDefinition
 */
public Set<ClassDefinition> getClassDefinitions() {
    if (classDefinitions == null) {
        classDefinitions = new HashSet<ClassDefinition>();
    }
    return classDefinitions;
}
/**
 * Registers a portable {@link ClassDefinition}. A definition may only be
 * registered once.
 *
 * @param classDefinition the class definition to be registered
 * @return this {@code SerializationConfig} for chaining
 * @throws IllegalArgumentException if an equal definition was already registered
 */
public SerializationConfig addClassDefinition(ClassDefinition classDefinition) {
    boolean newlyAdded = getClassDefinitions().add(classDefinition);
    if (!newlyAdded) {
        throw new IllegalArgumentException("ClassDefinition for class-id[" + classDefinition.getClassId()
                + "] already exists!");
    }
    return this;
}
/**
 * Replaces the full set of registered portable class definitions.
 *
 * @param classDefinitions set of class definitions to be registered
 * @return this {@code SerializationConfig} for chaining
 * @see ClassDefinition
 */
public SerializationConfig setClassDefinitions(Set<ClassDefinition> classDefinitions) {
    this.classDefinitions = classDefinitions;
    return this;
}
/**
 * Default value is true (enabled).
 * When enabled, the serialization system will check for class-definition errors at start and throw a
 * SerializationException with the error definition.
 *
 * @return true if enabled.
 */
public boolean isCheckClassDefErrors() {
    return checkClassDefErrors;
}
/**
 * When enabled, the serialization system will check for class-definition errors at start and throw a
 * SerializationException with the error definition.
 *
 * @param checkClassDefErrors set to false to disable.
 * @return this {@code SerializationConfig} for chaining
 */
public SerializationConfig setCheckClassDefErrors(boolean checkClassDefErrors) {
    this.checkClassDefErrors = checkClassDefErrors;
    return this;
}
/**
 * @return true if serialization is configured to use native byte order of the underlying platform.
 */
public boolean isUseNativeByteOrder() {
    return useNativeByteOrder;
}
/**
 * @param useNativeByteOrder set to true to use native byte order of the underlying platform.
 * @return this {@code SerializationConfig} for chaining
 */
public SerializationConfig setUseNativeByteOrder(boolean useNativeByteOrder) {
    this.useNativeByteOrder = useNativeByteOrder;
    return this;
}
/**
 * Note that the result of useNativeByteOrder is not reflected in the return value of this method.
 *
 * @return configured byte order
 */
public ByteOrder getByteOrder() {
    return byteOrder;
}
/**
 * Note that enabling use of native byte order will override the byte order set by this method.
 *
 * @param byteOrder that serialization will use.
 * @return this {@code SerializationConfig} for chaining
 */
public SerializationConfig setByteOrder(ByteOrder byteOrder) {
    this.byteOrder = byteOrder;
    return this;
}
/**
 * Enables compression when default java serialization is used.
 *
 * @return true if compression enabled.
 */
public boolean isEnableCompression() {
    return enableCompression;
}
/**
 * Enables compression when default java serialization is used.
 *
 * @param enableCompression set to true to enable compression
 * @return this {@code SerializationConfig} for chaining
 */
public SerializationConfig setEnableCompression(boolean enableCompression) {
    this.enableCompression = enableCompression;
    return this;
}
/**
 * Default value is true.
 * Enables shared object when default java serialization is used.
 *
 * @return true if enabled.
 */
public boolean isEnableSharedObject() {
    return enableSharedObject;
}
/**
 * Enables shared object when default java serialization is used.
 *
 * @param enableSharedObject set to false to disable
 * @return this {@code SerializationConfig} for chaining
 */
public SerializationConfig setEnableSharedObject(boolean enableSharedObject) {
    this.enableSharedObject = enableSharedObject;
    return this;
}
/**
 * Default value is false.
 * Unsafe is not a public API of Java. Use with caution!
 *
 * @return true if using unsafe is allowed
 */
public boolean isAllowUnsafe() {
    return allowUnsafe;
}
/**
 * Unsafe is not a public API of Java. Use with caution!
 *
 * @param allowUnsafe set to true to allow usage of unsafe
 * @return this {@code SerializationConfig} for chaining
 */
public SerializationConfig setAllowUnsafe(boolean allowUnsafe) {
    this.allowUnsafe = allowUnsafe;
    return this;
}
/**
 * Returns a human-readable summary of this configuration, listing every
 * field; intended for diagnostics and logging.
 */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder("SerializationConfig{");
    sb.append("portableVersion=").append(portableVersion);
    sb.append(", dataSerializableFactoryClasses=").append(dataSerializableFactoryClasses);
    sb.append(", dataSerializableFactories=").append(dataSerializableFactories);
    sb.append(", portableFactoryClasses=").append(portableFactoryClasses);
    sb.append(", portableFactories=").append(portableFactories);
    sb.append(", globalSerializerConfig=").append(globalSerializerConfig);
    sb.append(", serializerConfigs=").append(serializerConfigs);
    sb.append(", checkClassDefErrors=").append(checkClassDefErrors);
    sb.append(", classDefinitions=").append(classDefinitions);
    sb.append(", byteOrder=").append(byteOrder);
    sb.append(", useNativeByteOrder=").append(useNativeByteOrder);
    sb.append('}');
    return sb.toString();
}
}
| |
/**
* Copyright 2014 Microsoft Open Technologies Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.microsoftopentechnologies.intellij.wizards.createvm;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.project.Project;
import com.microsoftopentechnologies.intellij.helpers.UIHelper;
import com.microsoftopentechnologies.intellij.helpers.azure.AzureCmdException;
import com.microsoftopentechnologies.intellij.helpers.azure.sdk.AzureSDKManagerImpl;
import com.microsoftopentechnologies.intellij.model.Subscription;
import com.microsoftopentechnologies.intellij.model.vm.AffinityGroup;
import com.microsoftopentechnologies.intellij.model.vm.CloudService;
import com.microsoftopentechnologies.intellij.model.vm.Location;
import javax.swing.*;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import java.awt.*;
import java.awt.event.*;
import java.util.List;
import java.util.Vector;
public class CreateCloudServiceForm extends JDialog {
private JPanel contentPane;
private JButton buttonOK;
private JButton buttonCancel;
private JComboBox subscriptionComboBox;
private JTextField nameTextField;
private JComboBox regionOrAffinityGroupComboBox;
private JProgressBar createProgressBar;
private Subscription subscription;
private CloudService cloudService;
private Runnable onCreate;
public CreateCloudServiceForm() {
setContentPane(contentPane);
setModal(true);
getRootPane().setDefaultButton(buttonOK);
setTitle("Create Cloud Service");
buttonOK.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
onOK();
}
});
buttonCancel.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
onCancel();
}
});
setDefaultCloseOperation(DISPOSE_ON_CLOSE);
addWindowListener(new WindowAdapter() {
public void windowClosing(WindowEvent e) {
onCancel();
}
});
contentPane.registerKeyboardAction(new ActionListener() {
public void actionPerformed(ActionEvent e) {
onCancel();
}
}, KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0), JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT);
regionOrAffinityGroupComboBox.setRenderer(new DefaultListCellRenderer() {
@Override
public Component getListCellRendererComponent(JList jList, Object o, int i, boolean b, boolean b1) {
return (o instanceof String) ?
super.getListCellRendererComponent(jList, o, i, b, b1)
: super.getListCellRendererComponent(jList, " " + o.toString(), i, b, b1);
}
});
nameTextField.getDocument().addDocumentListener(new DocumentListener() {
@Override
public void insertUpdate(DocumentEvent documentEvent) {
validateEmptyFields();
}
@Override
public void removeUpdate(DocumentEvent documentEvent) {
validateEmptyFields();
}
@Override
public void changedUpdate(DocumentEvent documentEvent) {
validateEmptyFields();
}
});
regionOrAffinityGroupComboBox.addItemListener(new ItemListener() {
@Override
public void itemStateChanged(ItemEvent itemEvent) {
validateEmptyFields();
}
});
}
private void validateEmptyFields() {
boolean allFieldsCompleted = !(
nameTextField.getText().isEmpty() || regionOrAffinityGroupComboBox.getSelectedObjects().length == 0);
buttonOK.setEnabled(allFieldsCompleted);
}
public void fillFields(final Subscription subscription, Project project) {
this.subscription = subscription;
subscriptionComboBox.addItem(subscription.getName());
regionOrAffinityGroupComboBox.addItem("<Loading...>");
ProgressManager.getInstance().run(new Task.Backgroundable(project, "Loading regions...", false) {
@Override
public void run(ProgressIndicator progressIndicator) {
progressIndicator.setIndeterminate(true);
try {
final List<AffinityGroup> affinityGroups = AzureSDKManagerImpl.getManager().getAffinityGroups(subscription.getId().toString());
final List<Location> locations = AzureSDKManagerImpl.getManager().getLocations(subscription.getId().toString());
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
final Vector<Object> vector = new Vector<Object>();
vector.add("Regions");
vector.addAll(locations);
if (affinityGroups.size() > 0) {
vector.add("Affinity Groups");
vector.addAll(affinityGroups);
}
regionOrAffinityGroupComboBox.removeAllItems();
regionOrAffinityGroupComboBox.setModel(new DefaultComboBoxModel(vector) {
public void setSelectedItem(Object o) {
if (!(o instanceof String)) {
super.setSelectedItem(o);
}
}
});
regionOrAffinityGroupComboBox.setSelectedIndex(1);
}
});
} catch (AzureCmdException e) {
UIHelper.showException("Error getting regions", e);
}
}
});
}
private void onOK() {
if (!nameTextField.getText().matches("^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$")) {
JOptionPane.showMessageDialog(this, "Invalid cloud service name. Cloud service name must start with a letter or number, \n" +
"contain only letters, numbers, and hyphens, " +
"and end with a letter or number.", "Error creating the cloud service", JOptionPane.ERROR_MESSAGE);
return;
}
this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
createProgressBar.setVisible(true);
try {
String name = nameTextField.getText();
Object regionOrAffinity = regionOrAffinityGroupComboBox.getSelectedItem();
String region = (regionOrAffinity instanceof Location) ? regionOrAffinity.toString() : "";
String affinityGroup = (regionOrAffinity instanceof AffinityGroup) ? regionOrAffinity.toString() : "";
cloudService = new CloudService(name, region, affinityGroup, "", true, "", true, subscription.getId().toString());
AzureSDKManagerImpl.getManager().createCloudService(cloudService);
} catch (Exception e) {
cloudService = null;
UIHelper.showException("An error occurred while trying to create the specified cloud service", e, "Error Creating Storage Account", false, true);
}
onCreate.run();
this.setCursor(Cursor.getDefaultCursor());
this.setVisible(false);
dispose();
}
private void onCancel() {
dispose();
}
public CloudService getCloudService() {
return cloudService;
}
public void setOnCreate(Runnable onCreate) {
this.onCreate = onCreate;
}
}
| |
// Copyright (C) 2008 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.client.account;
import com.google.gerrit.client.ErrorDialog;
import com.google.gerrit.client.Gerrit;
import com.google.gerrit.client.info.AccountInfo;
import com.google.gerrit.client.rpc.CallbackGroup;
import com.google.gerrit.client.rpc.GerritCallback;
import com.google.gerrit.client.rpc.NativeString;
import com.google.gerrit.client.rpc.Natives;
import com.google.gerrit.client.ui.ComplexDisclosurePanel;
import com.google.gerrit.client.ui.OnEditEnabler;
import com.google.gerrit.common.PageLinks;
import com.google.gerrit.common.errors.EmailException;
import com.google.gerrit.extensions.client.AccountFieldName;
import com.google.gwt.core.client.JsArray;
import com.google.gwt.event.dom.client.ChangeEvent;
import com.google.gwt.event.dom.client.ChangeHandler;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.i18n.client.LocaleInfo;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.ui.Button;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.FormPanel;
import com.google.gwt.user.client.ui.FormPanel.SubmitEvent;
import com.google.gwt.user.client.ui.Grid;
import com.google.gwt.user.client.ui.HTML;
import com.google.gwt.user.client.ui.ListBox;
import com.google.gwt.user.client.ui.VerticalPanel;
import com.google.gwt.user.client.ui.Widget;
import com.google.gwtexpui.globalkey.client.NpTextBox;
import com.google.gwtexpui.user.client.AutoCenterDialogBox;
/**
 * Gerrit settings panel showing the user's full name and preferred email.
 * Loads current values via two grouped RPCs on attach, lets the user pick an
 * existing email or register a new one, and saves name/preferred-email
 * changes. UI wiring is order-sensitive: onInitUI() builds widgets, onLoad()
 * populates them, postLoad() enables them once both RPCs complete.
 */
class ContactPanelShort extends Composite {
  protected final FlowPanel body;
  // Grid column indices; swapped for right-to-left locales in onInitUI().
  protected int labelIdx;
  protected int fieldIdx;
  protected Button save;
  // The currently preferred email, updated after loads and saves.
  private String currentEmail;
  // Set when the name RPC completes; gates postLoad().
  protected boolean haveAccount;
  // Set when the emails RPC completes; gates postLoad().
  private boolean haveEmails;
  NpTextBox nameTxt;
  private ListBox emailPick;
  private Button registerNewEmail;
  private OnEditEnabler onEditEnabler;

  ContactPanelShort() {
    body = new FlowPanel();
    initWidget(body);
  }

  /**
   * Builds the widget tree: an info grid with optional username row, a full
   * name row (read-only with edit/reload links when names are managed
   * externally), an email picker with an optional "register new email"
   * button, a save button, and the mail-filter help panel.
   */
  protected void onInitUI() {
    // RTL locales put the label in the second column.
    if (LocaleInfo.getCurrentLocale().isRTL()) {
      labelIdx = 1;
      fieldIdx = 0;
    } else {
      labelIdx = 0;
      fieldIdx = 1;
    }
    nameTxt = new NpTextBox();
    nameTxt.setVisibleLength(60);
    nameTxt.setReadOnly(!canEditFullName());
    emailPick = new ListBox();
    final Grid infoPlainText = new Grid(2, 2);
    infoPlainText.setStyleName(Gerrit.RESOURCES.css().infoBlock());
    infoPlainText.addStyleName(Gerrit.RESOURCES.css().accountInfoBlock());
    body.add(infoPlainText);
    registerNewEmail = new Button(Util.C.buttonOpenRegisterNewEmail());
    registerNewEmail.setEnabled(false);
    registerNewEmail.addClickHandler(
        new ClickHandler() {
          @Override
          public void onClick(ClickEvent event) {
            doRegisterNewEmail();
          }
        });
    final FlowPanel emailLine = new FlowPanel();
    emailLine.add(emailPick);
    if (canRegisterNewEmail()) {
      emailLine.add(registerNewEmail);
    }
    int row = 0;
    // Show the (immutable) username only when the site uses usernames the
    // user cannot edit themselves.
    if (!Gerrit.info().auth().canEdit(AccountFieldName.USER_NAME)
        && Gerrit.info().auth().siteHasUsernames()) {
      infoPlainText.resizeRows(infoPlainText.getRowCount() + 1);
      row(infoPlainText, row++, Util.C.userName(), new UsernameField());
    }
    if (!canEditFullName()) {
      // Name is managed externally: offer an external edit link (if
      // configured) and a reload link to pick up outside changes.
      FlowPanel nameLine = new FlowPanel();
      nameLine.add(nameTxt);
      if (Gerrit.info().auth().editFullNameUrl() != null) {
        Button edit = new Button(Util.C.linkEditFullName());
        edit.addClickHandler(
            new ClickHandler() {
              @Override
              public void onClick(ClickEvent event) {
                Window.open(Gerrit.info().auth().editFullNameUrl(), "_blank", null);
              }
            });
        nameLine.add(edit);
      }
      Button reload = new Button(Util.C.linkReloadContact());
      reload.addClickHandler(
          new ClickHandler() {
            @Override
            public void onClick(ClickEvent event) {
              Window.Location.replace(Gerrit.loginRedirect(PageLinks.SETTINGS_CONTACT));
            }
          });
      nameLine.add(reload);
      row(infoPlainText, row++, Util.C.contactFieldFullName(), nameLine);
    } else {
      row(infoPlainText, row++, Util.C.contactFieldFullName(), nameTxt);
    }
    row(infoPlainText, row++, Util.C.contactFieldEmail(), emailLine);
    infoPlainText.getCellFormatter().addStyleName(0, 0, Gerrit.RESOURCES.css().topmost());
    infoPlainText.getCellFormatter().addStyleName(0, 1, Gerrit.RESOURCES.css().topmost());
    infoPlainText
        .getCellFormatter()
        .addStyleName(row - 1, 0, Gerrit.RESOURCES.css().bottomheader());
    save = new Button(Util.C.buttonSaveChanges());
    save.setEnabled(false);
    save.addClickHandler(
        new ClickHandler() {
          @Override
          public void onClick(ClickEvent event) {
            doSave();
          }
        });
    final ComplexDisclosurePanel mailFilterHelp =
        new ComplexDisclosurePanel(Util.C.emailFilterHelpTitle(), false);
    mailFilterHelp.setContent(new HTML(Util.C.emailFilterHelp()));
    body.add(mailFilterHelp);
    emailPick.addChangeHandler(
        new ChangeHandler() {
          @Override
          public void onChange(ChangeEvent event) {
            final int idx = emailPick.getSelectedIndex();
            final String v = 0 <= idx ? emailPick.getValue(idx) : null;
            // The list has a sentinel "register new email" entry; choosing it
            // reverts the selection to the current email and opens the dialog.
            if (Util.C.buttonOpenRegisterNewEmail().equals(v)) {
              for (int i = 0; i < emailPick.getItemCount(); i++) {
                if (currentEmail.equals(emailPick.getValue(i))) {
                  emailPick.setSelectedIndex(i);
                  break;
                }
              }
              doRegisterNewEmail();
            } else {
              save.setEnabled(true);
            }
          }
        });
    // Enables "save" whenever the name text diverges from its original value.
    onEditEnabler = new OnEditEnabler(save, nameTxt);
  }

  private boolean canEditFullName() {
    return Gerrit.info().auth().canEdit(AccountFieldName.FULL_NAME);
  }

  private boolean canRegisterNewEmail() {
    return Gerrit.info().auth().canEdit(AccountFieldName.REGISTER_NEW_EMAIL);
  }

  void hideSaveButton() {
    save.setVisible(false);
  }

  /**
   * On attach: builds the UI, shows cached account data immediately, then
   * fetches the authoritative name and email list; postLoad() runs after the
   * final (email) callback of the group completes.
   */
  @Override
  protected void onLoad() {
    super.onLoad();
    onInitUI();
    body.add(save);
    display(Gerrit.getUserAccount());
    emailPick.clear();
    emailPick.setEnabled(false);
    registerNewEmail.setEnabled(false);
    haveAccount = false;
    haveEmails = false;
    CallbackGroup group = new CallbackGroup();
    AccountApi.getName(
        "self",
        group.add(
            new GerritCallback<NativeString>() {
              @Override
              public void onSuccess(NativeString result) {
                nameTxt.setText(result.asString());
                haveAccount = true;
              }

              // Failure is tolerated; postLoad() then leaves controls disabled.
              @Override
              public void onFailure(Throwable caught) {}
            }));
    AccountApi.getEmails(
        "self",
        group.addFinal(
            new GerritCallback<JsArray<EmailInfo>>() {
              @Override
              public void onSuccess(JsArray<EmailInfo> result) {
                for (EmailInfo i : Natives.asList(result)) {
                  emailPick.addItem(i.email());
                  if (i.isPreferred()) {
                    currentEmail = i.email();
                  }
                }
                haveEmails = true;
                postLoad();
              }
            }));
  }

  /** Enables the controls once both RPCs in onLoad() have succeeded. */
  private void postLoad() {
    if (haveAccount && haveEmails) {
      updateEmailList();
      registerNewEmail.setEnabled(true);
      save.setEnabled(false);
      onEditEnabler.updateOriginalValue(nameTxt);
    }
    display();
  }

  // Hook for subclasses; intentionally empty here.
  void display() {}

  /** Adds a label/widget pair to the grid at the locale-appropriate columns. */
  protected void row(Grid info, int row, String name, Widget field) {
    info.setText(row, labelIdx, name);
    info.setWidget(row, fieldIdx, field);
    info.getCellFormatter().addStyleName(row, 0, Gerrit.RESOURCES.css().header());
  }

  /** Populates the form from the given account and resets the dirty state. */
  protected void display(AccountInfo account) {
    currentEmail = account.email();
    nameTxt.setText(account.name());
    save.setEnabled(false);
    onEditEnabler.updateOriginalValue(nameTxt);
  }

  /**
   * Opens a dialog to register a new email address. In dev-auth mode the
   * address is applied immediately; otherwise a confirmation email is sent.
   */
  private void doRegisterNewEmail() {
    if (!canRegisterNewEmail()) {
      return;
    }
    final AutoCenterDialogBox box = new AutoCenterDialogBox(true, true);
    final VerticalPanel body = new VerticalPanel();
    final NpTextBox inEmail = new NpTextBox();
    inEmail.setVisibleLength(60);
    final Button register = new Button(Util.C.buttonSendRegisterNewEmail());
    final Button cancel = new Button(Util.C.buttonCancel());
    final FormPanel form = new FormPanel();
    form.addSubmitHandler(
        new FormPanel.SubmitHandler() {
          @Override
          public void onSubmit(SubmitEvent event) {
            // Never actually submit the form; the RPC below does the work.
            event.cancel();
            final String addr = inEmail.getText().trim();
            if (!addr.contains("@")) {
              new ErrorDialog(Util.C.invalidUserEmail()).center();
              return;
            }
            inEmail.setEnabled(false);
            register.setEnabled(false);
            AccountApi.registerEmail(
                "self",
                addr,
                new GerritCallback<EmailInfo>() {
                  @Override
                  public void onSuccess(EmailInfo result) {
                    box.hide();
                    if (Gerrit.info().auth().isDev()) {
                      // Dev mode: no confirmation step, adopt the address now.
                      currentEmail = addr;
                      if (emailPick.getItemCount() == 0) {
                        AccountInfo me = Gerrit.getUserAccount();
                        me.email(addr);
                        onSaveSuccess(me);
                      } else {
                        save.setEnabled(true);
                      }
                      updateEmailList();
                    }
                  }

                  @Override
                  public void onFailure(Throwable caught) {
                    inEmail.setEnabled(true);
                    register.setEnabled(true);
                    // Show server-side email errors in a titled dialog;
                    // anything else falls through to the default handler.
                    if (caught.getMessage().startsWith(EmailException.MESSAGE)) {
                      final ErrorDialog d =
                          new ErrorDialog(
                              caught.getMessage().substring(EmailException.MESSAGE.length()));
                      d.setText(Util.C.errorDialogTitleRegisterNewEmail());
                      d.center();
                    } else {
                      super.onFailure(caught);
                    }
                  }
                });
          }
        });
    form.setWidget(body);
    register.addClickHandler(
        new ClickHandler() {
          @Override
          public void onClick(ClickEvent event) {
            form.submit();
          }
        });
    cancel.addClickHandler(
        new ClickHandler() {
          @Override
          public void onClick(ClickEvent event) {
            box.hide();
          }
        });
    final FlowPanel buttons = new FlowPanel();
    buttons.setStyleName(Gerrit.RESOURCES.css().patchSetActions());
    buttons.add(register);
    buttons.add(cancel);
    if (!Gerrit.info().auth().isDev()) {
      body.add(new HTML(Util.C.descRegisterNewEmail()));
    }
    body.add(inEmail);
    body.add(buttons);
    box.setText(Util.C.titleRegisterNewEmail());
    box.setWidget(form);
    box.center();
    inEmail.setFocus(true);
  }

  /**
   * Persists the (possibly changed) full name and preferred email, then
   * refreshes cached account data via the group listener.
   */
  void doSave() {
    final String newName;
    String name = canEditFullName() ? nameTxt.getText() : null;
    // Treat a blank name as clearing it.
    if (name != null && name.trim().isEmpty()) {
      newName = null;
    } else {
      newName = name;
    }
    final String newEmail;
    if (emailPick.isEnabled() && emailPick.getSelectedIndex() >= 0) {
      final String v = emailPick.getValue(emailPick.getSelectedIndex());
      // The sentinel "register new email" entry is not a real address.
      if (Util.C.buttonOpenRegisterNewEmail().equals(v)) {
        newEmail = currentEmail;
      } else {
        newEmail = v;
      }
    } else {
      newEmail = currentEmail;
    }
    save.setEnabled(false);
    registerNewEmail.setEnabled(false);
    CallbackGroup group = new CallbackGroup();
    // Only issue the preferred-email RPC when it actually changed.
    // NOTE(review): when currentEmail is null a newly selected email is not
    // persisted as preferred here — confirm whether that is intended.
    if (currentEmail != null && !newEmail.equals(currentEmail)) {
      AccountApi.setPreferredEmail(
          "self",
          newEmail,
          group.add(
              new GerritCallback<NativeString>() {
                @Override
                public void onSuccess(NativeString result) {}
              }));
    }
    AccountApi.setName(
        "self",
        newName,
        group.add(
            new GerritCallback<NativeString>() {
              @Override
              public void onSuccess(NativeString result) {}

              @Override
              public void onFailure(Throwable caught) {
                // Re-enable the controls so the user can retry.
                save.setEnabled(true);
                registerNewEmail.setEnabled(true);
                super.onFailure(caught);
              }
            }));
    group.done();
    group.addListener(
        new GerritCallback<Void>() {
          @Override
          public void onSuccess(Void result) {
            currentEmail = newEmail;
            AccountInfo me = Gerrit.getUserAccount();
            me.email(currentEmail);
            me.name(newName);
            onSaveSuccess(me);
            registerNewEmail.setEnabled(true);
          }
        });
  }

  /** Copies the saved values into the cached account and refreshes the UI. */
  void onSaveSuccess(AccountInfo result) {
    AccountInfo me = Gerrit.getUserAccount();
    me.name(result.name());
    me.email(result.email());
    Gerrit.refreshMenuBar();
    display(me);
  }

  /** @return index of {@code value} in the email list (case-insensitive), or -1. */
  private int emailListIndexOf(String value) {
    for (int i = 0; i < emailPick.getItemCount(); i++) {
      if (value.equalsIgnoreCase(emailPick.getValue(i))) {
        return i;
      }
    }
    return -1;
  }

  /**
   * Normalizes the email list: selects the current email (adding it if
   * missing), inserts a blank placeholder when no email is preferred, and
   * appends the "register new email" sentinel entry when allowed.
   */
  private void updateEmailList() {
    if (currentEmail != null) {
      int index = emailListIndexOf(currentEmail);
      if (index == -1) {
        emailPick.addItem(currentEmail);
        emailPick.setSelectedIndex(emailPick.getItemCount() - 1);
      } else {
        emailPick.setSelectedIndex(index);
      }
    }
    if (emailPick.getItemCount() > 0) {
      if (currentEmail == null) {
        int index = emailListIndexOf("");
        if (index != -1) {
          emailPick.removeItem(index);
        }
        emailPick.insertItem("", 0);
        emailPick.setSelectedIndex(0);
      }
      emailPick.setVisible(true);
      emailPick.setEnabled(true);
      if (canRegisterNewEmail()) {
        final String t = Util.C.buttonOpenRegisterNewEmail();
        int index = emailListIndexOf(t);
        if (index != -1) {
          emailPick.removeItem(index);
        }
        emailPick.addItem("... " + t + " ", t);
      }
    } else {
      emailPick.setVisible(false);
    }
  }
}
| |
package com.esotericsoftware.kryonet.rmi;
import static com.esotericsoftware.minlog.Log.*;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.KryoException;
import com.esotericsoftware.kryo.KryoSerializable;
import com.esotericsoftware.kryo.Serializer;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
import com.esotericsoftware.kryo.serializers.FieldSerializer;
import com.esotericsoftware.kryo.util.IntMap;
import com.esotericsoftware.kryonet.Connection;
import com.esotericsoftware.kryonet.EndPoint;
import com.esotericsoftware.kryonet.FrameworkMessage;
import com.esotericsoftware.kryonet.Listener;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.Proxy;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.PriorityQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
/** Allows methods on objects to be invoked remotely over TCP. Objects are {@link #register(int, Object) registered} with an ID.
* The remote end of connections that have been {@link #addConnection(Connection) added} are allowed to
* {@link #getRemoteObject(Connection, int, Class) access} registered objects.
* <p>
* It costs at least 2 bytes more to use remote method invocation than just sending the parameters. If the method has a return
* value which is not {@link RemoteObject#setNonBlocking(boolean) ignored}, an extra byte is written. If the type of a parameter is
* not final (note primitives are final) then an extra byte is written for that parameter.
* @author Nathan Sweet <misc@n4te.com> */
public class ObjectSpace {
static private final byte kReturnValMask = (byte)0x80; // 1000 0000
static private final byte kReturnExMask = (byte)0x40; // 0100 0000
static private final Object instancesLock = new Object();
static ObjectSpace[] instances = new ObjectSpace[0];
static private final HashMap<Class, CachedMethod[]> methodCache = new HashMap();
final IntMap idToObject = new IntMap();
Connection[] connections = {};
final Object connectionsLock = new Object();
Executor executor;
private final Listener invokeListener = new Listener() {
public void received (final Connection connection, Object object) {
if (!(object instanceof InvokeMethod)) return;
if (connections != null) {
int i = 0, n = connections.length;
for (; i < n; i++)
if (connection == connections[i]) break;
if (i == n) return; // The InvokeMethod message is not for a connection in this ObjectSpace.
}
final InvokeMethod invokeMethod = (InvokeMethod)object;
final Object target = idToObject.get(invokeMethod.objectID);
if (target == null) {
if (WARN) warn("kryonet", "Ignoring remote invocation request for unknown object ID: " + invokeMethod.objectID);
return;
}
if (executor == null)
invoke(connection, target, invokeMethod);
else {
executor.execute(new Runnable() {
public void run () {
invoke(connection, target, invokeMethod);
}
});
}
}
public void disconnected (Connection connection) {
removeConnection(connection);
}
};
/** Creates an ObjectSpace with no connections. Connections must be {@link #addConnection(Connection) added} to allow the remote
* end of the connections to access objects in this ObjectSpace. */
public ObjectSpace () {
synchronized (instancesLock) {
ObjectSpace[] instances = ObjectSpace.instances;
ObjectSpace[] newInstances = new ObjectSpace[instances.length + 1];
newInstances[0] = this;
System.arraycopy(instances, 0, newInstances, 1, instances.length);
ObjectSpace.instances = newInstances;
}
}
/** Creates an ObjectSpace with the specified connection. More connections can be {@link #addConnection(Connection) added}. */
public ObjectSpace (Connection connection) {
this();
addConnection(connection);
}
/** Sets the executor used to invoke methods when an invocation is received from a remote endpoint. By default, no executor is
* set and invocations occur on the network thread, which should not be blocked for long.
* @param executor May be null. */
public void setExecutor (Executor executor) {
this.executor = executor;
}
/** Registers an object to allow the remote end of the ObjectSpace's connections to access it using the specified ID.
* <p>
* If a connection is added to multiple ObjectSpaces, the same object ID should not be registered in more than one of those
* ObjectSpaces.
* @see #getRemoteObject(Connection, int, Class...) */
public void register (int objectID, Object object) {
if (object == null) throw new IllegalArgumentException("object cannot be null.");
idToObject.put(objectID, object);
if (TRACE) trace("kryonet", "Object registered with ObjectSpace as " + objectID + ": " + object);
}
/** Removes an object. The remote end of the ObjectSpace's connections will no longer be able to access it. */
public void remove (int objectID) {
Object object = idToObject.remove(objectID);
if (TRACE) trace("kryonet", "Object " + objectID + " removed from ObjectSpace: " + object);
}
/** Removes an object. The remote end of the ObjectSpace's connections will no longer be able to access it. */
public void remove (Object object) {
if (!idToObject.containsValue(object, true)) return;
int objectID = idToObject.findKey(object, true, -1);
idToObject.remove(objectID);
if (TRACE) trace("kryonet", "Object " + objectID + " removed from ObjectSpace: " + object);
}
/** Causes this ObjectSpace to stop listening to the connections for method invocation messages. */
public void close () {
Connection[] connections = this.connections;
for (int i = 0; i < connections.length; i++)
connections[i].removeListener(invokeListener);
synchronized (instancesLock) {
ArrayList<Connection> temp = new ArrayList(Arrays.asList(instances));
temp.remove(this);
instances = temp.toArray(new ObjectSpace[temp.size()]);
}
if (TRACE) trace("kryonet", "Closed ObjectSpace.");
}
/** Allows the remote end of the specified connection to access objects registered in this ObjectSpace. */
public void addConnection (Connection connection) {
if (connection == null) throw new IllegalArgumentException("connection cannot be null.");
synchronized (connectionsLock) {
Connection[] newConnections = new Connection[connections.length + 1];
newConnections[0] = connection;
System.arraycopy(connections, 0, newConnections, 1, connections.length);
connections = newConnections;
}
connection.addListener(invokeListener);
if (TRACE) trace("kryonet", "Added connection to ObjectSpace: " + connection);
}
/** Removes the specified connection, it will no longer be able to access objects registered in this ObjectSpace. */
public void removeConnection (Connection connection) {
if (connection == null) throw new IllegalArgumentException("connection cannot be null.");
connection.removeListener(invokeListener);
synchronized (connectionsLock) {
ArrayList<Connection> temp = new ArrayList(Arrays.asList(connections));
temp.remove(connection);
connections = temp.toArray(new Connection[temp.size()]);
}
if (TRACE) trace("kryonet", "Removed connection from ObjectSpace: " + connection);
}
/** Invokes the method on the object and, if necessary, sends the result back to the connection that made the invocation
* request. This method is invoked on the update thread of the {@link EndPoint} for this ObjectSpace and unless an
* {@link #setExecutor(Executor) executor} has been set.
* @param connection The remote side of this connection requested the invocation. */
protected void invoke (Connection connection, Object target, InvokeMethod invokeMethod) {
if (DEBUG) {
String argString = "";
if (invokeMethod.args != null) {
argString = Arrays.deepToString(invokeMethod.args);
argString = argString.substring(1, argString.length() - 1);
}
debug("kryonet", connection + " received: " + target.getClass().getSimpleName() + "#" + invokeMethod.method.getName()
+ "(" + argString + ")");
}
byte responseID = invokeMethod.responseID;
boolean transmitReturnVal = (responseID & kReturnValMask) == kReturnValMask;
boolean transmitExceptions = (responseID & kReturnExMask) == kReturnExMask;
Object result = null;
Method method = invokeMethod.method;
try {
result = method.invoke(target, invokeMethod.args);
// Catch exceptions caused by the Method#invoke
} catch (InvocationTargetException ex) {
if (transmitExceptions)
result = ex.getCause();
else
throw new RuntimeException("Error invoking method: " + method.getDeclaringClass().getName() + "." + method.getName(),
ex);
} catch (Exception ex) {
throw new RuntimeException("Error invoking method: " + method.getDeclaringClass().getName() + "." + method.getName(), ex);
}
if (responseID == 0) return;
InvokeMethodResult invokeMethodResult = new InvokeMethodResult();
invokeMethodResult.objectID = invokeMethod.objectID;
invokeMethodResult.responseID = responseID;
// Do not return non-primitives if transmitReturnVal is false
if (!transmitReturnVal && !invokeMethod.method.getReturnType().isPrimitive()) {
invokeMethodResult.result = null;
} else {
invokeMethodResult.result = result;
}
int length = connection.sendTCP(invokeMethodResult);
if (DEBUG) debug("kryonet", connection + " sent: " + result + " (" + length + ")");
}
/** Identical to {@link #getRemoteObject(Connection, int, Class...)} except returns the object cast to the specified interface
* type. The returned object still implements {@link RemoteObject}. */
static public <T> T getRemoteObject (final Connection connection, int objectID, Class<T> iface) {
return (T)getRemoteObject(connection, objectID, new Class[] {iface});
}
/** Returns a proxy object that implements the specified interfaces. Methods invoked on the proxy object will be invoked
* remotely on the object with the specified ID in the ObjectSpace for the specified connection. If the remote end of the
* connection has not {@link #addConnection(Connection) added} the connection to the ObjectSpace, the remote method invocations
* will be ignored.
* <p>
* Methods that return a value will throw {@link TimeoutException} if the response is not received with the
* {@link RemoteObject#setResponseTimeout(int) response timeout}.
* <p>
* If {@link RemoteObject#setNonBlocking(boolean) non-blocking} is false (the default), then methods that return a value must
* not be called from the update thread for the connection. An exception will be thrown if this occurs. Methods with a void
* return value can be called on the update thread.
* <p>
* If a proxy returned from this method is part of an object graph sent over the network, the object graph on the receiving
* side will have the proxy object replaced with the registered object.
* @see RemoteObject */
static public RemoteObject getRemoteObject (Connection connection, int objectID, Class... ifaces) {
if (connection == null) throw new IllegalArgumentException("connection cannot be null.");
if (ifaces == null) throw new IllegalArgumentException("ifaces cannot be null.");
Class[] temp = new Class[ifaces.length + 1];
temp[0] = RemoteObject.class;
System.arraycopy(ifaces, 0, temp, 1, ifaces.length);
return (RemoteObject)Proxy.newProxyInstance(ObjectSpace.class.getClassLoader(), temp, new RemoteInvocationHandler(
connection, objectID));
}
/** Handles network communication when methods are invoked on a proxy. */
static private class RemoteInvocationHandler implements InvocationHandler {
private final Connection connection;
final int objectID;
private int timeoutMillis = 3000;
private boolean nonBlocking = false;
private boolean transmitReturnValue = true;
private boolean transmitExceptions = true;
private Byte lastResponseID;
private byte nextResponseNum = 1;
private Listener responseListener;
final ReentrantLock lock = new ReentrantLock();
final Condition responseCondition = lock.newCondition();
final ConcurrentHashMap<Byte, InvokeMethodResult> responseTable = new ConcurrentHashMap();
public RemoteInvocationHandler (Connection connection, final int objectID) {
super();
this.connection = connection;
this.objectID = objectID;
responseListener = new Listener() {
public void received (Connection connection, Object object) {
if (!(object instanceof InvokeMethodResult)) return;
InvokeMethodResult invokeMethodResult = (InvokeMethodResult)object;
if (invokeMethodResult.objectID != objectID) return;
responseTable.put(invokeMethodResult.responseID, invokeMethodResult);
lock.lock();
try {
responseCondition.signalAll();
} finally {
lock.unlock();
}
}
public void disconnected (Connection connection) {
close();
}
};
connection.addListener(responseListener);
}
public Object invoke (Object proxy, Method method, Object[] args) throws Exception {
if (method.getDeclaringClass() == RemoteObject.class) {
String name = method.getName();
if (name.equals("close")) {
close();
return null;
} else if (name.equals("setResponseTimeout")) {
timeoutMillis = (Integer)args[0];
return null;
} else if (name.equals("setNonBlocking")) {
nonBlocking = (Boolean)args[0];
return null;
} else if (name.equals("setTransmitReturnValue")) {
transmitReturnValue = (Boolean)args[0];
return null;
} else if (name.equals("setTransmitExceptions")) {
transmitExceptions = (Boolean)args[0];
return null;
} else if (name.equals("waitForLastResponse")) {
if (lastResponseID == null) throw new IllegalStateException("There is no last response to wait for.");
return waitForResponse(lastResponseID);
} else if (name.equals("getLastResponseID")) {
if (lastResponseID == null) throw new IllegalStateException("There is no last response ID.");
return lastResponseID;
} else if (name.equals("waitForResponse")) {
if (!transmitReturnValue && !transmitExceptions && nonBlocking)
throw new IllegalStateException("This RemoteObject is currently set to ignore all responses.");
return waitForResponse((Byte)args[0]);
} else if (name.equals("getConnection")) {
return connection;
} else {
// Should never happen, for debugging purposes only
throw new RuntimeException("Invocation handler could not find RemoteObject method. Check ObjectSpace.java");
}
} else if (method.getDeclaringClass() == Object.class) {
if (method.getName().equals("toString")) return "<proxy>";
try {
return method.invoke(proxy, args);
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
InvokeMethod invokeMethod = new InvokeMethod();
invokeMethod.objectID = objectID;
invokeMethod.method = method;
invokeMethod.args = args;
// The only time a invocation doesn't need a response is if it's async
// and no return values or exceptions are wanted back.
boolean needsResponse = transmitReturnValue || transmitExceptions || !nonBlocking;
if (needsResponse) {
byte responseID;
synchronized (this) {
// Increment the response counter and put it into the first six bits of the responseID byte
responseID = nextResponseNum++;
if (nextResponseNum == 64) nextResponseNum = 1; // Keep number under 2^6, avoid 0 (see else statement below)
}
// Pack return value and exception info into the top two bits
if (transmitReturnValue) responseID |= kReturnValMask;
if (transmitExceptions) responseID |= kReturnExMask;
invokeMethod.responseID = responseID;
} else {
invokeMethod.responseID = 0; // A response info of 0 means to not respond
}
int length = connection.sendTCP(invokeMethod);
if (DEBUG) {
String argString = "";
if (args != null) {
argString = Arrays.deepToString(args);
argString = argString.substring(1, argString.length() - 1);
}
debug("kryonet", connection + " sent: " + method.getDeclaringClass().getSimpleName() + "#" + method.getName() + "("
+ argString + ") (" + length + ")");
}
if (invokeMethod.responseID != 0) lastResponseID = invokeMethod.responseID;
if (nonBlocking) {
Class returnType = method.getReturnType();
if (returnType.isPrimitive()) {
if (returnType == int.class) return 0;
if (returnType == boolean.class) return Boolean.FALSE;
if (returnType == float.class) return 0f;
if (returnType == char.class) return (char)0;
if (returnType == long.class) return 0l;
if (returnType == short.class) return (short)0;
if (returnType == byte.class) return (byte)0;
if (returnType == double.class) return 0d;
}
return null;
}
try {
Object result = waitForResponse(invokeMethod.responseID);
if (result != null && result instanceof Exception)
throw (Exception)result;
else
return result;
} catch (TimeoutException ex) {
throw new TimeoutException("Response timed out: " + method.getDeclaringClass().getName() + "." + method.getName());
}
}
private Object waitForResponse (byte responseID) {
if (connection.getEndPoint().getUpdateThread() == Thread.currentThread())
throw new IllegalStateException("Cannot wait for an RMI response on the connection's update thread.");
long endTime = System.currentTimeMillis() + timeoutMillis;
while (true) {
long remaining = endTime - System.currentTimeMillis();
if (responseTable.containsKey(responseID)) {
InvokeMethodResult invokeMethodResult = responseTable.get(responseID);
responseTable.remove(responseID);
lastResponseID = null;
return invokeMethodResult.result;
} else {
if (remaining <= 0) throw new TimeoutException("Response timed out.");
lock.lock();
try {
responseCondition.await(remaining, TimeUnit.MILLISECONDS);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException(e);
} finally {
lock.unlock();
}
}
}
}
void close () {
connection.removeListener(responseListener);
}
}
/** Internal message to invoke methods remotely. */
static public class InvokeMethod implements FrameworkMessage, KryoSerializable {
public int objectID;
public Method method;
public Object[] args;
// The top two bytes of the ID indicate if the remote invocation should respond with return values and exceptions,
// respectively. The rest is a six bit counter. This means up to 63 responses can be stored before undefined behavior
// occurs due to possible duplicate IDs.
public byte responseID;
public void write (Kryo kryo, Output output) {
output.writeInt(objectID, true);
int methodClassID = kryo.getRegistration(method.getDeclaringClass()).getId();
output.writeInt(methodClassID, true);
CachedMethod[] cachedMethods = getMethods(kryo, method.getDeclaringClass());
CachedMethod cachedMethod = null;
for (int i = 0, n = cachedMethods.length; i < n; i++) {
cachedMethod = cachedMethods[i];
if (cachedMethod.method.equals(method)) {
output.writeByte(i);
break;
}
}
for (int i = 0, n = cachedMethod.serializers.length; i < n; i++) {
Serializer serializer = cachedMethod.serializers[i];
if (serializer != null)
kryo.writeObjectOrNull(output, args[i], serializer);
else
kryo.writeClassAndObject(output, args[i]);
}
output.writeByte(responseID);
}
public void read (Kryo kryo, Input input) {
objectID = input.readInt(true);
int methodClassID = input.readInt(true);
Class methodClass = kryo.getRegistration(methodClassID).getType();
byte methodIndex = input.readByte();
CachedMethod cachedMethod;
try {
cachedMethod = getMethods(kryo, methodClass)[methodIndex];
} catch (IndexOutOfBoundsException ex) {
throw new KryoException("Invalid method index " + methodIndex + " for class: " + methodClass.getName());
}
method = cachedMethod.method;
args = new Object[cachedMethod.serializers.length];
for (int i = 0, n = args.length; i < n; i++) {
Serializer serializer = cachedMethod.serializers[i];
if (serializer != null)
args[i] = kryo.readObjectOrNull(input, method.getParameterTypes()[i], serializer);
else
args[i] = kryo.readClassAndObject(input);
}
responseID = input.readByte();
}
}
/** Internal message to return the result of a remotely invoked method. */
static public class InvokeMethodResult implements FrameworkMessage {
public int objectID;
public byte responseID;
public Object result;
}
static CachedMethod[] getMethods (Kryo kryo, Class type) {
CachedMethod[] cachedMethods = methodCache.get(type);
if (cachedMethods != null) return cachedMethods;
ArrayList<Method> allMethods = new ArrayList();
Class nextClass = type;
while (nextClass != null && nextClass != Object.class) {
Collections.addAll(allMethods, nextClass.getDeclaredMethods());
nextClass = nextClass.getSuperclass();
}
PriorityQueue<Method> methods = new PriorityQueue(Math.max(1, allMethods.size()), new Comparator<Method>() {
public int compare (Method o1, Method o2) {
// Methods are sorted so they can be represented as an index.
int diff = o1.getName().compareTo(o2.getName());
if (diff != 0) return diff;
Class[] argTypes1 = o1.getParameterTypes();
Class[] argTypes2 = o2.getParameterTypes();
if (argTypes1.length > argTypes2.length) return 1;
if (argTypes1.length < argTypes2.length) return -1;
for (int i = 0; i < argTypes1.length; i++) {
diff = argTypes1[i].getName().compareTo(argTypes2[i].getName());
if (diff != 0) return diff;
}
throw new RuntimeException("Two methods with same signature!"); // Impossible.
}
});
for (int i = 0, n = allMethods.size(); i < n; i++) {
Method method = allMethods.get(i);
int modifiers = method.getModifiers();
if (Modifier.isStatic(modifiers)) continue;
if (Modifier.isPrivate(modifiers)) continue;
if (method.isSynthetic()) continue;
methods.add(method);
}
int n = methods.size();
cachedMethods = new CachedMethod[n];
for (int i = 0; i < n; i++) {
CachedMethod cachedMethod = new CachedMethod();
cachedMethod.method = methods.poll();
// Store the serializer for each final parameter.
Class[] parameterTypes = cachedMethod.method.getParameterTypes();
cachedMethod.serializers = new Serializer[parameterTypes.length];
for (int ii = 0, nn = parameterTypes.length; ii < nn; ii++)
if (kryo.isFinal(parameterTypes[ii])) cachedMethod.serializers[ii] = kryo.getSerializer(parameterTypes[ii]);
cachedMethods[i] = cachedMethod;
}
methodCache.put(type, cachedMethods);
return cachedMethods;
}
/** Returns the first object registered with the specified ID in any of the ObjectSpaces the specified connection belongs to. */
static Object getRegisteredObject (Connection connection, int objectID) {
ObjectSpace[] instances = ObjectSpace.instances;
for (int i = 0, n = instances.length; i < n; i++) {
ObjectSpace objectSpace = instances[i];
// Check if the connection is in this ObjectSpace.
Connection[] connections = objectSpace.connections;
for (int j = 0; j < connections.length; j++) {
if (connections[j] != connection) continue;
// Find an object with the objectID.
Object object = objectSpace.idToObject.get(objectID);
if (object != null) return object;
}
}
return null;
}
/** Registers the classes needed to use ObjectSpaces. This should be called before any connections are opened.
* @see Kryo#register(Class, Serializer) */
static public void registerClasses (final Kryo kryo) {
kryo.register(Object[].class);
kryo.register(InvokeMethod.class);
FieldSerializer serializer = (FieldSerializer)kryo.register(InvokeMethodResult.class).getSerializer();
serializer.getField("objectID").setClass(int.class, new Serializer<Integer>() {
public void write (Kryo kryo, Output output, Integer object) {
output.writeInt(object, true);
}
public Integer read (Kryo kryo, Input input, Class<Integer> type) {
return input.readInt(true);
}
});
kryo.register(InvocationHandler.class, new Serializer() {
public void write (Kryo kryo, Output output, Object object) {
RemoteInvocationHandler handler = (RemoteInvocationHandler)Proxy.getInvocationHandler(object);
output.writeInt(handler.objectID, true);
}
public Object read (Kryo kryo, Input input, Class type) {
int objectID = input.readInt(true);
Connection connection = (Connection)kryo.getContext().get("connection");
Object object = getRegisteredObject(connection, objectID);
if (WARN && object == null) warn("kryonet", "Unknown object ID " + objectID + " for connection: " + connection);
return object;
}
});
}
static class CachedMethod {
Method method;
Serializer[] serializers;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.service;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.conf.Configuration;
import com.google.common.annotations.VisibleForTesting;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This is the base implementation class for services.
*/
@Public
@Evolving
public abstract class AbstractService implements Service {
private static final Logger LOG =
LoggerFactory.getLogger(AbstractService.class);
/**
* Service name.
*/
private final String name;
/** service state */
private final ServiceStateModel stateModel;
/**
* Service start time. Will be zero until the service is started.
*/
private long startTime;
/**
* The configuration. Will be null until the service is initialized.
*/
private volatile Configuration config;
/**
* List of state change listeners; it is final to ensure
* that it will never be null.
*/
private final ServiceOperations.ServiceListeners listeners
= new ServiceOperations.ServiceListeners();
/**
* Static listeners to all events across all services
*/
private static ServiceOperations.ServiceListeners globalListeners
= new ServiceOperations.ServiceListeners();
/**
   * The cause of any failure; will be null
   * if a service did not stop due to a failure.
*/
private Exception failureCause;
/**
* the state in which the service was when it failed.
* Only valid when the service is stopped due to a failure
*/
private STATE failureState = null;
/**
* object used to co-ordinate {@link #waitForServiceToStop(long)}
* across threads.
*/
private final AtomicBoolean terminationNotification =
new AtomicBoolean(false);
/**
* History of lifecycle transitions
*/
private final List<LifecycleEvent> lifecycleHistory
= new ArrayList<LifecycleEvent>(5);
/**
* Map of blocking dependencies
*/
private final Map<String,String> blockerMap = new HashMap<String, String>();
private final Object stateChangeLock = new Object();
  /**
   * Construct the service.
   * @param name service name
   */
  public AbstractService(String name) {
    this.name = name;
    // All lifecycle state tracking and transition validation is delegated to the state model.
    stateModel = new ServiceStateModel(name);
  }
  @Override
  public final STATE getServiceState() {
    return stateModel.getState();
  }
  /** {@inheritDoc} Synchronized so the read is consistent with {@link #noteFailure(Exception)}. */
  @Override
  public final synchronized Throwable getFailureCause() {
    return failureCause;
  }
  /** {@inheritDoc} Only meaningful after a failure was recorded; null otherwise. */
  @Override
  public synchronized STATE getFailureState() {
    return failureState;
  }
  /**
   * Set the configuration for this service.
   * This method is called during {@link #init(Configuration)}
   * and should only be needed if for some reason a service implementation
   * needs to override that initial setting -for example replacing
   * it with a new subclass of {@link Configuration}
   * @param conf new configuration.
   */
  protected void setConfig(Configuration conf) {
    // config is declared volatile, so this write is immediately visible to getConfig() callers.
    this.config = conf;
  }
  /**
   * {@inheritDoc}
   * This invokes {@link #serviceInit}
   * @param conf the configuration of the service. This must not be null
   * @throws ServiceStateException if the configuration was null,
   * the state change not permitted, or something else went wrong
   */
  @Override
  public void init(Configuration conf) {
    if (conf == null) {
      throw new ServiceStateException("Cannot initialize service "
          + getName() + ": null configuration");
    }
    // Fast path: a re-entrant init() call is a no-op.
    if (isInState(STATE.INITED)) {
      return;
    }
    synchronized (stateChangeLock) {
      // enterState returns the previous state; only the first transition into INITED
      // runs serviceInit.
      if (enterState(STATE.INITED) != STATE.INITED) {
        // Store the config before serviceInit so subclasses can read it via getConfig().
        setConfig(conf);
        try {
          serviceInit(config);
          if (isInState(STATE.INITED)) {
            //if the service ended up here during init,
            //notify the listeners
            notifyListeners();
          }
        } catch (Exception e) {
          // On failure: record the cause, best-effort stop, then rethrow unchecked.
          noteFailure(e);
          ServiceOperations.stopQuietly(LOG, this);
          throw ServiceStateException.convert(e);
        }
      }
    }
  }
/**
* {@inheritDoc}
* @throws ServiceStateException if the current service state does not permit
* this action
*/
@Override
public void start() {
if (isInState(STATE.STARTED)) {
return;
}
//enter the started state
synchronized (stateChangeLock) {
if (stateModel.enterState(STATE.STARTED) != STATE.STARTED) {
try {
startTime = System.currentTimeMillis();
serviceStart();
if (isInState(STATE.STARTED)) {
//if the service started (and isn't now in a later state), notify
if (LOG.isDebugEnabled()) {
LOG.debug("Service " + getName() + " is started");
}
notifyListeners();
}
} catch (Exception e) {
noteFailure(e);
ServiceOperations.stopQuietly(LOG, this);
throw ServiceStateException.convert(e);
}
}
}
}
  /**
   * {@inheritDoc}
   * Stops the service and, regardless of success, signals
   * {@link #waitForServiceToStop(long)} waiters and notifies listeners.
   */
  @Override
  public void stop() {
    // Fast path: already stopped.
    if (isInState(STATE.STOPPED)) {
      return;
    }
    synchronized (stateChangeLock) {
      if (enterState(STATE.STOPPED) != STATE.STOPPED) {
        try {
          serviceStop();
        } catch (Exception e) {
          //stop-time exceptions are logged if they are the first one,
          noteFailure(e);
          throw ServiceStateException.convert(e);
        } finally {
          //report that the service has terminated
          // The flag is set before notifyAll so waiters that wake observe termination.
          terminationNotification.set(true);
          synchronized (terminationNotification) {
            terminationNotification.notifyAll();
          }
          //notify anything listening for events
          notifyListeners();
        }
      } else {
        //already stopped: note it
        if (LOG.isDebugEnabled()) {
          LOG.debug("Ignoring re-entrant call to stop()");
        }
      }
    }
  }
  /**
   * Relay to {@link #stop()}
   * Lets a Service participate in try-with-resources / Closeable handling.
   * @throws IOException never thrown here; declared to satisfy {@code Closeable}
   */
  @Override
  public final void close() throws IOException {
    stop();
  }
/**
* Failure handling: record the exception
* that triggered it -if there was not one already.
* Services are free to call this themselves.
* @param exception the exception
*/
protected final void noteFailure(Exception exception) {
if (LOG.isDebugEnabled()) {
LOG.debug("noteFailure " + exception, (Throwable) null);
}
if (exception == null) {
//make sure failure logic doesn't itself cause problems
return;
}
//record the failure details, and log it
synchronized (this) {
if (failureCause == null) {
failureCause = exception;
failureState = getServiceState();
LOG.info("Service " + getName()
+ " failed in state " + failureState
+ "; cause: " + exception,
exception);
}
}
}
  /**
   * {@inheritDoc}
   * Blocks on the termination-notification monitor until the service reports
   * it has stopped, or the wait ends.
   * @param timeout wait time in milliseconds; {@code 0} waits indefinitely
   *   (standard {@link Object#wait(long)} semantics)
   * @return whether the service has stopped when the wait ends
   */
  @Override
  public final boolean waitForServiceToStop(long timeout) {
    boolean completed = terminationNotification.get();
    while (!completed) {
      try {
        synchronized(terminationNotification) {
          terminationNotification.wait(timeout);
        }
        // here there has been a timeout, the object has terminated,
        // or there has been a spurious wakeup (which we ignore)
        completed = true;
      } catch (InterruptedException e) {
        // interrupted; have another look at the flag
        completed = terminationNotification.get();
      }
    }
    // NOTE(review): a spurious wakeup ends the wait before the full timeout has
    // elapsed; the returned flag is still accurate at that moment.
    return terminationNotification.get();
  }
/* ===================================================================== */
/* Override Points */
/* ===================================================================== */
  /**
   * All initialization code needed by a service.
   *
   * This method will only ever be called once during the lifecycle of
   * a specific service instance.
   *
   * Implementations do not need to be synchronized as the logic
   * in {@link #init(Configuration)} prevents re-entrancy.
   *
   * The base implementation checks to see if the subclass has created
   * a new configuration instance, and if so, updates the base class value
   * @param conf configuration
   * @throws Exception on a failure -these will be caught,
   * possibly wrapped, and will trigger a service stop
   */
  protected void serviceInit(Configuration conf) throws Exception {
    if (conf != config) {
      // A different Configuration instance was supplied; adopt it as this service's config.
      LOG.debug("Config has been overridden during init");
      setConfig(conf);
    }
  }
  /**
   * Actions called during the INITED to STARTED transition.
   *
   * This method will only ever be called once during the lifecycle of
   * a specific service instance.
   *
   * Implementations do not need to be synchronized as the logic
   * in {@link #start()} prevents re-entrancy.
   *
   * @throws Exception if needed -these will be caught,
   * wrapped, and trigger a service stop
   */
  protected void serviceStart() throws Exception {
    // No-op by default; subclasses override to start their work.
  }
  /**
   * Actions called during the transition to the STOPPED state.
   *
   * This method will only ever be called once during the lifecycle of
   * a specific service instance.
   *
   * Implementations do not need to be synchronized as the logic
   * in {@link #stop()} prevents re-entrancy.
   *
   * Implementations MUST write this to be robust against failures, including
   * checks for null references -and for the first failure to not stop other
   * attempts to shut down parts of the service.
   *
   * @throws Exception if needed -these will be caught and logged.
   */
  protected void serviceStop() throws Exception {
    // No-op by default; subclasses override to release their resources.
  }
@Override
public void registerServiceListener(ServiceStateChangeListener l) {
listeners.add(l);
}
/** Remove a previously registered per-instance listener; no-op if absent. */
@Override
public void unregisterServiceListener(ServiceStateChangeListener l) {
    listeners.remove(l);
}
/**
 * Register a global listener, which receives notifications
 * from the state change events of all services in the JVM.
 * @param l listener
 */
public static void registerGlobalListener(ServiceStateChangeListener l) {
    globalListeners.add(l);
}
/**
 * Unregister a global listener.
 * @param l listener to unregister
 * @return true if the listener was found (and then deleted)
 */
public static boolean unregisterGlobalListener(ServiceStateChangeListener l) {
    return globalListeners.remove(l);
}
/**
 * Package-scoped method for testing - resets the global listener list.
 */
@VisibleForTesting
static void resetGlobalListeners() {
    globalListeners.reset();
}
/** @return the service name supplied at construction time */
@Override
public String getName() {
    return name;
}
/** @return the current service configuration (may be updated by {@link #serviceInit}) */
@Override
public Configuration getConfig() {
    return config;
}
/** @return the time at which this service was started */
@Override
public long getStartTime() {
    return startTime;
}
/**
 * Push the current state change to both the per-instance listeners and the
 * JVM-wide global listeners. Any exception a listener throws is logged and
 * swallowed - listener failures must never disrupt the service lifecycle.
 */
private void notifyListeners() {
    try {
        listeners.notifyListeners(this);
        globalListeners.notifyListeners(this);
    } catch (Throwable failure) {
        // Deliberately broad: a misbehaving listener must not propagate.
        String message =
            "Exception while notifying listeners of " + this + ": " + failure;
        LOG.warn(message, failure);
    }
}
/**
 * Append a timestamped entry for the current state to the lifecycle history.
 */
private void recordLifecycleEvent() {
    final LifecycleEvent entry = new LifecycleEvent();
    entry.state = getServiceState();
    entry.time = System.currentTimeMillis();
    lifecycleHistory.add(entry);
}
/**
 * @return a defensive snapshot of the lifecycle history; callers may mutate
 *         the returned list freely without affecting this service.
 */
@Override
public synchronized List<LifecycleEvent> getLifecycleHistory() {
    List<LifecycleEvent> snapshot = new ArrayList<LifecycleEvent>(lifecycleHistory);
    return snapshot;
}
/**
 * Move the state model into {@code newState}; on an actual transition the
 * event is recorded via {@link #recordLifecycleEvent} and logged at debug
 * level. Re-entering the current state is a silent no-op.
 * @param newState the proposed new state
 * @return the state the service was in before this call
 */
private STATE enterState(STATE newState) {
    assert stateModel != null : "null state in " + name + " " + this.getClass();
    final STATE previous = stateModel.enterState(newState);
    if (previous == newState) {
        // No transition took place (state re-entry): nothing to record.
        return previous;
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug(
            "Service: " + getName() + " entered state " + getServiceState());
    }
    recordLifecycleEvent();
    return previous;
}
/** @return true if the service is currently in the expected lifecycle state */
@Override
public final boolean isInState(Service.STATE expected) {
    return stateModel.isInState(expected);
}
/** Human-readable summary: service name plus current state. */
@Override
public String toString() {
    return "Service " + name + " in state " + stateModel;
}
/**
 * Put a blocker to the blocker map - replacing any
 * with the same name.
 * @param name blocker name
 * @param details any specifics on the block. This must be non-null.
 */
protected void putBlocker(String name, String details) {
    // blockerMap is guarded by its own monitor; all access synchronizes on it.
    synchronized (blockerMap) {
        blockerMap.put(name, details);
    }
}
/**
 * Remove a blocker from the blocker map -
 * this is a no-op if the blocker is not present.
 * @param name the name of the blocker
 */
public void removeBlocker(String name) {
    synchronized (blockerMap) {
        blockerMap.remove(name);
    }
}
/**
 * @return a defensive copy of the blocker map, taken under the map's monitor
 *         so concurrent put/remove calls cannot corrupt the snapshot.
 */
@Override
public Map<String, String> getBlockers() {
    synchronized (blockerMap) {
        return new HashMap<String, String>(blockerMap);
    }
}
}
| |
/*
* Autopsy Forensic Browser
*
* Copyright 2011 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.ingest;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.List;
import java.util.logging.Level;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.sleuthkit.autopsy.coreutils.Logger;
import javax.swing.Action;
import javax.swing.BoxLayout;
import javax.swing.JOptionPane;
import org.openide.util.ImageUtilities;
import org.openide.util.NbBundle;
import org.openide.util.Utilities;
import org.openide.windows.Mode;
import org.openide.windows.TopComponent;
import org.openide.windows.WindowManager;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
import org.sleuthkit.datamodel.Content;
/**
 * Top component implementing the ingest inbox: collects {@link IngestMessage}s
 * forwarded by {@link IngestManager}, renders them in an
 * {@link IngestMessageMainPanel}, and mirrors error/warning messages into the
 * desktop notification area.
 */
public final class IngestMessageTopComponent extends TopComponent implements IngestUI {
    // Lazily-created singleton; see getDefault()/findInstance().
    private static IngestMessageTopComponent instance;
    private static final Logger logger = Logger.getLogger(IngestMessageTopComponent.class.getName());
    // Panel that actually displays the message table.
    private IngestMessageMainPanel messagePanel;
    private IngestManager manager;
    private static String PREFERRED_ID = "IngestMessageTopComponent";
    // Fired when the user clicks a notification bubble: opens this inbox.
    private ActionListener showIngestInboxAction;
    // Matches HTML tags so they can be stripped from notification details.
    private static final Pattern tagRemove = Pattern.compile("<.+?>");
    public IngestMessageTopComponent() {
        initComponents();
        customizeComponents();
        registerListeners();
        setName(NbBundle.getMessage(IngestMessageTopComponent.class, "CTL_IngestMessageTopComponent"));
        setToolTipText(NbBundle.getMessage(IngestMessageTopComponent.class, "HINT_IngestMessageTopComponent"));
        //putClientProperty(TopComponent.PROP_CLOSING_DISABLED, Boolean.TRUE);
        showIngestInboxAction = new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                IngestMessagesToolbar.getDefault().showIngestMessages();
            }
        };
    }
    /** Create (once) and return the singleton instance. */
    private static synchronized IngestMessageTopComponent getDefault() {
        if (instance == null) {
            instance = new IngestMessageTopComponent();
        }
        return instance;
    }
    /**
     * Obtain the IngestMessageTopComponent instance, preferring the one the
     * window system has already materialized over creating a new singleton.
     */
    public static synchronized IngestMessageTopComponent findInstance() {
        TopComponent win = WindowManager.getDefault().findTopComponent(PREFERRED_ID);
        if (win == null) {
            return getDefault();
        }
        if (win instanceof IngestMessageTopComponent) {
            return (IngestMessageTopComponent) win;
        }
        return getDefault();
    }
    @Override
    protected String preferredID() {
        return PREFERRED_ID;
    }
    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        setDisplayName(org.openide.util.NbBundle.getMessage(IngestMessageTopComponent.class, "IngestMessageTopComponent.displayName")); // NOI18N
        setName("Ingest Inbox"); // NOI18N
        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
        this.setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGap(0, 332, Short.MAX_VALUE)
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGap(0, 210, Short.MAX_VALUE)
        );
    }// </editor-fold>//GEN-END:initComponents
    // Variables declaration - do not modify//GEN-BEGIN:variables
    // End of variables declaration//GEN-END:variables
    @Override
    public void componentOpened() {
        //logger.log(Level.INFO, "OPENED");
        super.componentOpened();
        //create manager instance
        if (manager == null) {
            manager = IngestManager.getDefault();
        }
    }
    @Override
    public void componentClosed() {
        //logger.log(Level.INFO, "CLOSED");
        super.componentClosed();
        // mark all the messages as seen (this will make the 'New?' column NOT
        // show a checkmark)
        messagePanel.markAllSeen();
    }
    @Override
    protected void componentShowing() {
        //logger.log(Level.INFO, "SHOWING");
        super.componentShowing();
        // Dock this component into the floatingLeftBottom mode unless it is
        // already one of that mode's top components.
        Mode mode = WindowManager.getDefault().findMode("floatingLeftBottom");
        if (mode != null) {
            TopComponent[] tcs = mode.getTopComponents();
            for (int i = 0; i < tcs.length; ++i) {
                if (tcs[i] == this) //already floating
                {
                    this.open();
                    return;
                }
            }
            mode.dockInto(this);
            this.open();
        }
    }
    @Override
    protected void componentHidden() {
        //logger.log(Level.INFO, "HIDDEN");
        super.componentHidden();
    }
    @Override
    protected void componentActivated() {
        //logger.log(Level.INFO, "ACTIVATED");
        super.componentActivated();
    }
    @Override
    protected void componentDeactivated() {
        //logger.log(Level.INFO, "DEACTIVATED");
        super.componentDeactivated();
    }
    @Override
    public boolean canClose() {
        return true;
    }
    @Override
    public int getPersistenceType() {
        return TopComponent.PERSISTENCE_ALWAYS;
    }
    @Override
    public java.awt.Image getIcon() {
        return ImageUtilities.loadImage(
            "org/sleuthkit/autopsy/ingest/eye-icon.png");
    }
    void writeProperties(java.util.Properties p) {
        // better to version settings since initial version as advocated at
        // http://wiki.apidesign.org/wiki/PropertyFiles
        p.setProperty("version", "1.0");
        // TODO store your settings
    }
    void readProperties(java.util.Properties p) {
        String version = p.getProperty("version");
        // TODO read your settings according to their version
    }
    /** Subscribe to case-change events: a case switch stops ingest and clears the inbox. */
    private void registerListeners() {
        //handle case change
        Case.addPropertyChangeListener(new PropertyChangeListener() {
            @Override
            public void propertyChange(PropertyChangeEvent evt) {
                if (evt.getPropertyName().equals(Case.CASE_CURRENT_CASE)) {
                    Case oldCase = (Case) evt.getOldValue();
                    if (oldCase == null) //nothing to do, new case had been opened
                    {
                        return;
                    }
                    //stop workers if running
                    if (manager == null) {
                        manager = IngestManager.getDefault();
                    }
                    try {
                        manager.stopAll();
                    } finally {
                        //clear inbox
                        clearMessages();
                    }
                }
            }
        });
    }
    /** Build the message panel and lay it out; this part is not Form-Editor generated. */
    private void customizeComponents() {
        //custom GUI setup not done by builder
        messagePanel = new IngestMessageMainPanel();
        messagePanel.setOpaque(true);
        //setLayout(new BorderLayout());
        setLayout(new BoxLayout(this, BoxLayout.PAGE_AXIS));
        add(messagePanel);
    }
    /**
     * Display ingest summary report in some dialog.
     * Choosing the second option ("Generate Report", which maps to NO_OPTION)
     * locates and runs the ReportAction from the action lookup.
     */
    @Override
    public void displayReport(String ingestReport) {
        Object[] options = {"OK",
            "Generate Report"};
        final int choice = JOptionPane.showOptionDialog(null,
            ingestReport,
            "Ingest Report",
            JOptionPane.YES_NO_OPTION,
            JOptionPane.INFORMATION_MESSAGE,
            null,
            options,
            options[0]);
        final String reportActionName = "org.sleuthkit.autopsy.report.ReportAction";
        Action reportAction = null;
        //find action by name from action lookup, without introducing cyclic dependency
        if (choice == JOptionPane.NO_OPTION) {
            List<? extends Action> actions = Utilities.actionsForPath("Toolbars/File");
            for (Action a : actions) {
                //separators are null actions
                if (a != null) {
                    if (a.getClass().getCanonicalName().equals(reportActionName)) {
                        reportAction = a;
                        break;
                    }
                }
            }
            if (reportAction == null) {
                logger.log(Level.SEVERE, "Could not locate Action: " + reportActionName);
            } else {
                reportAction.actionPerformed(null);
            }
        }
    }
    /**
     * Display IngestMessage from module (forwarded by IngestManager).
     * Errors and warnings are additionally mirrored to the notification area
     * with HTML markup stripped from the details.
     */
    @Override
    public void displayMessage(IngestMessage ingestMessage) {
        messagePanel.addMessage(ingestMessage);
        //post special messages to notification area
        MessageType ingestMessageType = ingestMessage.getMessageType();
        if (ingestMessageType.equals(MessageType.ERROR)
                || ingestMessageType.equals(MessageType.WARNING)) {
            MessageNotifyUtil.MessageType notifyMessageType =
                ingestMessageType.equals(MessageType.ERROR)
                ? MessageNotifyUtil.MessageType.ERROR
                : MessageNotifyUtil.MessageType.WARNING;
            String subject = ingestMessage.getSubject();
            String details = ingestMessage.getDetails();
            if (details == null) {
                details = "";
            }
            //strip html tags in case they are present in ingest message
            details = stripHtmlTags(details);
            MessageNotifyUtil.Notify.show(subject, details,
                notifyMessageType, showIngestInboxAction);
        }
    }
    @Override
    public int getMessagesCount() {
        return messagePanel.getMessagesCount();
    }
    @Override
    public void clearMessages() {
        messagePanel.clearMessages();
    }
    @Override
    public void displayIngestDialog(final Content ingestDataSource) {
        /*
        final IngestDialog ingestDialog = new IngestDialog();
        ingestDialog.setImage(image);
        ingestDialog.display();
        */
    }
    @Override
    public void restoreMessages() {
        //componentShowing();
    }
    @Override
    public Action[] getActions() {
        //disable TC toolbar actions
        return new Action[0];
    }
    /** Remove anything matching the tagRemove pattern; null/empty input is returned as-is. */
    private static String stripHtmlTags(String string) {
        if (string == null || string.length() == 0) {
            return string;
        }
        Matcher m = tagRemove.matcher(string);
        return m.replaceAll("");
    }
}
| |
package com.dm.wallpaper.board.fragments.dialogs;
import android.app.Dialog;
import android.content.DialogInterface;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.DialogFragment;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.util.Log;
import android.view.View;
import android.widget.ListView;
import android.widget.ProgressBar;
import android.widget.Toast;
import com.afollestad.materialdialogs.MaterialDialog;
import com.anjlab.android.iab.v3.BillingProcessor;
import com.anjlab.android.iab.v3.SkuDetails;
import com.dm.wallpaper.board.R;
import com.dm.wallpaper.board.R2;
import com.dm.wallpaper.board.adapters.InAppBillingAdapter;
import com.dm.wallpaper.board.helpers.TypefaceHelper;
import com.dm.wallpaper.board.items.InAppBilling;
import com.dm.wallpaper.board.utils.Extras;
import com.dm.wallpaper.board.utils.LogUtil;
import com.dm.wallpaper.board.utils.listeners.InAppBillingListener;
import butterknife.BindView;
import butterknife.ButterKnife;
/*
* Wallpaper Board
*
* Copyright (c) 2017 Dani Mahardhika
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Dialog fragment listing the in-app billing (donation) products.
 * Product details are loaded asynchronously from the shared
 * {@link BillingProcessor}; selecting a product and pressing the positive
 * button forwards the choice to the hosting activity via
 * {@link InAppBillingListener}.
 */
public class InAppBillingFragment extends DialogFragment {
    @BindView(R2.id.listview)
    ListView mListView;
    @BindView(R2.id.progress)
    ProgressBar mProgress;
    private String mKey;
    private String[] mProductsId;
    private InAppBillingAdapter mAdapter;
    // Non-null while the product-loading task is running; also used as a
    // "still loading" guard in the positive-button handler.
    private AsyncTask<Void, Void, Boolean> mLoadInAppProducts;
    // Shared with the caller via showInAppBillingDialog(); cleared onDismiss().
    private static BillingProcessor mBillingProcessor;
    private static final String TAG = "com.dm.wallpaper.board.dialog.inappbilling";

    private static InAppBillingFragment newInstance(String key, String[] productId) {
        InAppBillingFragment fragment = new InAppBillingFragment();
        Bundle bundle = new Bundle();
        bundle.putString(Extras.EXTRA_KEY, key);
        bundle.putStringArray(Extras.EXTRA_PRODUCT_ID, productId);
        fragment.setArguments(bundle);
        return fragment;
    }

    /**
     * Show the billing dialog, replacing any previous instance with the same tag.
     * @param fm fragment manager to attach to
     * @param billingProcessor processor used to query product details
     * @param key license key, passed through fragment arguments
     * @param productId product ids to list
     */
    public static void showInAppBillingDialog(@NonNull FragmentManager fm, BillingProcessor billingProcessor,
                                              @NonNull String key, @NonNull String[] productId) {
        mBillingProcessor = billingProcessor;
        FragmentTransaction ft = fm.beginTransaction();
        Fragment prev = fm.findFragmentByTag(TAG);
        if (prev != null) {
            ft.remove(prev);
        }
        try {
            DialogFragment dialog = InAppBillingFragment.newInstance(key, productId);
            dialog.show(ft, TAG);
        } catch (IllegalArgumentException | IllegalStateException ignored) {
            // show() can throw if the activity is finishing or state is saved;
            // silently skipping is acceptable for a purely optional dialog.
        }
    }

    @Override
    public void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mKey = getArguments().getString(Extras.EXTRA_KEY);
        mProductsId = getArguments().getStringArray(Extras.EXTRA_PRODUCT_ID);
    }

    @NonNull
    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        MaterialDialog.Builder builder = new MaterialDialog.Builder(getActivity());
        builder.customView(R.layout.fragment_inappbilling, false)
                .typeface(TypefaceHelper.getMedium(getActivity()), TypefaceHelper.getRegular(getActivity()))
                .title(R.string.navigation_view_donate)
                .positiveText(R.string.donate)
                .negativeText(R.string.close)
                .onPositive((dialog, which) -> {
                    // Ignore the button while the product list is still loading.
                    if (mLoadInAppProducts == null) {
                        try {
                            InAppBillingListener listener = (InAppBillingListener) getActivity();
                            listener.onInAppBillingSelected(mAdapter.getSelectedProduct());
                        } catch (Exception ignored) {}
                        dismiss();
                    }
                });
        MaterialDialog dialog = builder.build();
        dialog.setCancelable(false);
        dialog.setCanceledOnTouchOutside(false);
        dialog.show();
        setCancelable(false);
        ButterKnife.bind(this, dialog);
        return dialog;
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        if (savedInstanceState != null) {
            mKey = savedInstanceState.getString(Extras.EXTRA_KEY);
            mProductsId = savedInstanceState.getStringArray(Extras.EXTRA_PRODUCT_ID);
        }
        loadInAppProducts();
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        outState.putString(Extras.EXTRA_KEY, mKey);
        outState.putStringArray(Extras.EXTRA_PRODUCT_ID, mProductsId);
        super.onSaveInstanceState(outState);
    }

    @Override
    public void onDismiss(DialogInterface dialog) {
        // Drop the static reference to avoid leaking the processor, and stop
        // any in-flight load.
        mBillingProcessor = null;
        if (mLoadInAppProducts != null) mLoadInAppProducts.cancel(true);
        super.onDismiss(dialog);
    }

    /** Kick off the background task that queries details for every product id. */
    private void loadInAppProducts() {
        mLoadInAppProducts = new AsyncTask<Void, Void, Boolean>() {
            InAppBilling[] inAppBillings;
            boolean isBillingNotReady = false;

            @Override
            protected void onPreExecute() {
                super.onPreExecute();
                mProgress.setVisibility(View.VISIBLE);
                inAppBillings = new InAppBilling[mProductsId.length];
            }

            @Override
            protected Boolean doInBackground(Void... voids) {
                while (!isCancelled()) {
                    try {
                        Thread.sleep(1);
                        if (mBillingProcessor == null) {
                            isBillingNotReady = true;
                            return false;
                        }
                        for (int i = 0; i < mProductsId.length; i++) {
                            SkuDetails product = mBillingProcessor
                                    .getPurchaseListingDetails(mProductsId[i]);
                            if (product != null) {
                                // BUGFIX: titles do not always contain "(app name)";
                                // lastIndexOf would return -1 and substring(0, -1)
                                // threw StringIndexOutOfBoundsException, aborting
                                // the whole load. Only trim when the suffix exists.
                                String title = product.title;
                                int suffixStart = title.lastIndexOf("(");
                                if (suffixStart >= 0) {
                                    title = title.substring(0, suffixStart);
                                }
                                inAppBillings[i] = new InAppBilling(
                                        product.priceText, mProductsId[i], title);
                            } else {
                                // NOTE(review): only a missing LAST product aborts
                                // the load; earlier nulls leave array holes -
                                // preserved as-is, confirm intended.
                                if (i == mProductsId.length - 1)
                                    return false;
                            }
                        }
                        return true;
                    } catch (Exception e) {
                        LogUtil.e(Log.getStackTraceString(e));
                        return false;
                    }
                }
                return false;
            }

            @Override
            protected void onPostExecute(Boolean aBoolean) {
                super.onPostExecute(aBoolean);
                mProgress.setVisibility(View.GONE);
                if (aBoolean) {
                    mAdapter = new InAppBillingAdapter(getActivity(), inAppBillings);
                    mListView.setAdapter(mAdapter);
                } else {
                    dismiss();
                    if (!isBillingNotReady)
                        Toast.makeText(getActivity(), R.string.billing_load_product_failed,
                                Toast.LENGTH_LONG).show();
                }
                mLoadInAppProducts = null;
            }
        }.execute();
    }
}
| |
package edu.psu.chemxseer.structure.postings.Impl;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import de.parmol.parsers.GraphParser;
import edu.psu.chemxseer.structure.postings.Interface.IGraphFetcher;
import edu.psu.chemxseer.structure.postings.Interface.IGraphResult;
import edu.psu.chemxseer.structure.subsearch.Interfaces.SearchStatus;
/**
 * Lucene-backed lazy fetcher: returns the matched documents in batches,
 * keeping the hit array ordered by ascending Lucene document id so that
 * join/remove can run as sorted-merge passes.
 *
 * @author dayuyuan
 */
public class GraphFetcherLucene implements IGraphFetcher {
    protected IndexSearcher searcher;
    // Hits still to be fetched; sorted by ascending doc id in the constructors.
    protected ScoreDoc[] scoreDocs;
    // Index of the first not-yet-fetched entry in scoreDocs.
    protected int start;
    protected GraphParser gParser;

    public GraphFetcherLucene(IndexSearcher searcher, TopDocs hits,
            GraphParser gParser) {
        this.searcher = searcher;
        this.scoreDocs = hits.scoreDocs;
        this.start = 0;
        this.gParser = gParser;
        Arrays.sort(scoreDocs, new DocComparator());
    }

    /**
     * Copy constructor sharing another fetcher's searcher and hit array.
     * NOTE(review): gParser is deliberately left null here - confirm callers
     * of this constructor never need the results parsed.
     */
    public GraphFetcherLucene(GraphFetcherLucene lucene) {
        this.searcher = lucene.searcher;
        this.scoreDocs = lucene.scoreDocs;
        this.start = 0;
        this.gParser = null;
        Arrays.sort(scoreDocs, new DocComparator());
    }

    /**
     * Fetch the next batch of documents, or null when all hits are consumed.
     * Loading time is accumulated into the supplied search status.
     */
    @Override
    public List<IGraphResult> getGraphs(SearchStatus searchResult) {
        if (start == scoreDocs.length)
            return null; // exhausted: null is the "no more batches" signal used by getAllGraphs()
        long startTime = System.currentTimeMillis();
        // batchCount is not declared in this class; presumably a constant
        // inherited from IGraphFetcher - TODO confirm.
        int end = Math.min(start + batchCount, scoreDocs.length);
        List<IGraphResult> results = new ArrayList<IGraphResult>();
        for (int i = start; i < end; i++) {
            int docID = scoreDocs[i].doc;
            Document graphDoc = null;
            try {
                graphDoc = searcher.doc(docID);
            } catch (CorruptIndexException e) {
                e.printStackTrace();
            } catch (IOException e) {
                e.printStackTrace();
            }
            // Documents that failed to load are silently skipped.
            if (graphDoc != null)
                results.add(new GraphResultLucene(gParser, graphDoc, docID));
        }
        start = end;
        searchResult.addDbLoadingTime(System.currentTimeMillis() - startTime);
        return results;
    }

    @Override
    public int size() {
        return this.scoreDocs.length;
    }

    /** @return all remaining doc ids, in ascending order. */
    @Override
    public int[] getOrderedIDs() {
        int[] results = new int[this.scoreDocs.length];
        for (int i = 0; i < results.length; i++)
            results[i] = scoreDocs[i].doc;
        return results;
    }

    /**
     * Intersect this fetcher's hits with another fetcher's doc ids
     * (sorted-merge over the two ascending sequences).
     */
    @Override
    public IGraphFetcher join(IGraphFetcher fetcher) {
        int[] otherIDs = fetcher.getOrderedIDs();
        // NOTE(review): an empty/null other set leaves this fetcher unchanged
        // (treated as "no filter") rather than producing an empty intersection.
        // Behavior preserved as-is - confirm this is the intended contract.
        if (otherIDs == null || otherIDs.length == 0)
            return this;
        int keep = 0, i = 0, j = 0;
        while (i < scoreDocs.length && j < otherIDs.length) {
            if (scoreDocs[i].doc > otherIDs[j]) {
                j++;
            } else if (scoreDocs[i].doc == otherIDs[j]) {
                scoreDocs[keep++] = scoreDocs[i]; // present in both: retain
                i++;
                j++;
            } else { // scoreDocs[i].doc < otherIDs[j]: not in the other set
                i++;
            }
        }
        truncate(keep);
        return this;
    }

    /**
     * Remove every hit whose doc id appears in the other fetcher.
     * Delegates to {@link #remove(int[])}; the two methods previously
     * duplicated the same merge loop verbatim.
     */
    @Override
    public IGraphFetcher remove(IGraphFetcher fetcher) {
        return remove(fetcher.getOrderedIDs());
    }

    /**
     * Remove every hit whose doc id appears in the given ascending id set
     * (sorted-merge set difference).
     */
    @Override
    public IGraphFetcher remove(int[] orderedSet) {
        if (orderedSet == null || orderedSet.length == 0)
            return this; // nothing to remove
        int keep = 0, i = 0, j = 0;
        while (i < scoreDocs.length && j < orderedSet.length) {
            if (scoreDocs[i].doc > orderedSet[j]) {
                j++;
            } else if (scoreDocs[i].doc == orderedSet[j]) {
                i++; // listed for removal: drop it
                j++;
            } else { // smaller than everything left in orderedSet: keep
                scoreDocs[keep++] = scoreDocs[i];
                i++;
            }
        }
        // Remaining hits exceed every id in orderedSet: all kept.
        while (i < scoreDocs.length)
            scoreDocs[keep++] = scoreDocs[i++];
        truncate(keep);
        return this;
    }

    /** Shrink {@link #scoreDocs} to its first {@code length} entries. */
    private void truncate(int length) {
        this.scoreDocs = Arrays.copyOf(this.scoreDocs, length);
    }

    /** Drain every remaining batch and return the sorted union of results. */
    @Override
    public List<IGraphResult> getAllGraphs(SearchStatus searchResult) {
        List<IGraphResult> answer = new ArrayList<IGraphResult>();
        List<IGraphResult> temp = this.getGraphs(searchResult);
        while (temp != null) {
            answer.addAll(temp);
            temp = this.getGraphs(searchResult);
        }
        Collections.sort(answer);
        return answer;
    }
}
/** Orders {@link ScoreDoc}s by ascending Lucene document id. */
class DocComparator implements Comparator<ScoreDoc> {
    @Override
    public int compare(ScoreDoc arg0, ScoreDoc arg1) {
        // Integer.compare avoids boxing two Integers per comparison and is
        // overflow-safe (unlike a subtraction-based comparator).
        return Integer.compare(arg0.doc, arg1.doc);
    }
}
| |
/*******************************************************************************
* Copyright 2013 Sebastien Diot
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
/**
*
*/
package com.blockwithme.lessobjects.storage;
import static com.blockwithme.lessobjects.storage.AbstractStorage.STRUCT;
import javax.annotation.Nonnull;
import javax.annotation.ParametersAreNonnullByDefault;
import com.blockwithme.lessobjects.Field;
import com.blockwithme.lessobjects.beans.BooleanValueChange;
import com.blockwithme.lessobjects.beans.ByteValueChange;
import com.blockwithme.lessobjects.beans.CharValueChange;
import com.blockwithme.lessobjects.beans.DoubleValueChange;
import com.blockwithme.lessobjects.beans.FloatValueChange;
import com.blockwithme.lessobjects.beans.IntValueChange;
import com.blockwithme.lessobjects.beans.LongValueChange;
import com.blockwithme.lessobjects.beans.ObjectValueChange;
import com.blockwithme.lessobjects.beans.ShortValueChange;
import com.blockwithme.lessobjects.beans.ValueChange;
import com.blockwithme.lessobjects.fields.global.BooleanGlobalField;
import com.blockwithme.lessobjects.fields.global.ByteGlobalField;
import com.blockwithme.lessobjects.fields.global.CharGlobalField;
import com.blockwithme.lessobjects.fields.global.DoubleGlobalField;
import com.blockwithme.lessobjects.fields.global.FloatGlobalField;
import com.blockwithme.lessobjects.fields.global.IntGlobalField;
import com.blockwithme.lessobjects.fields.global.LongGlobalField;
import com.blockwithme.lessobjects.fields.global.ShortGlobalField;
import com.blockwithme.lessobjects.fields.optional.BooleanOptionalField;
import com.blockwithme.lessobjects.fields.optional.ByteOptionalField;
import com.blockwithme.lessobjects.fields.optional.CharOptionalField;
import com.blockwithme.lessobjects.fields.optional.DoubleOptionalField;
import com.blockwithme.lessobjects.fields.optional.FloatOptionalField;
import com.blockwithme.lessobjects.fields.optional.IntOptionalField;
import com.blockwithme.lessobjects.fields.optional.LongOptionalField;
import com.blockwithme.lessobjects.fields.optional.ShortOptionalField;
import com.blockwithme.lessobjects.fields.primitive.BooleanField;
import com.blockwithme.lessobjects.fields.primitive.ByteField;
import com.blockwithme.lessobjects.fields.primitive.CharField;
import com.blockwithme.lessobjects.fields.primitive.DoubleField;
import com.blockwithme.lessobjects.fields.primitive.FloatField;
import com.blockwithme.lessobjects.fields.primitive.IntField;
import com.blockwithme.lessobjects.fields.primitive.LongField;
import com.blockwithme.lessobjects.fields.primitive.ShortField;
import com.blockwithme.lessobjects.storage.ChangeRecordsImpl.ValueChangeObjects;
/**
* The Enum ChangeType defines all the types of Fields that can be "changed".
* As well as a mean to extract the change data from the change storage.
*
* @author tarung
*/
@ParametersAreNonnullByDefault
public enum ChangeType {
/** The Boolean field change type: a change to a plain boolean field. */
BOOLEAN_FIELD {
    @Override
    ValueChange<?> loadFromStorage(final Storage theChangeStorage,
        final int theIndex,
        @SuppressWarnings("rawtypes") final Field theField,
        final ValueChangeObjects theChangeObjects) {
        // Read the before/after values from the old/new halves of the change struct.
        final boolean oldValue = theChangeStorage.read(STRUCT.getOld()
            .booleanField());
        final boolean newValue = theChangeStorage.read(STRUCT.getNew()
            .booleanField());
        // Populate the change bean supplied by theChangeObjects (reused, not allocated).
        final BooleanValueChange booleanValueChange = theChangeObjects
            .booleanValueChange();
        booleanValueChange.update(theIndex, (BooleanField<?, ?>) theField,
            oldValue, newValue);
        return booleanValueChange;
    }
},
/** The boolean global field change type; differs from BOOLEAN_FIELD only in the field cast. */
BOOLEAN_GLOBAL {
    @Override
    ValueChange<?> loadFromStorage(final Storage theChangeStorage,
        final int theIndex,
        @SuppressWarnings("rawtypes") final Field theField,
        final ValueChangeObjects theChangeObjects) {
        // Read the before/after values from the old/new halves of the change struct.
        final boolean oldValue = theChangeStorage.read(STRUCT.getOld()
            .booleanField());
        final boolean newValue = theChangeStorage.read(STRUCT.getNew()
            .booleanField());
        final BooleanValueChange booleanValueChange = theChangeObjects
            .booleanValueChange();
        booleanValueChange.update(theIndex,
            (BooleanGlobalField<?, ?>) theField, oldValue, newValue);
        return booleanValueChange;
    }
},
/** The boolean optional field change type; differs from BOOLEAN_FIELD only in the field cast. */
BOOLEAN_OPTIONAL {
    @SuppressWarnings("null")
    @Override
    ValueChange<?> loadFromStorage(final Storage theChangeStorage,
        final int theIndex,
        @SuppressWarnings("rawtypes") final Field theField,
        final ValueChangeObjects theChangeObjects) {
        // Read the before/after values from the old/new halves of the change struct.
        final boolean oldValue = theChangeStorage.read(STRUCT.getOld()
            .booleanField());
        final boolean newValue = theChangeStorage.read(STRUCT.getNew()
            .booleanField());
        final BooleanValueChange booleanValueChange = theChangeObjects
            .booleanValueChange();
        booleanValueChange.update(theIndex,
            (BooleanOptionalField<?, ?>) theField, oldValue, newValue);
        return booleanValueChange;
    }
},
/** The byte field change type: a change to a plain byte field. */
BYTE_FIELD {
    @Override
    ValueChange<?> loadFromStorage(final Storage theChangeStorage,
        final int theIndex,
        @SuppressWarnings("rawtypes") final Field theField,
        final ValueChangeObjects theChangeObjects) {
        // Read the before/after values from the old/new halves of the change struct.
        final byte oldValue = theChangeStorage.read(STRUCT.getOld()
            .byteField());
        final byte newValue = theChangeStorage.read(STRUCT.getNew()
            .byteField());
        // Populate the change bean supplied by theChangeObjects (reused, not allocated).
        final ByteValueChange byteValueChange = theChangeObjects
            .byteValueChange();
        byteValueChange.update(theIndex, (ByteField<?, ?>) theField,
            oldValue, newValue);
        return byteValueChange;
    }
},
/** The byte global field change type; differs from BYTE_FIELD only in the field cast. */
BYTE_GLOBAL {
    @Override
    ValueChange<?> loadFromStorage(final Storage theChangeStorage,
        final int theIndex,
        @SuppressWarnings("rawtypes") final Field theField,
        final ValueChangeObjects theChangeObjects) {
        // Read the before/after values from the old/new halves of the change struct.
        final byte oldValue = theChangeStorage.read(STRUCT.getOld()
            .byteField());
        final byte newValue = theChangeStorage.read(STRUCT.getNew()
            .byteField());
        final ByteValueChange byteValueChange = theChangeObjects
            .byteValueChange();
        byteValueChange.update(theIndex, (ByteGlobalField<?, ?>) theField,
            oldValue, newValue);
        return byteValueChange;
    }
},
/** The byte optional field change type; differs from BYTE_FIELD only in the field cast. */
BYTE_OPTIONAL {
    @Override
    ValueChange<?> loadFromStorage(final Storage theChangeStorage,
        final int theIndex,
        @SuppressWarnings("rawtypes") final Field theField,
        final ValueChangeObjects theChangeObjects) {
        // Read the before/after values from the old/new halves of the change struct.
        final byte oldValue = theChangeStorage.read(STRUCT.getOld()
            .byteField());
        final byte newValue = theChangeStorage.read(STRUCT.getNew()
            .byteField());
        final ByteValueChange byteValueChange = theChangeObjects
            .byteValueChange();
        byteValueChange.update(theIndex, (ByteOptionalField<?, ?>) theField,
            oldValue, newValue);
        return byteValueChange;
    }
},
/** The char field change type: a change to a plain char field. */
CHAR_FIELD {
    @Override
    ValueChange<?> loadFromStorage(final Storage theChangeStorage,
        final int theIndex,
        @SuppressWarnings("rawtypes") final Field theField,
        final ValueChangeObjects theChangeObjects) {
        // Read the before/after values from the old/new halves of the change struct.
        final char oldValue = theChangeStorage.read(STRUCT.getOld()
            .charField());
        final char newValue = theChangeStorage.read(STRUCT.getNew()
            .charField());
        // Populate the change bean supplied by theChangeObjects (reused, not allocated).
        final CharValueChange charValueChange = theChangeObjects
            .charValueChange();
        charValueChange.update(theIndex, (CharField<?, ?>) theField,
            oldValue, newValue);
        return charValueChange;
    }
},
/** The char global field change type; differs from CHAR_FIELD only in the field cast. */
CHAR_GLOBAL {
    @Override
    ValueChange<?> loadFromStorage(final Storage theChangeStorage,
        final int theIndex,
        @SuppressWarnings("rawtypes") final Field theField,
        final ValueChangeObjects theChangeObjects) {
        // Read the before/after values from the old/new halves of the change struct.
        final char oldValue = theChangeStorage.read(STRUCT.getOld()
            .charField());
        final char newValue = theChangeStorage.read(STRUCT.getNew()
            .charField());
        final CharValueChange charValueChange = theChangeObjects
            .charValueChange();
        charValueChange.update(theIndex, (CharGlobalField<?, ?>) theField,
            oldValue, newValue);
        return charValueChange;
    }
},
/** Change type for an optional char field. */
CHAR_OPTIONAL {
    @Override
    ValueChange<?> loadFromStorage(final Storage theChangeStorage,
            final int theIndex,
            @SuppressWarnings("rawtypes") final Field theField,
            final ValueChangeObjects theChangeObjects) {
        // Read the before/after snapshots from the change record.
        final char before = theChangeStorage.read(STRUCT.getOld().charField());
        final char after = theChangeStorage.read(STRUCT.getNew().charField());
        // Record the transition on a reusable CharValueChange.
        final CharValueChange change = theChangeObjects.charValueChange();
        change.update(theIndex, (CharOptionalField<?, ?>) theField, before, after);
        return change;
    }
},
/** Change type for a plain double field. */
DOUBLE_FIELD {
    @Override
    ValueChange<?> loadFromStorage(final Storage theChangeStorage,
            final int theIndex,
            @SuppressWarnings("rawtypes") final Field theField,
            final ValueChangeObjects theChangeObjects) {
        // Read the before/after snapshots from the change record.
        final double before = theChangeStorage.read(STRUCT.getOld().doubleField());
        final double after = theChangeStorage.read(STRUCT.getNew().doubleField());
        // Record the transition on a reusable DoubleValueChange.
        final DoubleValueChange change = theChangeObjects.doubleValueChange();
        change.update(theIndex, (DoubleField<?, ?>) theField, before, after);
        return change;
    }
},
/** Change type for a global double field. */
DOUBLE_GLOBAL {
    @Override
    ValueChange<?> loadFromStorage(final Storage theChangeStorage,
            final int theIndex,
            @SuppressWarnings("rawtypes") final Field theField,
            final ValueChangeObjects theChangeObjects) {
        // Read the before/after snapshots from the change record.
        final double before = theChangeStorage.read(STRUCT.getOld().doubleField());
        final double after = theChangeStorage.read(STRUCT.getNew().doubleField());
        // Record the transition on a reusable DoubleValueChange.
        final DoubleValueChange change = theChangeObjects.doubleValueChange();
        change.update(theIndex, (DoubleGlobalField<?, ?>) theField, before, after);
        return change;
    }
},
/** Change type for an optional double field. */
DOUBLE_OPTIONAL {
    @Override
    @Nonnull
    ValueChange<?> loadFromStorage(final Storage theChangeStorage,
            final int theIndex,
            @SuppressWarnings("rawtypes") final Field theField,
            final ValueChangeObjects theChangeObjects) {
        // Read the before/after snapshots from the change record.
        final double before = theChangeStorage.read(STRUCT.getOld().doubleField());
        final double after = theChangeStorage.read(STRUCT.getNew().doubleField());
        // Record the transition on a reusable DoubleValueChange.
        final DoubleValueChange change = theChangeObjects.doubleValueChange();
        change.update(theIndex, (DoubleOptionalField<?, ?>) theField, before, after);
        return change;
    }
},
/** Change type for a plain float field. */
FLOAT_FIELD {
    @Override
    ValueChange<?> loadFromStorage(final Storage theChangeStorage,
            final int theIndex,
            @SuppressWarnings("rawtypes") final Field theField,
            final ValueChangeObjects theChangeObjects) {
        // Read the before/after snapshots from the change record.
        final float before = theChangeStorage.read(STRUCT.getOld().floatField());
        final float after = theChangeStorage.read(STRUCT.getNew().floatField());
        // Record the transition on a reusable FloatValueChange.
        final FloatValueChange change = theChangeObjects.floatValueChange();
        change.update(theIndex, (FloatField<?, ?>) theField, before, after);
        return change;
    }
},
/** Change type for a global float field. */
FLOAT_GLOBAL {
    @Override
    ValueChange<?> loadFromStorage(final Storage theChangeStorage,
            final int theIndex,
            @SuppressWarnings("rawtypes") final Field theField,
            final ValueChangeObjects theChangeObjects) {
        // Read the before/after snapshots from the change record.
        final float before = theChangeStorage.read(STRUCT.getOld().floatField());
        final float after = theChangeStorage.read(STRUCT.getNew().floatField());
        // Record the transition on a reusable FloatValueChange.
        final FloatValueChange change = theChangeObjects.floatValueChange();
        change.update(theIndex, (FloatGlobalField<?, ?>) theField, before, after);
        return change;
    }
},
/** Change type for an optional float field. */
FLOAT_OPTIONAL {
    @Override
    ValueChange<?> loadFromStorage(final Storage theChangeStorage,
            final int theIndex,
            @SuppressWarnings("rawtypes") final Field theField,
            final ValueChangeObjects theChangeObjects) {
        // Read the before/after snapshots from the change record.
        final float before = theChangeStorage.read(STRUCT.getOld().floatField());
        final float after = theChangeStorage.read(STRUCT.getNew().floatField());
        // Record the transition on a reusable FloatValueChange.
        final FloatValueChange change = theChangeObjects.floatValueChange();
        change.update(theIndex, (FloatOptionalField<?, ?>) theField, before, after);
        return change;
    }
},
/** Change type for a plain int field. */
INT_FIELD {
    @Override
    ValueChange<?> loadFromStorage(final Storage theChangeStorage,
            final int theIndex,
            @SuppressWarnings("rawtypes") final Field theField,
            final ValueChangeObjects theChangeObjects) {
        // Read the before/after snapshots from the change record.
        final int before = theChangeStorage.read(STRUCT.getOld().intField());
        final int after = theChangeStorage.read(STRUCT.getNew().intField());
        // Record the transition on a reusable IntValueChange.
        final IntValueChange change = theChangeObjects.intValueChange();
        change.update(theIndex, (IntField<?, ?>) theField, before, after);
        return change;
    }
},
/** Change type for a global int field. */
INT_GLOBAL {
    @Override
    ValueChange<?> loadFromStorage(final Storage theChangeStorage,
            final int theIndex,
            @SuppressWarnings("rawtypes") final Field theField,
            final ValueChangeObjects theChangeObjects) {
        // Read the before/after snapshots from the change record.
        final int before = theChangeStorage.read(STRUCT.getOld().intField());
        final int after = theChangeStorage.read(STRUCT.getNew().intField());
        // Record the transition on a reusable IntValueChange.
        final IntValueChange change = theChangeObjects.intValueChange();
        change.update(theIndex, (IntGlobalField<?, ?>) theField, before, after);
        return change;
    }
},
/** Change type for an optional int field. */
INT_OPTIONAL {
    @Override
    ValueChange<?> loadFromStorage(final Storage theChangeStorage,
            final int theIndex,
            @SuppressWarnings("rawtypes") final Field theField,
            final ValueChangeObjects theChangeObjects) {
        // Read the before/after snapshots from the change record.
        final int before = theChangeStorage.read(STRUCT.getOld().intField());
        final int after = theChangeStorage.read(STRUCT.getNew().intField());
        // Record the transition on a reusable IntValueChange.
        final IntValueChange change = theChangeObjects.intValueChange();
        change.update(theIndex, (IntOptionalField<?, ?>) theField, before, after);
        return change;
    }
},
/** Change type for a plain long field. */
LONG_FIELD {
    @Override
    ValueChange<?> loadFromStorage(final Storage theChangeStorage,
            final int theIndex,
            @SuppressWarnings("rawtypes") final Field theField,
            final ValueChangeObjects theChangeObjects) {
        // Read the before/after snapshots from the change record.
        final long before = theChangeStorage.read(STRUCT.getOld().longField());
        final long after = theChangeStorage.read(STRUCT.getNew().longField());
        // Record the transition on a reusable LongValueChange.
        final LongValueChange change = theChangeObjects.longValueChange();
        change.update(theIndex, (LongField<?, ?>) theField, before, after);
        return change;
    }
},
/** Change type for a global long field. */
LONG_GLOBAL {
    @Override
    ValueChange<?> loadFromStorage(final Storage theChangeStorage,
            final int theIndex,
            @SuppressWarnings("rawtypes") final Field theField,
            final ValueChangeObjects theChangeObjects) {
        // Read the before/after snapshots from the change record.
        final long before = theChangeStorage.read(STRUCT.getOld().longField());
        final long after = theChangeStorage.read(STRUCT.getNew().longField());
        // Record the transition on a reusable LongValueChange.
        final LongValueChange change = theChangeObjects.longValueChange();
        change.update(theIndex, (LongGlobalField<?, ?>) theField, before, after);
        return change;
    }
},
/** Change type for an optional long field. */
LONG_OPTIONAL {
    @Override
    ValueChange<?> loadFromStorage(final Storage theChangeStorage,
            final int theIndex,
            @SuppressWarnings("rawtypes") final Field theField,
            final ValueChangeObjects theChangeObjects) {
        // Read the before/after snapshots from the change record.
        final long before = theChangeStorage.read(STRUCT.getOld().longField());
        final long after = theChangeStorage.read(STRUCT.getNew().longField());
        // Record the transition on a reusable LongValueChange.
        final LongValueChange change = theChangeObjects.longValueChange();
        change.update(theIndex, (LongOptionalField<?, ?>) theField, before, after);
        return change;
    }
},
/** Change type for an object-valued field. */
OBJECT_FIELD {
    @SuppressWarnings({ "rawtypes", "unchecked", "null" })
    @Override
    ValueChange<?> loadFromStorage(final Storage theChangeStorage,
            final int theIndex, final Field theField,
            final ValueChangeObjects theChangeObjects) {
        // Objects are not readable through the primitive read() API; the
        // storage must be an AbstractStorage exposing readObject().
        final AbstractStorage storage = (AbstractStorage) theChangeStorage;
        final Object before = storage.readObject(STRUCT.getOld().objectField());
        final Object after = storage.readObject(STRUCT.getNew().objectField());
        // Record the transition on a reusable ObjectValueChange.
        final ObjectValueChange change = theChangeObjects.objectValueChange();
        change.update(theIndex, theField, before, after);
        return change;
    }
},
/** Change type for a plain short field. */
SHORT_FIELD {
    @Override
    ValueChange<?> loadFromStorage(final Storage theChangeStorage,
            final int theIndex,
            @SuppressWarnings("rawtypes") final Field theField,
            final ValueChangeObjects theChangeObjects) {
        // Read the before/after snapshots from the change record.
        final short before = theChangeStorage.read(STRUCT.getOld().shortField());
        final short after = theChangeStorage.read(STRUCT.getNew().shortField());
        // Record the transition on a reusable ShortValueChange.
        final ShortValueChange change = theChangeObjects.shortValueChange();
        change.update(theIndex, (ShortField<?, ?>) theField, before, after);
        return change;
    }
},
/** Change type for a global short field. */
SHORT_GLOBAL {
    @SuppressWarnings("null")
    @Override
    ValueChange<?> loadFromStorage(final Storage theChangeStorage,
            final int theIndex,
            @SuppressWarnings("rawtypes") final Field theField,
            final ValueChangeObjects theChangeObjects) {
        // Read the before/after snapshots from the change record.
        final short before = theChangeStorage.read(STRUCT.getOld().shortField());
        final short after = theChangeStorage.read(STRUCT.getNew().shortField());
        // Record the transition on a reusable ShortValueChange.
        final ShortValueChange change = theChangeObjects.shortValueChange();
        change.update(theIndex, (ShortGlobalField<?, ?>) theField, before, after);
        return change;
    }
},
/** Change type for an optional short field. */
SHORT_OPTIONAL {
    @Override
    ValueChange<?> loadFromStorage(final Storage theChangeStorage,
            final int theIndex,
            @SuppressWarnings("rawtypes") final Field theField,
            final ValueChangeObjects theChangeObjects) {
        // Read the before/after snapshots from the change record.
        final short before = theChangeStorage.read(STRUCT.getOld().shortField());
        final short after = theChangeStorage.read(STRUCT.getNew().shortField());
        // Record the transition on a reusable ShortValueChange.
        final ShortValueChange change = theChangeObjects.shortValueChange();
        change.update(theIndex, (ShortOptionalField<?, ?>) theField, before, after);
        return change;
    }
};
/**
 * Builds a ValueChange instance by reading the data from the "change" Storage.
 *
 * @param theChangeStorage the storage holding the serialized change record
 * @param theIndex the index of the changed field
 * @param theField the field whose value changed; each enum constant casts it
 *        to its matching Field subtype
 * @param theChangeObjects supplier of ValueChange instances (presumably a
 *        reuse pool, since each constant updates and returns the same
 *        instance it obtained — TODO confirm)
 * @return the populated ValueChange
 */
abstract ValueChange<?> loadFromStorage(final Storage theChangeStorage,
        final int theIndex,
        @SuppressWarnings("rawtypes") final Field theField,
        ValueChangeObjects theChangeObjects);
}
| |
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.lite.android.launcher3.allapps;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.content.res.Resources;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.PointF;
import android.graphics.Rect;
import android.support.v4.view.accessibility.AccessibilityRecordCompat;
import android.support.v4.view.accessibility.AccessibilityEventCompat;
import android.net.Uri;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.view.accessibility.AccessibilityEvent;
import android.widget.TextView;
import com.lite.android.launcher3.AppInfo;
import com.lite.android.launcher3.BaseRecyclerViewFastScrollBar.FastScrollFocusApplicator;
import com.lite.android.launcher3.BaseRecyclerViewFastScrollBar.FastScrollFocusable;
import com.lite.android.launcher3.BubbleTextView;
import com.lite.android.launcher3.Launcher;
import com.lite.android.launcher3.R;
import com.lite.android.launcher3.RemoteFolderManager;
import com.lite.android.launcher3.Utilities;
import com.lite.android.launcher3.settings.SettingsProvider;
import com.lite.android.launcher3.util.Thunk;
import java.util.HashMap;
import java.util.List;
/**
* The grid view adapter of all the apps.
*/
public class AllAppsGridAdapter extends RecyclerView.Adapter<AllAppsGridAdapter.ViewHolder> {
public static final String TAG = "AppsGridAdapter";
private static final boolean DEBUG = false;
// A section break in the grid
public static final int SECTION_BREAK_VIEW_TYPE = 0;
// A normal icon
public static final int ICON_VIEW_TYPE = 1;
// A prediction icon
public static final int PREDICTION_ICON_VIEW_TYPE = 2;
// The message shown when there are no filtered results
public static final int EMPTY_SEARCH_VIEW_TYPE = 3;
// A divider that separates the apps list and the search market button
public static final int SEARCH_MARKET_DIVIDER_VIEW_TYPE = 4;
// The message to continue to a market search when there are no filtered results
public static final int SEARCH_MARKET_VIEW_TYPE = 5;
// Section header for customized predicated apps.
public static final int CUSTOM_PREDICTED_APPS_HEADER_VIEW_TYPE = 6;
// Additional spacing between predicted apps and regular apps.
public static final int CUSTOM_PREDICTED_APPS_FOOTER_VIEW_TYPE = 7;
// Whether all icons are currently drawn dimmed (see shouldDimPosition()).
private boolean mIconsDimmed = false;
// Current grid theme; compared against AllAppsContainerView.GRID_THEME_DARK.
private int mGridTheme;
// Section exempt from dimming while mIconsDimmed is set; may be null.
private AlphabeticalAppsList.SectionInfo mFocusedSection;
/**
 * ViewHolder wrapping a single adapter row/icon view.
 */
public static class ViewHolder extends RecyclerView.ViewHolder {
    public View mContent;

    public ViewHolder(View itemView) {
        super(itemView);
        mContent = itemView;
    }
}
/**
 * A subclass of GridLayoutManager that overrides accessibility values during app search.
 */
public class AppsGridLayoutManager extends GridLayoutManager {

    public AppsGridLayoutManager(Context context) {
        super(context, 1, GridLayoutManager.VERTICAL, false);
    }

    @Override
    public void onInitializeAccessibilityEvent(AccessibilityEvent event) {
        super.onInitializeAccessibilityEvent(event);
        // Report only the number of apps to accessibility services, not the
        // adapter's extra rows (section breaks, dividers, etc.).
        final AccessibilityRecordCompat record = AccessibilityEventCompat.asRecord(event);
        record.setItemCount(mApps.getNumFilteredApps());
    }

    @Override
    public int getRowCountForAccessibility(RecyclerView.Recycler recycler,
            RecyclerView.State state) {
        // The "no search results" text is not counted as a list row.
        if (mApps.hasNoFilteredResults()) {
            return 0;
        }
        return super.getRowCountForAccessibility(recycler, state);
    }
}
/**
 * Helper class that decides how many grid columns each adapter row spans.
 */
public class GridSpanSizer extends GridLayoutManager.SpanSizeLookup {

    public GridSpanSizer() {
        super();
        setSpanIndexCacheEnabled(true);
    }

    @Override
    public int getSpanSize(int position) {
        final int viewType = mApps.getAdapterItems().get(position).viewType;
        // Icons occupy a single cell; every other row type (section breaks,
        // headers, dividers, search rows) stretches across the full row.
        if (viewType == AllAppsGridAdapter.ICON_VIEW_TYPE
                || viewType == AllAppsGridAdapter.PREDICTION_ICON_VIEW_TYPE) {
            return 1;
        }
        return mAppsPerRow;
    }
}
/**
 * Helper class to draw the section headers and the predicted-apps divider
 * alongside the grid items.
 */
public class GridItemDecoration extends RecyclerView.ItemDecoration {
    // Draws a translucent red strip over the section-name margin for debugging.
    private static final boolean DEBUG_SECTION_MARGIN = false;
    // When true, a section name fades out as its row scrolls past the baseline.
    private static final boolean FADE_OUT_SECTIONS = true;
    // Measured text bounds per section name, cached to avoid re-measuring on every frame.
    private HashMap<String, PointF> mCachedSectionBounds = new HashMap<>();
    // Scratch rect reused by getAndCacheSectionBounds().
    private Rect mTmpBounds = new Rect();

    @Override
    public void onDraw(Canvas c, RecyclerView parent, RecyclerView.State state) {
        // No decorations while a search filter is active or before the grid is sized.
        if (mApps.hasFilter() || mAppsPerRow == 0) {
            return;
        }
        if (DEBUG_SECTION_MARGIN) {
            Paint p = new Paint();
            p.setColor(0x33ff0000);
            c.drawRect(mBackgroundPadding.left, 0, mBackgroundPadding.left + mSectionNamesMargin,
                    parent.getMeasuredHeight(), p);
        }
        List<AlphabeticalAppsList.AdapterItem> items = mApps.getAdapterItems();
        boolean hasDrawnPredictedAppsDivider = false;
        boolean showSectionNames = mSectionNamesMargin > 0;
        int childCount = parent.getChildCount();
        // Position/height of the previously drawn section name, used to
        // avoid overlapping consecutive section labels.
        int lastSectionTop = 0;
        int lastSectionHeight = 0;
        for (int i = 0; i < childCount; i++) {
            View child = parent.getChildAt(i);
            ViewHolder holder = (ViewHolder) parent.getChildViewHolder(child);
            if (!isValidHolderAndChild(holder, child, items)) {
                continue;
            }
            if (shouldDrawItemDivider(holder, items) && !hasDrawnPredictedAppsDivider) {
                // Draw the divider under the predicted apps
                int top = child.getTop() + child.getHeight() + mPredictionBarDividerOffset;
                c.drawLine(mBackgroundPadding.left, top,
                        parent.getWidth() - mBackgroundPadding.right, top,
                        mPredictedAppsDividerPaint);
                hasDrawnPredictedAppsDivider = true;
                // Only customized predicted apps will draw a section name.
                if (!mApps.mCustomPredictedAppsEnabled) continue;
            }
            if (showSectionNames && shouldDrawItemSection(holder, items)) {
                // Draw the section name for the first visible item
                int viewTopOffset = (2 * child.getPaddingTop());
                int pos = holder.getPosition();
                AlphabeticalAppsList.AdapterItem item = items.get(pos);
                AlphabeticalAppsList.SectionInfo sectionInfo = item.sectionInfo;
                String lastSectionName = item.sectionName;
                // Find the section name bounds
                PointF sectionBounds = getAndCacheSectionBounds(lastSectionName);
                // Calculate where to draw the section
                int sectionBaseline = (int) (viewTopOffset + sectionBounds.y);
                int x = mIsRtl ?
                        parent.getWidth() - mBackgroundPadding.left - mSectionNamesMargin :
                        mBackgroundPadding.left;
                // Center the label horizontally within the section-name margin.
                x += (int) ((mSectionNamesMargin - sectionBounds.x) / 2f);
                int y;
                boolean fixedToRow = false;
                if (item.viewType == PREDICTION_ICON_VIEW_TYPE) {
                    y = child.getTop() - (int) mSectionTextPaint.getTextSize() / 2;
                } else {
                    y = child.getTop() + sectionBaseline;
                    // Determine whether this is the last row with apps in that section, if
                    // so, then fix the section to the row allowing it to scroll past the
                    // baseline, otherwise, bound it to the baseline so it's in the viewport
                    int appIndexInSection = items.get(pos).sectionAppIndex;
                    int nextRowPos = Math.min(items.size() - 1,
                            pos + mAppsPerRow - (appIndexInSection % mAppsPerRow));
                    AlphabeticalAppsList.AdapterItem nextRowItem = items.get(nextRowPos);
                    fixedToRow = !lastSectionName.equals(nextRowItem.sectionName);
                    if (!fixedToRow) {
                        y = Math.max(sectionBaseline, y);
                    }
                    // In addition, if it overlaps with the last section that was drawn, then
                    // offset it so that it does not overlap
                    if (lastSectionHeight > 0 && y <= (lastSectionTop + lastSectionHeight)) {
                        y += lastSectionTop - y + lastSectionHeight;
                    }
                }
                // Draw the section header
                if (FADE_OUT_SECTIONS) {
                    int alpha = 255;
                    if (fixedToRow) {
                        alpha = Math.min(255,
                                (int) (255 * (Math.max(0, y) / (float) sectionBaseline)));
                    }
                    mSectionTextPaint.setAlpha(alpha);
                }
                c.drawText(lastSectionName, x, y, mSectionTextPaint);
                lastSectionTop = y;
                lastSectionHeight = (int) (sectionBounds.y + mSectionHeaderOffset);
                // Skip past the remaining children of this section; each
                // section name is drawn at most once per frame.
                i += (sectionInfo.numApps - item.sectionAppIndex);
            }
        }
    }

    @Override
    public void getItemOffsets(Rect outRect, View view, RecyclerView parent,
            RecyclerView.State state) {
        // Do nothing: this decoration only draws, it adds no offsets.
    }

    /**
     * Given a section name, return the bounds of the given section name.
     * Results are memoized in mCachedSectionBounds.
     */
    private PointF getAndCacheSectionBounds(String sectionName) {
        PointF bounds = mCachedSectionBounds.get(sectionName);
        if (bounds == null) {
            mSectionTextPaint.getTextBounds(sectionName, 0, sectionName.length(), mTmpBounds);
            bounds = new PointF(mSectionTextPaint.measureText(sectionName), mTmpBounds.height());
            mCachedSectionBounds.put(sectionName, bounds);
        }
        return bounds;
    }

    /**
     * Returns whether we consider this a valid view holder for us to draw a divider or section for.
     */
    private boolean isValidHolderAndChild(ViewHolder holder, View child,
            List<AlphabeticalAppsList.AdapterItem> items) {
        // Ensure item is not already removed
        GridLayoutManager.LayoutParams lp = (GridLayoutManager.LayoutParams)
                child.getLayoutParams();
        if (lp.isItemRemoved()) {
            return false;
        }
        // Ensure we have a valid holder
        if (holder == null) {
            return false;
        }
        // Ensure we have a holder position
        int pos = holder.getPosition();
        if (pos < 0 || pos >= items.size()) {
            return false;
        }
        return true;
    }

    /**
     * Returns whether to draw the divider for a given child.
     */
    private boolean shouldDrawItemDivider(ViewHolder holder,
            List<AlphabeticalAppsList.AdapterItem> items) {
        int pos = holder.getPosition();
        return items.get(pos).viewType == AllAppsGridAdapter.PREDICTION_ICON_VIEW_TYPE;
    }

    /**
     * Returns whether to draw the section for the given child.
     */
    private boolean shouldDrawItemSection(ViewHolder holder,
            List<AlphabeticalAppsList.AdapterItem> items) {
        int pos = holder.getPosition();
        AlphabeticalAppsList.AdapterItem item = items.get(pos);
        // Ensure it's an icon
        if (item.viewType != ICON_VIEW_TYPE && item.viewType != PREDICTION_ICON_VIEW_TYPE) {
            return false;
        }
        return true;
    }
}
// Hooks for remote-folder customization of predicted-apps rows.
private final RemoteFolderManager mRemoteFolderManager;
private Launcher mLauncher;
private LayoutInflater mLayoutInflater;
@Thunk AlphabeticalAppsList mApps;
private GridLayoutManager mGridLayoutMgr;
private GridSpanSizer mGridSizer;
private GridItemDecoration mItemDecoration;
// Listeners forwarded onto each inflated icon view.
private View.OnTouchListener mTouchListener;
private View.OnClickListener mIconClickListener;
private View.OnLongClickListener mIconLongClickListener;
@Thunk final Rect mBackgroundPadding = new Rect();
@Thunk int mPredictionBarDividerOffset;
@Thunk int mAppsPerRow;
@Thunk boolean mIsRtl;
// The text to show when there are no search results and no market search handler.
private String mEmptySearchMessage;
// The name of the market app which handles searches, to be used in the format str
// below when updating the search-market view. Only needs to be loaded once.
private String mMarketAppName;
// The text to show when there is a market app which can handle a specific query, updated
// each time the search query changes.
private String mMarketSearchMessage;
// The intent to send off to the market app, updated each time the search query changes.
private Intent mMarketSearchIntent;
// The last query that the user entered into the search field
private String mLastSearchQuery;
// Section drawing
@Thunk int mSectionNamesMargin;
@Thunk int mSectionHeaderOffset;
@Thunk int mSectionStrategy;
@Thunk Paint mSectionTextPaint;
@Thunk Paint mPredictedAppsDividerPaint;
private int mAllAppsTextColor;
// Measured heights cached from the last bind of the custom predicted-apps
// header/footer rows; consumed by getCustomPredictedAppsOffset().
private int mCustomPredictedAppsHeaderHeight;
private int mCustomPredictedAppsFooterHeight;
/**
 * Creates the all-apps grid adapter and wires up its layout manager, span
 * sizer, item decoration and the listeners attached to each icon view.
 */
public AllAppsGridAdapter(Launcher launcher, AlphabeticalAppsList apps,
        View.OnTouchListener touchListener, View.OnClickListener iconClickListener,
        View.OnLongClickListener iconLongClickListener) {
    Resources res = launcher.getResources();
    mLauncher = launcher;
    mApps = apps;
    mEmptySearchMessage = res.getString(R.string.all_apps_loading_message);
    mGridSizer = new GridSpanSizer();
    mGridLayoutMgr = new AppsGridLayoutManager(launcher);
    mGridLayoutMgr.setSpanSizeLookup(mGridSizer);
    mItemDecoration = new GridItemDecoration();
    mLayoutInflater = LayoutInflater.from(launcher);
    mTouchListener = touchListener;
    mIconClickListener = iconClickListener;
    mIconLongClickListener = iconLongClickListener;
    // NOTE(review): mSectionStrategy is still at its default (0) here, so this
    // ternary resolves against that default until setSectionStrategy() runs
    // and recomputes the margin — confirm the initial value is intentional.
    mSectionNamesMargin = mSectionStrategy ==
            AllAppsContainerView.SECTION_STRATEGY_GRID ?
            res.getDimensionPixelSize(R.dimen.all_apps_grid_view_start_margin) :
            res.getDimensionPixelSize(R.dimen.all_apps_grid_view_start_margin_with_sections);
    // NOTE(review): likewise mGridTheme is unset until setGridTheme() is called.
    mAllAppsTextColor = mGridTheme == AllAppsContainerView.GRID_THEME_DARK ?
            res.getColor(R.color.quantum_panel_text_color_dark) :
            res.getColor(R.color.quantum_panel_text_color);
    mSectionHeaderOffset = res.getDimensionPixelSize(R.dimen.all_apps_grid_section_y_offset);
    mSectionTextPaint = new Paint();
    mSectionTextPaint.setTextSize(res.getDimensionPixelSize(
            R.dimen.all_apps_grid_section_text_size));
    int sectionTextColorId = mGridTheme == AllAppsContainerView.GRID_THEME_DARK ?
            R.color.all_apps_grid_section_text_color_dark :
            R.color.all_apps_grid_section_text_color;
    mSectionTextPaint.setColor(res.getColor(sectionTextColorId));
    mSectionTextPaint.setAntiAlias(true);
    mPredictedAppsDividerPaint = new Paint();
    mPredictedAppsDividerPaint.setStrokeWidth(Utilities.pxFromDp(1f, res.getDisplayMetrics()));
    mPredictedAppsDividerPaint.setColor(0x1E000000);
    mPredictedAppsDividerPaint.setAntiAlias(true);
    mPredictionBarDividerOffset =
            res.getDimensionPixelSize(R.dimen.all_apps_prediction_bar_divider_offset);
    // Resolve the market app handling additional searches
    PackageManager pm = launcher.getPackageManager();
    ResolveInfo marketInfo = pm.resolveActivity(createMarketSearchIntent(""),
            PackageManager.MATCH_DEFAULT_ONLY);
    if (marketInfo != null) {
        mMarketAppName = marketInfo.loadLabel(pm).toString();
    }
    mRemoteFolderManager = launcher.getRemoteFolderManager();
}
/**
 * Sets the number of apps per row and updates the layout manager's span
 * count to match.
 */
public void setNumAppsPerRow(int appsPerRow) {
    mAppsPerRow = appsPerRow;
    mGridLayoutMgr.setSpanCount(appsPerRow);
}
/**
 * Sets whether we are in RTL mode; affects where section names are drawn.
 */
public void setRtl(boolean rtl) {
    mIsRtl = rtl;
}
/**
 * Sets the section strategy and recomputes the left margin reserved for
 * drawing section names.
 */
public void setSectionStrategy(int sectionStrategy) {
    mSectionStrategy = sectionStrategy;
    Resources res = mLauncher.getResources();
    if (mSectionStrategy == AllAppsContainerView.SECTION_STRATEGY_GRID) {
        mSectionNamesMargin = res.getDimensionPixelSize(
                R.dimen.all_apps_grid_view_start_margin);
    } else {
        mSectionNamesMargin = res.getDimensionPixelSize(
                R.dimen.all_apps_grid_view_start_margin_with_sections);
    }
}
/**
 * Sets the last search query that was made, used to show when there are no results and to also
 * seed the intent for searching the market.
 */
public void setLastSearchQuery(String query) {
    mLastSearchQuery = query;
    Resources res = mLauncher.getResources();
    mEmptySearchMessage = String.format(
            res.getString(R.string.all_apps_no_search_results), query);
    // Only offer a market search if a market app was resolved at construction.
    if (mMarketAppName != null) {
        mMarketSearchMessage = String.format(res.getString(R.string.all_apps_search_market_message),
                mMarketAppName);
        mMarketSearchIntent = createMarketSearchIntent(query);
    }
}
/**
 * Notifies the adapter of the background padding so that it can draw things correctly in the
 * item decorator.
 */
public void updateBackgroundPadding(Rect padding) {
    mBackgroundPadding.set(padding);
}
/**
 * Returns the grid layout manager (an AppsGridLayoutManager) backing this adapter.
 */
public GridLayoutManager getLayoutManager() {
    return mGridLayoutMgr;
}
/**
 * Returns the item decoration for the recycler view.
 */
public RecyclerView.ItemDecoration getItemDecoration() {
    // NOTE(review): comment claims headers are skipped "when we are
    // uncomfortably dense", but the same decoration is always returned;
    // the density handling, if any, lives inside GridItemDecoration.
    return mItemDecoration;
}
/**
 * Inflates and wires the row view for the given adapter view type.
 * The shared listener/focus setup for the two icon view types is factored
 * into {@link #setUpIconView} so both stay in sync.
 *
 * @throws RuntimeException if {@code viewType} is not one of the known types
 */
@Override
public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
    boolean hideIconLabels = SettingsProvider.getBoolean(mLauncher,
            SettingsProvider.SETTINGS_UI_DRAWER_HIDE_ICON_LABELS,
            R.bool.preferences_interface_drawer_hide_icon_labels_default);
    switch (viewType) {
        case SECTION_BREAK_VIEW_TYPE:
            return new ViewHolder(new View(parent.getContext()));
        case ICON_VIEW_TYPE: {
            BubbleTextView icon = (BubbleTextView) mLayoutInflater.inflate(
                    R.layout.all_apps_icon, parent, false);
            setUpIconView(icon, parent, hideIconLabels);
            return new ViewHolder(icon);
        }
        case PREDICTION_ICON_VIEW_TYPE: {
            BubbleTextView icon = (BubbleTextView) mLayoutInflater.inflate(
                    R.layout.all_apps_prediction_bar_icon, parent, false);
            setUpIconView(icon, parent, hideIconLabels);
            ViewHolder holder = new ViewHolder(icon);
            // Let the remote folder manager decorate prediction rows.
            mRemoteFolderManager.onCreateViewHolder(holder, viewType);
            return holder;
        }
        case EMPTY_SEARCH_VIEW_TYPE:
            return new ViewHolder(mLayoutInflater.inflate(R.layout.all_apps_empty_search,
                    parent, false));
        case SEARCH_MARKET_DIVIDER_VIEW_TYPE:
            return new ViewHolder(mLayoutInflater.inflate(R.layout.all_apps_search_market_divider,
                    parent, false));
        case SEARCH_MARKET_VIEW_TYPE:
            View searchMarketView = mLayoutInflater.inflate(R.layout.all_apps_search_market,
                    parent, false);
            searchMarketView.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    mLauncher.startSearchFromAllApps(v, mMarketSearchIntent, mLastSearchQuery);
                }
            });
            return new ViewHolder(searchMarketView);
        case CUSTOM_PREDICTED_APPS_HEADER_VIEW_TYPE: {
            View v = mLayoutInflater.inflate(
                    R.layout.custom_predicted_apps_header, parent, false);
            FastScrollFocusApplicator.createApplicator(v,
                    FastScrollFocusable.FAST_SCROLL_FOCUS_DIMMABLE);
            ViewHolder holder = new ViewHolder(v);
            mRemoteFolderManager.onCreateViewHolder(holder, viewType);
            return holder;
        }
        case CUSTOM_PREDICTED_APPS_FOOTER_VIEW_TYPE: {
            View v = mLayoutInflater.inflate(R.layout.custom_predicted_apps_footer,
                    parent, false);
            ViewHolder holder = new ViewHolder(v);
            mRemoteFolderManager.onCreateViewHolder(holder, viewType);
            return holder;
        }
        default:
            throw new RuntimeException("Unexpected view type");
    }
}

/**
 * Applies the listener, long-press, focus and fast-scroll setup shared by
 * the plain-icon and prediction-icon view types.
 */
private void setUpIconView(BubbleTextView icon, ViewGroup parent, boolean hideIconLabels) {
    if (hideIconLabels) {
        // hideIconLabels is true on this path, so the label is always hidden
        // (the original passed !hideIconLabels, i.e. false, here).
        icon.setTextVisibility(false);
    }
    icon.setOnTouchListener(mTouchListener);
    icon.setOnClickListener(mIconClickListener);
    icon.setOnLongClickListener(mIconLongClickListener);
    icon.setLongPressTimeout(ViewConfiguration.get(parent.getContext())
            .getLongPressTimeout());
    icon.setFocusable(true);
    FastScrollFocusApplicator.createApplicator(icon,
            FastScrollFocusable.FAST_SCROLL_FOCUS_DIMMABLE |
            FastScrollFocusable.FAST_SCROLL_FOCUS_SCALABLE);
}
/**
 * Binds data into the row at {@code position}. The duplicated icon-binding
 * path for plain and prediction icons is factored into {@link #bindIconView}.
 */
@Override
public void onBindViewHolder(ViewHolder holder, int position) {
    boolean hideIconLabels = SettingsProvider.getBoolean(mLauncher,
            SettingsProvider.SETTINGS_UI_DRAWER_HIDE_ICON_LABELS,
            R.bool.preferences_interface_drawer_hide_icon_labels_default);
    // Reset fast-scroll state; the icon cases below re-apply it as needed.
    FastScrollFocusApplicator.setFastScrollDimmed(holder.mContent, false, false);
    FastScrollFocusApplicator.setFastScrollFocused(holder.mContent, false, false);
    switch (holder.getItemViewType()) {
        case ICON_VIEW_TYPE: {
            bindIconView(holder, position, hideIconLabels);
            break;
        }
        case PREDICTION_ICON_VIEW_TYPE: {
            AppInfo info = bindIconView(holder, position, hideIconLabels);
            mRemoteFolderManager.onBindViewHolder(holder, info);
            break;
        }
        case EMPTY_SEARCH_VIEW_TYPE: {
            TextView emptyViewText = (TextView) holder.mContent;
            emptyViewText.setText(mEmptySearchMessage);
            emptyViewText.setGravity(mApps.hasNoFilteredResults() ? Gravity.CENTER :
                    Gravity.START | Gravity.CENTER_VERTICAL);
            break;
        }
        case SEARCH_MARKET_VIEW_TYPE: {
            TextView searchView = (TextView) holder.mContent;
            if (mMarketSearchIntent != null) {
                searchView.setVisibility(View.VISIBLE);
                searchView.setContentDescription(mMarketSearchMessage);
                searchView.setGravity(mApps.hasNoFilteredResults() ? Gravity.CENTER :
                        Gravity.START | Gravity.CENTER_VERTICAL);
                searchView.setText(mMarketSearchMessage);
            } else {
                searchView.setVisibility(View.GONE);
            }
            break;
        }
        case CUSTOM_PREDICTED_APPS_HEADER_VIEW_TYPE: {
            TextView title = (TextView) holder.mContent.findViewById(R.id.title);
            title.setTextColor(mAllAppsTextColor);
            FastScrollFocusApplicator.setFastScrollDimmed(holder.mContent,
                    shouldDimPosition(position), !mIconsDimmed);
            FastScrollFocusApplicator.setFastScrollFocused(holder.mContent, false, !mIconsDimmed);
            // NOTE(review): getHeight() is read at bind time; it may be 0 until
            // the view has been laid out — confirm callers tolerate that.
            ViewGroup.MarginLayoutParams lp =
                    (ViewGroup.MarginLayoutParams) holder.mContent.getLayoutParams();
            mCustomPredictedAppsHeaderHeight = holder.mContent.getHeight() +
                    lp.topMargin + lp.bottomMargin;
            break;
        }
        case CUSTOM_PREDICTED_APPS_FOOTER_VIEW_TYPE: {
            ViewGroup.MarginLayoutParams lp =
                    (ViewGroup.MarginLayoutParams) holder.mContent.getLayoutParams();
            mCustomPredictedAppsFooterHeight = holder.mContent.getHeight() +
                    lp.topMargin + lp.bottomMargin;
            break;
        }
    }
}

/**
 * Binds the app icon at {@code position} into the holder's BubbleTextView,
 * applying text color, label visibility and fast-scroll dim/focus state.
 *
 * @return the bound AppInfo, for callers that need to forward it
 */
private AppInfo bindIconView(ViewHolder holder, int position, boolean hideIconLabels) {
    AppInfo info = mApps.getAdapterItems().get(position).appInfo;
    BubbleTextView icon = (BubbleTextView) holder.mContent;
    icon.setTextColor(mAllAppsTextColor);
    if (hideIconLabels) {
        icon.setTextVisibility(!hideIconLabels);
    }
    icon.applyFromApplicationInfo(info);
    FastScrollFocusApplicator.setFastScrollDimmed(icon, shouldDimPosition(position),
            !mIconsDimmed);
    FastScrollFocusApplicator.setFastScrollFocused(icon, false, !mIconsDimmed);
    return info;
}
private boolean shouldDimPosition(int position) {
if (mFocusedSection != null && mIconsDimmed) {
if (position >= mFocusedSection.firstAppItem.position &&
position < mFocusedSection.firstAppItem.position +
mFocusedSection.numApps) {
return false;
}
}
return mIconsDimmed;
}
public int getCustomPredictedAppsOffset(int rowIndex) {
int offset = mCustomPredictedAppsHeaderHeight;
if (rowIndex > 0) offset += mCustomPredictedAppsFooterHeight;
return offset;
}
    /** Total number of adapter items currently backing the list. */
    @Override
    public int getItemCount() {
        return mApps.getAdapterItems().size();
    }
@Override
public int getItemViewType(int position) {
AlphabeticalAppsList.AdapterItem item = mApps.getAdapterItems().get(position);
return item.viewType;
}
public void setIconsDimmed(boolean iconsDimmed) {
if (mIconsDimmed != iconsDimmed) {
mIconsDimmed = iconsDimmed;
notifyDataSetChanged();
}
}
    /**
     * Sets the section whose items stay undimmed while icons are dimmed
     * (see shouldDimPosition); may be null to clear the focus.
     */
    public void setFocusedSection(
            AlphabeticalAppsList.SectionInfo focusedSection) {
        mFocusedSection = focusedSection;
    }
public void setGridTheme(int gridTheme) {
mGridTheme = gridTheme;
int sectionTextColorId = mGridTheme == AllAppsContainerView.GRID_THEME_DARK ?
R.color.all_apps_grid_section_text_color_dark :
R.color.all_apps_grid_section_text_color;
mSectionTextPaint.setColor(mLauncher.getColor(sectionTextColorId));
mAllAppsTextColor = mGridTheme == AllAppsContainerView.GRID_THEME_DARK ?
mLauncher.getColor(R.color.quantum_panel_text_color_dark) :
mLauncher.getColor(R.color.quantum_panel_text_color);
int mPredictedAppsDividerColorId = mGridTheme == AllAppsContainerView.GRID_THEME_DARK ?
R.color.drawer_divider_dark : R.color.drawer_divider_light;
mPredictedAppsDividerPaint.setColor(mLauncher.getColor(mPredictedAppsDividerColorId));
}
/**
* Creates a new market search intent.
*/
private Intent createMarketSearchIntent(String query) {
Uri marketSearchUri = Uri.parse("market://search")
.buildUpon()
.appendQueryParameter("q", query)
.build();
Intent marketSearchIntent = new Intent(Intent.ACTION_VIEW);
marketSearchIntent.setData(marketSearchUri);
return marketSearchIntent;
}
}
| |
package com.smart.cloud.fire.view;
/**
* Created by Administrator on 2016/8/4.
*/
import android.content.Context;
import android.os.Handler;
import android.os.Message;
import android.view.GestureDetector;
import android.view.GestureDetector.SimpleOnGestureListener;
import android.view.MotionEvent;
import android.view.animation.Interpolator;
import android.widget.Scroller;
/**
 * Scroller class handles scrolling events and updates the wheel accordingly.
 */
public class WheelScroller {
/**
* Scrolling listener interface
*/
public interface ScrollingListener {
/**
* Scrolling callback called when scrolling is performed.
*
* @param distance
* the distance to scroll
*/
void onScroll(int distance);
/**
* Starting callback called when scrolling is started
*/
void onStarted();
/**
* Finishing callback called after justifying
*/
void onFinished();
/**
* Justifying callback called to justify a view when scrolling is ended
*/
void onJustify();
}
/** Scrolling duration */
private static final int SCROLLING_DURATION = 400;
/** Minimum delta for scrolling */
public static final int MIN_DELTA_FOR_SCROLLING = 1;
// Listener
private ScrollingListener listener;
// Context
private Context context;
// Scrolling
private GestureDetector gestureDetector;
private Scroller scroller;
private int lastScrollY;
private float lastTouchedY;
private boolean isScrollingPerformed;
/**
* Constructor
*
* @param context
* the current context
* @param listener
* the scrolling listener
*/
public WheelScroller(Context context, ScrollingListener listener) {
gestureDetector = new GestureDetector(context, gestureListener);
gestureDetector.setIsLongpressEnabled(false);
scroller = new Scroller(context);
this.listener = listener;
this.context = context;
}
/**
* Set the the specified scrolling interpolator
*
* @param interpolator
* the interpolator
*/
public void setInterpolator(Interpolator interpolator) {
scroller.forceFinished(true);
scroller = new Scroller(context, interpolator);
}
/**
* Scroll the wheel
*
* @param distance
* the scrolling distance
* @param time
* the scrolling duration
*/
public void scroll(int distance, int time) {
scroller.forceFinished(true);
lastScrollY = 0;
scroller.startScroll(0, 0, 0, distance, time != 0 ? time
: SCROLLING_DURATION);
setNextMessage(MESSAGE_SCROLL);
startScrolling();
}
/**
* Stops scrolling
*/
public void stopScrolling() {
scroller.forceFinished(true);
}
/**
* Handles Touch event
*
* @param event
* the motion event
* @return
*/
public boolean onTouchEvent(MotionEvent event) {
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
lastTouchedY = event.getY();
scroller.forceFinished(true);
clearMessages();
break;
case MotionEvent.ACTION_MOVE:
// perform scrolling
int distanceY = (int) (event.getY() - lastTouchedY);
if (distanceY != 0) {
startScrolling();
listener.onScroll(distanceY);
lastTouchedY = event.getY();
}
break;
}
if (!gestureDetector.onTouchEvent(event)
&& event.getAction() == MotionEvent.ACTION_UP) {
justify();
}
return true;
}
// gesture listener
private SimpleOnGestureListener gestureListener = new SimpleOnGestureListener() {
public boolean onScroll(MotionEvent e1, MotionEvent e2,
float distanceX, float distanceY) {
// Do scrolling in onTouchEvent() since onScroll() are not call
// immediately
// when user touch and move the wheel
return true;
}
public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX,
float velocityY) {
lastScrollY = 0;
final int maxY = 0x7FFFFFFF;
final int minY = -maxY;
scroller.fling(0, lastScrollY, 0, (int) -velocityY, 0, 0, minY,
maxY);
setNextMessage(MESSAGE_SCROLL);
return true;
}
};
// Messages
private final int MESSAGE_SCROLL = 0;
private final int MESSAGE_JUSTIFY = 1;
/**
* Set next message to queue. Clears queue before.
*
* @param message
* the message to set
*/
private void setNextMessage(int message) {
clearMessages();
animationHandler.sendEmptyMessage(message);
}
/**
* Clears messages from queue
*/
private void clearMessages() {
animationHandler.removeMessages(MESSAGE_SCROLL);
animationHandler.removeMessages(MESSAGE_JUSTIFY);
}
// animation handler
private Handler animationHandler = new Handler() {
public void handleMessage(Message msg) {
scroller.computeScrollOffset();
int currY = scroller.getCurrY();
int delta = lastScrollY - currY;
lastScrollY = currY;
if (delta != 0) {
listener.onScroll(delta);
}
// scrolling is not finished when it comes to final Y
// so, finish it manually
if (Math.abs(currY - scroller.getFinalY()) < MIN_DELTA_FOR_SCROLLING) {
currY = scroller.getFinalY();
scroller.forceFinished(true);
}
if (!scroller.isFinished()) {
animationHandler.sendEmptyMessage(msg.what);
} else if (msg.what == MESSAGE_SCROLL) {
justify();
} else {
finishScrolling();
}
}
};
/**
* Justifies wheel
*/
private void justify() {
listener.onJustify();
setNextMessage(MESSAGE_JUSTIFY);
}
/**
* Starts scrolling
*/
private void startScrolling() {
if (!isScrollingPerformed) {
isScrollingPerformed = true;
listener.onStarted();
}
}
/**
* Finishes scrolling
*/
void finishScrolling() {
if (isScrollingPerformed) {
listener.onFinished();
isScrollingPerformed = false;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.paging.impl;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.Executor;
import java.util.concurrent.ScheduledExecutorService;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.core.io.IOCriticalErrorListener;
import org.apache.activemq.artemis.core.io.SequentialFileFactory;
import org.apache.activemq.artemis.core.io.nio.NIOSequentialFileFactory;
import org.apache.activemq.artemis.core.paging.PagingManager;
import org.apache.activemq.artemis.core.paging.PagingStore;
import org.apache.activemq.artemis.core.paging.PagingStoreFactory;
import org.apache.activemq.artemis.core.paging.cursor.PageCursorProvider;
import org.apache.activemq.artemis.core.paging.cursor.impl.PageCursorProviderImpl;
import org.apache.activemq.artemis.core.persistence.StorageManager;
import org.apache.activemq.artemis.core.server.ActiveMQServerLogger;
import org.apache.activemq.artemis.core.server.files.FileMoveManager;
import org.apache.activemq.artemis.core.server.files.FileStoreMonitor;
import org.apache.activemq.artemis.core.settings.HierarchicalRepository;
import org.apache.activemq.artemis.core.settings.impl.AddressSettings;
import org.apache.activemq.artemis.utils.ExecutorFactory;
import org.apache.activemq.artemis.utils.FileUtil;
import org.apache.activemq.artemis.utils.UUIDGenerator;
import org.apache.activemq.artemis.utils.actors.ArtemisExecutor;
/**
* Integration point between Paging and NIO
*/
public class PagingStoreFactoryNIO implements PagingStoreFactory {

   // Constants -----------------------------------------------------

   // Marker file written into each paging-store folder holding the address name.
   private static final String ADDRESS_FILE = "address.txt";

   // Attributes ----------------------------------------------------

   private final File directory;

   private final ExecutorFactory executorFactory;

   private final boolean syncNonTransactional;

   private PagingManager pagingManager;

   private final ScheduledExecutorService scheduledExecutor;

   private final long syncTimeout;

   private final StorageManager storageManager;

   private final IOCriticalErrorListener critialErrorListener;

   public File getDirectory() {
      return directory;
   }

   public ExecutorFactory getExecutorFactory() {
      return executorFactory;
   }

   public boolean isSyncNonTransactional() {
      return syncNonTransactional;
   }

   public PagingManager getPagingManager() {
      return pagingManager;
   }

   public long getSyncTimeout() {
      return syncTimeout;
   }

   public StorageManager getStorageManager() {
      return storageManager;
   }

   public IOCriticalErrorListener getCritialErrorListener() {
      return critialErrorListener;
   }

   public PagingStoreFactoryNIO(final StorageManager storageManager,
                                final File directory,
                                final long syncTimeout,
                                final ScheduledExecutorService scheduledExecutor,
                                final ExecutorFactory executorFactory,
                                final boolean syncNonTransactional,
                                final IOCriticalErrorListener critialErrorListener) {
      this.storageManager = storageManager;
      this.directory = directory;
      this.executorFactory = executorFactory;
      this.syncNonTransactional = syncNonTransactional;
      this.scheduledExecutor = scheduledExecutor;
      this.syncTimeout = syncTimeout;
      this.critialErrorListener = critialErrorListener;
   }

   // Public --------------------------------------------------------

   @Override
   public ScheduledExecutorService getScheduledExecutor() {
      return scheduledExecutor;
   }

   @Override
   public Executor newExecutor() {
      return executorFactory.getExecutor();
   }

   @Override
   public void stop() {
   }

   @Override
   public void injectMonitor(FileStoreMonitor monitor) throws Exception {
      monitor.addStore(this.directory);
   }

   @Override
   public PageCursorProvider newCursorProvider(PagingStore store,
                                               StorageManager storageManager,
                                               AddressSettings addressSettings,
                                               ArtemisExecutor executor) {
      return new PageCursorProviderImpl(store, storageManager, executor, addressSettings.getPageCacheMaxSize());
   }

   @Override
   public synchronized PagingStore newStore(final SimpleString address, final AddressSettings settings) {
      // No file factory yet: the store creates its folder lazily on first use.
      return new PagingStoreImpl(address, scheduledExecutor, syncTimeout, pagingManager, storageManager, null, this, address, settings, executorFactory.getExecutor(), executorFactory.getExecutor(), syncNonTransactional);
   }

   /**
    * Creates a GUID-named folder for the address and records the address name
    * inside it (in {@code address.txt}) so the store can be matched back to
    * its address on restart.
    */
   @Override
   public synchronized SequentialFileFactory newFileFactory(final SimpleString address) throws Exception {

      String guid = UUIDGenerator.getInstance().generateStringUUID();

      SequentialFileFactory factory = newFileFactory(guid);

      factory.createDirs();

      File fileWithID = new File(directory, guid + File.separatorChar + PagingStoreFactoryNIO.ADDRESS_FILE);

      try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileWithID)))) {
         writer.write(address.toString());
         writer.newLine();
      }

      return factory;
   }

   @Override
   public synchronized void removeFileFactory(SequentialFileFactory fileFactory) throws Exception {
      File directory = fileFactory.getDirectory();
      if (directory.exists()) {
         FileUtil.deleteDirectory(directory);
      }
   }

   @Override
   public void setPagingManager(final PagingManager pagingManager) {
      this.pagingManager = pagingManager;
   }

   /**
    * Rebuilds the paging stores from the folders on disk, skipping folders
    * without a usable {@code address.txt} (e.g. replication copies).
    */
   @Override
   public List<PagingStore> reloadStores(final HierarchicalRepository<AddressSettings> addressSettingsRepository) throws Exception {
      File[] files = directory.listFiles();

      if (files == null) {
         return Collections.<PagingStore>emptyList();
      } else {
         ArrayList<PagingStore> storesReturn = new ArrayList<>(files.length);

         for (File file : files) {

            final String guid = file.getName();

            final File addressFile = new File(file, PagingStoreFactoryNIO.ADDRESS_FILE);

            if (!addressFile.exists()) {
               // This means this folder is from a replication copy, nothing to worry about it, we just skip it
               if (!file.getName().contains(FileMoveManager.PREFIX)) {
                  ActiveMQServerLogger.LOGGER.pageStoreFactoryNoIdFile(file.toString(), PagingStoreFactoryNIO.ADDRESS_FILE);
               }
               continue;
            }

            String addressString;

            try (BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(addressFile)))) {
               addressString = reader.readLine();
            }

            // BUG FIX: readLine() returns null when address.txt exists but is
            // empty (e.g. the broker crashed before the write was flushed);
            // building a SimpleString from null would fail. Treat it like a
            // missing id file: log and skip this folder.
            if (addressString == null) {
               ActiveMQServerLogger.LOGGER.pageStoreFactoryNoIdFile(file.toString(), PagingStoreFactoryNIO.ADDRESS_FILE);
               continue;
            }

            SimpleString address = new SimpleString(addressString);

            SequentialFileFactory factory = newFileFactory(guid);

            AddressSettings settings = addressSettingsRepository.getMatch(address.toString());

            PagingStore store = new PagingStoreImpl(address, scheduledExecutor, syncTimeout, pagingManager, storageManager, factory, this, address, settings, executorFactory.getExecutor(), executorFactory.getExecutor(), syncNonTransactional);

            storesReturn.add(store);
         }

         return storesReturn;
      }
   }

   protected SequentialFileFactory newFileFactory(final String directoryName) {
      return new NIOSequentialFileFactory(new File(directory, directoryName), false, critialErrorListener, 1);
   }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.usergrid.apm.service.charts.service;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hibernate.criterion.Criterion;
import org.hibernate.criterion.MatchMode;
import org.hibernate.criterion.Order;
import org.hibernate.criterion.ProjectionList;
import org.hibernate.criterion.Projections;
import org.hibernate.criterion.Restrictions;
import org.apache.usergrid.apm.service.charts.filter.AppsFilter;
import org.apache.usergrid.apm.service.charts.filter.EndPeriodFilter;
import org.apache.usergrid.apm.service.charts.filter.NetworkCarrierFilter;
import org.apache.usergrid.apm.service.charts.filter.NetworkTypeFilter;
import org.apache.usergrid.apm.service.charts.filter.SavedChartsFilter;
import org.apache.usergrid.apm.service.charts.filter.SpecialTimeFilter;
import org.apache.usergrid.apm.service.charts.filter.StartPeriodFilter;
import org.apache.usergrid.apm.service.charts.filter.TimeRangeFilter;
import org.apache.usergrid.apm.model.LogChartCriteria;
/**
 * Helper that builds Hibernate criteria, projections, orderings and raw SQL
 * fragments for log-based charts.
 */
public class LogChartUtil {

    private static final Log log = LogFactory.getLog(LogChartUtil.class);

    /**
     * Builds the WHERE, GROUP BY and ORDER BY fragments for a cached log
     * chart query. Only one grouping is honored so that the chart stays
     * readable.
     *
     * NOTE(review): the where clause is assembled by string concatenation.
     * The values used here (appId, chart criteria id, time bounds) appear to
     * be numeric, but if any ever become user-supplied strings this must be
     * parameterized to avoid SQL injection — verify upstream.
     */
    public static SqlOrderGroupWhere getOrdersAndGroupings (LogChartCriteria cq) {
        // StringBuilder instead of StringBuffer: these are method-local, so
        // no synchronization is needed.
        StringBuilder groupings = new StringBuilder();
        StringBuilder orders = new StringBuilder();
        StringBuilder whereClause = new StringBuilder();
        SqlOrderGroupWhere og = new SqlOrderGroupWhere();
        SpecialTimeFilter timeFilter = new SpecialTimeFilter(cq);
        whereClause.append("appId = " + cq.getAppId() + " AND chartCriteriaId = " + cq.getId() + " AND ");
        if (cq.hasGrouping()) {
            if (cq.isGroupedByApp())
            {
                orders.append("appId");
                groupings.append("appId");
            }
            else if (cq.isGroupedByNetworkType())
            {
                orders.append("networkType");
                groupings.append("networkType");
            }
            else if (cq.isGroupedByNetworkCarrier())
            {
                orders.append("networkCarrier");
                groupings.append("networkCarrier");
            }
            else if (cq.isGroupedByAppVersion()) {
                orders.append("applicationVersion");
                groupings.append("applicationVersion");
            }
            else if (cq.isGroupedByAppConfigType()) {
                orders.append("appConfigType");
                groupings.append("appConfigType");
            }
            else if (cq.isGroupedByDeviceModel()) {
                orders.append("deviceModel");
                groupings.append("deviceModel");
            }
            else if (cq.isGroupedbyDevicePlatform()) {
                orders.append("devicePlatform");
                groupings.append("devicePlatform");
            } else if (cq.isGroupedByDeviceOS()) {
                orders.append("deviceOperatingSystem");
                groupings.append("deviceOperatingSystem");
            }
        }
        // The time column is always the last grouping/ordering term.
        if (!groupings.toString().equals(""))
            groupings.append(',');
        if (!orders.toString().equals(""))
            orders.append(',');
        orders.append(timeFilter.getEndPropName());
        groupings.append(timeFilter.getEndPropName());
        whereClause.append(timeFilter.getEndPropName() + " >=" + timeFilter.getFrom() + " AND " +
                timeFilter.getEndPropName() + "<" + timeFilter.getTo());
        log.info ("Where clause is " + whereClause.toString());
        log.info ("GroupBy clause is " + groupings.toString());
        // Fixed log message typo ("claluse" -> "clause").
        log.info ("OrderBy clause is " + orders.toString());
        og.groupBy = groupings.toString();
        og.orderBy = orders.toString();
        og.whereClause = whereClause.toString();
        return og;
    }

    /**
     * Maps the chart criteria's sample period to the time-bucket column used
     * for ordering.
     */
    public static String getOrder (LogChartCriteria cq) {
        String order = null;
        switch (cq.getSamplePeriod())
        {
            //see http://stackoverflow.com/questions/84644/hibernate-query-by-example-and-projections on why "this." is needed
            //in following lines
            case MINUTE :
                order = "endMinute";
                break;
            case HOUR :
                order = "endHour";
                break;
            case DAY_WEEK :
                order = "endDay";
                break;
            case DAY_MONTH :
                // BUG FIX: previously returned "endWeek", which was
                // inconsistent with getOrdersForChartCriteria() and
                // getProjectionList() — both map DAY_MONTH to "endDay".
                order = "endDay";
                break;
            case MONTH :
                order = "endMonth";
                break;
        }
        return order;
    }

    /**
     * Builds the Hibernate filters for querying the pre-aggregated cache
     * table: narrows by saved-chart id first, then by the time window.
     */
    public static List<Criterion> getChartCriteriaForCacheTable(LogChartCriteria cq) {
        List<Criterion> filters = new ArrayList<Criterion>();
        //Narrow down by Chart Criteria ID and then by time
        if (cq.getId() != null)
            filters.add(new SavedChartsFilter(cq.getId()).getCriteria());
        filters.add(new SpecialTimeFilter(cq).getCriteria());
        return filters;
    }

    /**
     * Builds the Hibernate orderings for a chart query: the single active
     * grouping column (if any) followed by the sample-period time column.
     */
    public static List<Order> getOrdersForChartCriteria (LogChartCriteria cq) {
        List <Order> orders = new ArrayList <Order> ();
        if (cq.isGroupedByApp())
        {
            orders.add(Order.asc("appId"));
        }
        else if (cq.isGroupedByNetworkType())
        {
            orders.add(Order.asc("networkType"));
        }
        else if (cq.isGroupedByNetworkCarrier())
        {
            orders.add(Order.asc("networkCarrier"));
        }
        else if (cq.isGroupedByAppVersion()) {
            orders.add(Order.asc("applicationVersion"));
        }
        else if (cq.isGroupedByAppConfigType()) {
            orders.add(Order.asc("appConfigType"));
        }
        else if (cq.isGroupedByDeviceModel()) {
            orders.add(Order.asc("deviceModel"));
        }
        else if (cq.isGroupedbyDevicePlatform()) {
            orders.add(Order.asc("devicePlatform"));
        }
        else if (cq.isGroupedByDeviceOS()) {
            orders.add(Order.asc("deviceOperatingSystem"));
        }
        switch (cq.getSamplePeriod())
        {
            //see http://stackoverflow.com/questions/84644/hibernate-query-by-example-and-projections on why "this." is needed
            //in following lines
            case MINUTE : orders.add(Order.asc("endMinute")); break;
            case HOUR : orders.add(Order.asc("endHour"));break;
            case DAY_WEEK : orders.add(Order.asc("endDay"));break;
            case DAY_MONTH : orders.add(Order.asc("endDay"));break;
            case MONTH : orders.add(Order.asc("endMonth"));break;
        }
        return orders;
    }

    /**
     * Builds the projection (group-by) list. Only one grouping is allowed so
     * that the chart looks cleaner; only app/network groupings are projected
     * here.
     */
    public static ProjectionList getProjectionList(LogChartCriteria cq)
    {
        ProjectionList projList = Projections.projectionList();
        //Adding GroupBy. We will allow only one groupby so that chart looks cleaner.
        if (cq.isGroupedByApp())
        {
            projList.add(Projections.groupProperty("this.appId"),"appId");
        }
        else if (cq.isGroupedByNetworkType())
        {
            projList.add(Projections.groupProperty("this.networkType"),"networkType");
        }
        else if (cq.isGroupedByNetworkCarrier())
        {
            projList.add(Projections.groupProperty("this.networkCarrier"),"networkCarrier");
        }
        switch (cq.getSamplePeriod())
        {
            //see http://stackoverflow.com/questions/84644/hibernate-query-by-example-and-projections on why "this." is needed
            //in following lines
            case MINUTE : projList.add(Projections.groupProperty("this.endMinute"),"endMinute"); break;
            case HOUR : projList.add(Projections.groupProperty("this.endHour"),"endHour");break;
            case DAY_WEEK : projList.add(Projections.groupProperty("this.endDay"),"endDay");break;
            case DAY_MONTH : projList.add(Projections.groupProperty("this.endDay"),"endDay");break;
            case MONTH : projList.add(Projections.groupProperty("this.endMonth"),"endMonth");break;
        }
        //may run into this bug because of alias http://stackoverflow.com/questions/84644/hibernate-query-by-example-and-projections
        //And I did run into it. ouch. Fix was to add this.filedName !!
        return projList;
    }

    /**
     * Builds the Hibernate filters for a chart query: app id, time range,
     * then optional network carrier/type.
     */
    public static List<Criterion> getChartCriteriaList(LogChartCriteria cq)
    {
        List<Criterion> filters = new ArrayList<Criterion>();
        //Narrow down by app id first
        if (cq.getAppId() != null)
            filters.add(new AppsFilter(cq.getAppId()).getCriteria());
        //Then by time range
        filters.add(new StartPeriodFilter(cq).getCriteria());
        filters.add(new EndPeriodFilter(cq).getCriteria());
        if (cq.getNetworkCarrier() != null)
            filters.add(new NetworkCarrierFilter(cq.getNetworkCarrier()).getCriteria());
        if (cq.getNetworkType() != null)
            filters.add(new NetworkTypeFilter(cq.getNetworkType()).getCriteria());
        return filters;
    }

    /**
     * Builds the raw-SQL WHERE/ORDER BY/GROUP BY fragments for a raw log
     * query.
     *
     * NOTE(review): applicationVersion, logLevel and logMessage are
     * concatenated directly into the SQL fragment. If any of these are
     * user-controlled this is an SQL injection vector and must be
     * parameterized — flagging rather than changing behavior here.
     */
    public static SqlOrderGroupWhere getSqlStuff (LogRawCriteria lrc) {
        SqlOrderGroupWhere ogw = new SqlOrderGroupWhere();
        // SELECT count(*), log_level, log_message, max(time_stamp) from `ideawheel`.`CLIENT_LOG`
        //where app_Id=1 group by log_message order by log_level desc
        Long app_id = lrc.getChartCriteria().getAppId();
        String applicationVersion = lrc.getChartCriteria().getAppVersion();
        SpecialTimeFilter timeFilter = new SpecialTimeFilter(lrc.getChartCriteria());
        StringBuilder whereString = new StringBuilder();
        whereString.append("APP_ID =" + app_id );
        whereString.append((" AND " + timeFilter.getEndPropName() + " >=" + timeFilter.getFrom() + " AND " +
                timeFilter.getEndPropName() + "<=" + timeFilter.getTo()));
        if (applicationVersion != null)
            whereString.append(" AND applicationVersion ='" + applicationVersion + "'");
        if (lrc.getLogLevel() != null)
            whereString.append(" AND logLevel = " + lrc.getLogLevel());
        if (lrc.getLogMessage() != null)
            whereString.append (" AND logMessage like '%" + lrc.getLogMessage() + "%'");
        ogw.whereClause = whereString.toString();
        ogw.orderBy = timeFilter.getEndPropName() + " desc";
        ogw.groupBy = "logMessage";
        return ogw;
    }

    /**
     * Builds the Hibernate filters for querying raw log records: app id and
     * time range first, then every optional device/app/network attribute.
     * When no tag filter is given, crash records can be excluded.
     */
    public static List<Criterion> getRawLogCriteriaList (LogRawCriteria logRawCriteria) {
        LogChartCriteria cq = logRawCriteria.getChartCriteria();
        List<Criterion> filters = new ArrayList<Criterion>();
        //Narrow down by app id first
        if (cq.getAppId() != null)
            filters.add(new AppsFilter(cq.getAppId()).getCriteria());
        //Then by time range
        filters.add( new TimeRangeFilter(cq).getCriteria());
        if (cq.getDeviceId() != null)
            filters.add(Restrictions.like("deviceId", cq.getDeviceId().trim(),MatchMode.START));
        if (logRawCriteria.getLogLevel() !=null)
            filters.add( Restrictions.eq("logLevel",logRawCriteria.getLogLevel()));
        if(logRawCriteria.getTag() != null)
            filters.add(Restrictions.like("tag", logRawCriteria.getTag().trim(),MatchMode.START));
        else {
            if(logRawCriteria.isExcludeCrash())
                filters.add(Restrictions.ne("tag", "CRASH"));
        }
        if (cq.getAppVersion() != null)
            filters.add(Restrictions.like("applicationVersion", cq.getAppVersion().trim(),MatchMode.START));
        if (cq.getAppConfigType() != null)
            filters.add(Restrictions.like("appConfigType", cq.getAppConfigType(),MatchMode.START));
        if (cq.getDevicePlatform() != null)
            filters.add(Restrictions.like("devicePlatform", cq.getDevicePlatform().trim(), MatchMode.START));
        if (cq.getDeviceModel() != null)
            filters.add(Restrictions.like("deviceModel", cq.getDeviceModel().trim(),MatchMode.START));
        if (cq.getDeviceOS() != null)
            filters.add(Restrictions.like("deviceOperatingSystem", cq.getDeviceOS().trim(), MatchMode.START));
        if (cq.getNetworkCarrier() != null)
            filters.add(Restrictions.like("networkCarrier", cq.getNetworkCarrier().trim(), MatchMode.START));
        if (cq.getNetworkType() != null)
            filters.add(Restrictions.like("networkType", cq.getNetworkType().trim(), MatchMode.START));
        if (logRawCriteria.getLogMessage() != null)
            filters.add(Restrictions.like("logMessage", logRawCriteria.getLogMessage().trim(),MatchMode.ANYWHERE));
        return filters;
    }
}
| |
/**
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
*
*/
package org.jasig.portal.io.xml.eventaggr;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.jasig.portal.events.aggr.AcademicTermDetail;
import org.jasig.portal.events.aggr.AggregatedGroupConfig;
import org.jasig.portal.events.aggr.AggregatedIntervalConfig;
import org.jasig.portal.events.aggr.AggregationInterval;
import org.jasig.portal.events.aggr.EventDateTimeUtils;
import org.jasig.portal.events.aggr.IPortalEventAggregator;
import org.jasig.portal.events.aggr.QuarterDetail;
import org.jasig.portal.events.aggr.dao.IEventAggregationManagementDao;
import org.jasig.portal.events.aggr.dao.jpa.AcademicTermDetailImpl;
import org.jasig.portal.events.aggr.dao.jpa.QuarterDetailImpl;
import org.jasig.portal.events.aggr.groups.AggregatedGroupLookupDao;
import org.jasig.portal.events.aggr.groups.AggregatedGroupMapping;
import org.jasig.portal.io.xml.AbstractJaxbDataHandler;
import org.jasig.portal.io.xml.IPortalData;
import org.jasig.portal.io.xml.IPortalDataType;
import org.jasig.portal.io.xml.PortalDataKey;
import org.jasig.portal.utils.EnumNameComparator;
import org.jasig.portal.utils.SafeFilenameUtils;
import org.joda.time.DateMidnight;
import org.joda.time.MonthDay;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
/**
* @author Eric Dalquist
* @version $Id$
*/
public class EventAggregationConfigurationImporterExporter extends
AbstractJaxbDataHandler<ExternalEventAggregationConfiguration> {
private static final String SINGLE_DATA_ID = "CONFIG";
private EventAggregationConfigurationPortalDataType eventAggregationDataType;
private IEventAggregationManagementDao aggregationManagementDao;
private AggregatedGroupLookupDao aggregatedGroupLookupDao;
    /** Injects the DAO used to resolve aggregated group mappings on import. */
    @Autowired
    public void setAggregatedGroupLookupDao(AggregatedGroupLookupDao aggregatedGroupLookupDao) {
        this.aggregatedGroupLookupDao = aggregatedGroupLookupDao;
    }
    /** Injects the portal-data-type descriptor this handler exports/imports. */
    @Autowired
    public void setEventAggregationDataType(EventAggregationConfigurationPortalDataType eventAggregationDataType) {
        this.eventAggregationDataType = eventAggregationDataType;
    }
    /** Injects the DAO that stores aggregation interval/group configuration. */
    @Autowired
    public void setAggregationManagementDao(IEventAggregationManagementDao aggregationManagementDao) {
        this.aggregationManagementDao = aggregationManagementDao;
    }
    /** This handler only imports the single 4.0 event-aggregation data key. */
    @Override
    public Set<PortalDataKey> getImportDataKeys() {
        return Collections.singleton(EventAggregationConfigurationPortalDataType.IMPORT_40_DATA_KEY);
    }
    /** Returns the injected event-aggregation portal data type. */
    @Override
    public IPortalDataType getPortalDataType() {
        return this.eventAggregationDataType;
    }
@Override
public Iterable<? extends IPortalData> getPortalData() {
return ImmutableSet.of(new IPortalData() {
@Override
public String getDataId() {
return "Event Aggregation Configuration";
}
@Override
public String getDataTitle() {
return SINGLE_DATA_ID;
}
@Override
public String getDataDescription() {
return null;
}
});
}
    /**
     * Imports an external event-aggregation configuration: upserts interval
     * and group configs per aggregator type (deleting configs not present in
     * the import), then replaces quarter details (falling back to standard
     * quarters) and academic term details.
     *
     * Runs in the "aggrEventsTransactionManager" transaction so the whole
     * import is atomic.
     */
    @Transactional("aggrEventsTransactionManager")
    @Override
    public void importData(ExternalEventAggregationConfiguration data) {
        //Import interval configs
        // Snapshot the existing configs; entries still present here at the
        // end were not mentioned in the import and get deleted.
        final Set<AggregatedIntervalConfig> oldAggregatedIntervalConfigs = new HashSet<AggregatedIntervalConfig>(this.aggregationManagementDao.getAggregatedIntervalConfigs());
        for (final ExternalAggregatedIntervalConfig extAggregatedIntervalConfig : data.getAggregatedIntervalConfigs()) {
            final String aggregatorTypeName = extAggregatedIntervalConfig.getAggregatorType();
            final Class<? extends IPortalEventAggregator> aggregatorType = getAggregatorType(aggregatorTypeName);

            // Create-or-update per aggregator type.
            AggregatedIntervalConfig aggregatedIntervalConfig = this.aggregationManagementDao.getAggregatedIntervalConfig(aggregatorType);
            if (aggregatedIntervalConfig == null) {
                aggregatedIntervalConfig = this.aggregationManagementDao.createAggregatedIntervalConfig(aggregatorType);
            }

            //Remove the config from the old configs set, marking it as updated
            oldAggregatedIntervalConfigs.remove(aggregatedIntervalConfig);

            //Copy over excludes
            final Set<AggregationInterval> excluded = aggregatedIntervalConfig.getExcluded();
            excluded.clear();
            for (final ExternalAggregationInterval extInterval : extAggregatedIntervalConfig.getExcludes()) {
                excluded.add(convert(extInterval));
            }

            //Copy over includes
            final Set<AggregationInterval> included = aggregatedIntervalConfig.getIncluded();
            included.clear();
            for (final ExternalAggregationInterval extInterval : extAggregatedIntervalConfig.getIncludes()) {
                included.add(convert(extInterval));
            }

            this.aggregationManagementDao.updateAggregatedIntervalConfig(aggregatedIntervalConfig);
        }

        //Delete interval configs that were not updated
        for (final AggregatedIntervalConfig aggregatedIntervalConfig : oldAggregatedIntervalConfigs) {
            this.aggregationManagementDao.deleteAggregatedIntervalConfig(aggregatedIntervalConfig);
        }

        //Import Group configs
        // Same upsert-then-delete pattern as the interval configs above.
        final Set<AggregatedGroupConfig> oldAggregatedGroupConfigs = new HashSet<AggregatedGroupConfig>(this.aggregationManagementDao.getAggregatedGroupConfigs());
        for (final ExternalAggregatedGroupConfig extAggregatedGroupConfig : data.getAggregatedGroupConfigs()) {
            final String aggregatorTypeName = extAggregatedGroupConfig.getAggregatorType();
            final Class<? extends IPortalEventAggregator> aggregatorType = getAggregatorType(aggregatorTypeName);

            AggregatedGroupConfig aggregatedGroupConfig = this.aggregationManagementDao.getAggregatedGroupConfig(aggregatorType);
            if (aggregatedGroupConfig == null) {
                aggregatedGroupConfig = this.aggregationManagementDao.createAggregatedGroupConfig(aggregatorType);
            }

            //Remove the config from the old configs set, marking it as updated
            oldAggregatedGroupConfigs.remove(aggregatedGroupConfig);

            //Copy over excludes
            final Set<AggregatedGroupMapping> excluded = aggregatedGroupConfig.getExcluded();
            excluded.clear();
            for (final ExternalAggregatedGroupMapping extGroup : extAggregatedGroupConfig.getExcludes()) {
                excluded.add(convert(extGroup));
            }

            //Copy over includes
            final Set<AggregatedGroupMapping> included = aggregatedGroupConfig.getIncluded();
            included.clear();
            for (final ExternalAggregatedGroupMapping extGroup : extAggregatedGroupConfig.getIncludes()) {
                included.add(convert(extGroup));
            }

            this.aggregationManagementDao.updateAggregatedGroupConfig(aggregatedGroupConfig);
        }

        //Delete group configs that were not updated
        for (final AggregatedGroupConfig aggregatedGroupConfig : oldAggregatedGroupConfigs) {
            this.aggregationManagementDao.deleteAggregatedGroupConfig(aggregatedGroupConfig);
        }

        //Set quarter details if configured or set default quarters
        final List<ExternalQuarterDetail> extQuarterDetails = data.getQuarterDetails();
        final List<QuarterDetail> quarterDetails;
        if (!extQuarterDetails.isEmpty()) {
            quarterDetails = convertQuarterDetail(extQuarterDetails);
        }
        else {
            quarterDetails = EventDateTimeUtils.createStandardQuarters();
        }
        this.aggregationManagementDao.setQuarterDetails(quarterDetails);

        //Set academic term if configured
        // Lazy transform view: each external term is converted on access.
        final List<AcademicTermDetail> academicTerms = Lists.transform(data.getTermDetails(), new Function<ExternalTermDetail, AcademicTermDetail>() {
            public AcademicTermDetail apply(ExternalTermDetail externalTermDetail) {
                return new AcademicTermDetailImpl(
                        new DateMidnight(externalTermDetail.getStart()),
                        new DateMidnight(externalTermDetail.getEnd()),
                        externalTermDetail.getName());
            }
        });
        this.aggregationManagementDao.setAcademicTermDetails(academicTerms);
    }
/**
 * Converts external quarter configuration into the internal {@link QuarterDetail} model.
 *
 * @param externalQuarterDetails External quarter definitions to convert; may be empty
 * @return A new mutable list with one {@link QuarterDetailImpl} per input element
 */
protected List<QuarterDetail> convertQuarterDetail(List<ExternalQuarterDetail> externalQuarterDetails) {
    //Size the result from the actual input instead of assuming the standard 4 quarters
    final List<QuarterDetail> quarterDetails = new ArrayList<QuarterDetail>(externalQuarterDetails.size());
    for (final ExternalQuarterDetail externalQuarterDetail : externalQuarterDetails) {
        //Start/end are the MonthDay string form written by exportData (MonthDay.toString())
        quarterDetails.add(new QuarterDetailImpl(
                MonthDay.parse(externalQuarterDetail.getStart()),
                MonthDay.parse(externalQuarterDetail.getEnd()),
                externalQuarterDetail.getId()));
    }
    return quarterDetails;
}
/**
 * Looks up the persistent {@link AggregatedGroupMapping} matching an imported group mapping.
 */
protected AggregatedGroupMapping convert(ExternalAggregatedGroupMapping externalAggregatedGroupMapping) {
    final String groupService = externalAggregatedGroupMapping.getGroupService();
    final String groupName = externalAggregatedGroupMapping.getGroupName();
    return this.aggregatedGroupLookupDao.getGroupMapping(groupService, groupName);
}
/**
 * Maps an external aggregation interval constant to the internal enum constant of the same name.
 */
protected AggregationInterval convert(ExternalAggregationInterval externalAggregationInterval) {
    final String intervalName = externalAggregationInterval.name();
    return AggregationInterval.valueOf(intervalName);
}
/**
 * Resolves an aggregator class name to its {@link Class}, verifying that it implements
 * {@link IPortalEventAggregator}.
 *
 * @param aggregatorTypeName Fully qualified class name of the aggregator
 * @return The resolved aggregator class
 * @throws RuntimeException if the name cannot be resolved to a class
 * @throws IllegalArgumentException if the class is not an {@link IPortalEventAggregator}
 */
protected Class<? extends IPortalEventAggregator> getAggregatorType(final String aggregatorTypeName) {
    final Class<?> aggregatorType;
    try {
        aggregatorType = Class.forName(aggregatorTypeName);
    }
    catch (ClassNotFoundException e) {
        throw new RuntimeException("Specified aggregator type name " + aggregatorTypeName + " could not be resolved to a Class", e);
    }
    if (!IPortalEventAggregator.class.isAssignableFrom(aggregatorType)) {
        throw new IllegalArgumentException("Specified aggregator type " + aggregatorType.getName() + " is not an instance of " + IPortalEventAggregator.class.getName());
    }
    //The isAssignableFrom check above makes this narrowing cast safe
    @SuppressWarnings("unchecked")
    final Class<? extends IPortalEventAggregator> checkedType = (Class<? extends IPortalEventAggregator>) aggregatorType;
    return checkedType;
}
/**
 * Exports the complete event aggregation configuration: interval configs, group configs,
 * academic terms and quarter details. Every list is sorted so output is stable across runs.
 *
 * (non-Javadoc)
 * @see org.jasig.portal.io.xml.IDataImporterExporter#exportData(java.lang.String)
 */
@Override
public ExternalEventAggregationConfiguration exportData(String id) {
    final ExternalEventAggregationConfiguration externalData = new ExternalEventAggregationConfiguration();
    //Copy interval configs
    final List<ExternalAggregatedIntervalConfig> aggregatedIntervalConfigs = externalData.getAggregatedIntervalConfigs();
    for (final AggregatedIntervalConfig aggregatedIntervalConfig : this.aggregationManagementDao.getAggregatedIntervalConfigs()) {
        final ExternalAggregatedIntervalConfig externalIntervalConfig = new ExternalAggregatedIntervalConfig();
        externalIntervalConfig.setAggregatorType(aggregatedIntervalConfig.getAggregatorType().getName());
        final List<ExternalAggregationInterval> extIncludes = externalIntervalConfig.getIncludes();
        for (final AggregationInterval interval : aggregatedIntervalConfig.getIncluded()) {
            extIncludes.add(convert(interval));
        }
        Collections.sort(extIncludes, EnumNameComparator.INSTANCE);
        final List<ExternalAggregationInterval> extExcludes = externalIntervalConfig.getExcludes();
        for (final AggregationInterval interval : aggregatedIntervalConfig.getExcluded()) {
            extExcludes.add(convert(interval));
        }
        Collections.sort(extExcludes, EnumNameComparator.INSTANCE);
        aggregatedIntervalConfigs.add(externalIntervalConfig);
    }
    Collections.sort(aggregatedIntervalConfigs, ExternalAggregatedDimensionConfigComparator.INSTANCE);
    //Copy group configs (loop variable renamed from "Group" to follow lowerCamelCase)
    final List<ExternalAggregatedGroupConfig> aggregatedGroupConfigs = externalData.getAggregatedGroupConfigs();
    for (final AggregatedGroupConfig aggregatedGroupConfig : this.aggregationManagementDao.getAggregatedGroupConfigs()) {
        final ExternalAggregatedGroupConfig externalGroupConfig = new ExternalAggregatedGroupConfig();
        externalGroupConfig.setAggregatorType(aggregatedGroupConfig.getAggregatorType().getName());
        final List<ExternalAggregatedGroupMapping> extIncludes = externalGroupConfig.getIncludes();
        for (final AggregatedGroupMapping groupMapping : aggregatedGroupConfig.getIncluded()) {
            extIncludes.add(convert(groupMapping));
        }
        Collections.sort(extIncludes, ExternalAggregatedGroupMappingComparator.INSTANCE);
        final List<ExternalAggregatedGroupMapping> extExcludes = externalGroupConfig.getExcludes();
        for (final AggregatedGroupMapping groupMapping : aggregatedGroupConfig.getExcluded()) {
            extExcludes.add(convert(groupMapping));
        }
        Collections.sort(extExcludes, ExternalAggregatedGroupMappingComparator.INSTANCE);
        aggregatedGroupConfigs.add(externalGroupConfig);
    }
    Collections.sort(aggregatedGroupConfigs, ExternalAggregatedDimensionConfigComparator.INSTANCE);
    //Copy term details
    final List<ExternalTermDetail> externalTermDetails = externalData.getTermDetails();
    for (final AcademicTermDetail academicTermDetail : this.aggregationManagementDao.getAcademicTermDetails()) {
        final ExternalTermDetail externalTermDetail = new ExternalTermDetail();
        externalTermDetail.setName(academicTermDetail.getTermName());
        externalTermDetail.setStart(academicTermDetail.getStart().toGregorianCalendar());
        externalTermDetail.setEnd(academicTermDetail.getEnd().toGregorianCalendar());
        externalTermDetails.add(externalTermDetail);
    }
    Collections.sort(externalTermDetails, ExternalTermDetailComparator.INSTANCE);
    //Copy quarter details
    final List<ExternalQuarterDetail> quarterDetails = externalData.getQuarterDetails();
    for (final QuarterDetail quarterDetail : this.aggregationManagementDao.getQuartersDetails()) {
        final ExternalQuarterDetail externalQuarterDetail = new ExternalQuarterDetail();
        externalQuarterDetail.setId(quarterDetail.getQuarterId());
        externalQuarterDetail.setStart(quarterDetail.getStart().toString());
        externalQuarterDetail.setEnd(quarterDetail.getEnd().toString());
        quarterDetails.add(externalQuarterDetail);
    }
    Collections.sort(quarterDetails, ExternalQuarterDetailComparator.INSTANCE);
    return externalData;
}
/**
 * Maps an internal aggregation interval constant to the external enum constant of the same name.
 */
protected ExternalAggregationInterval convert(AggregationInterval aggregationInterval) {
    final String intervalName = aggregationInterval.name();
    return ExternalAggregationInterval.valueOf(intervalName);
}
/**
 * Copies an internal group mapping into a new external DTO carrying the same
 * group service and group name.
 */
protected ExternalAggregatedGroupMapping convert(AggregatedGroupMapping aggregatedGroupMapping) {
    final ExternalAggregatedGroupMapping externalMapping = new ExternalAggregatedGroupMapping();
    externalMapping.setGroupService(aggregatedGroupMapping.getGroupService());
    externalMapping.setGroupName(aggregatedGroupMapping.getGroupName());
    return externalMapping;
}
/**
 * Returns the export file name. There is only ever a single aggregation configuration,
 * so a fixed base name is used regardless of the data's content.
 */
@Override
public String getFileName(ExternalEventAggregationConfiguration data) {
    final String baseName = "default";
    return SafeFilenameUtils.makeSafeFilename(baseName);
}
/*
 * Exports the current configuration as a snapshot, then removes all interval and group
 * configs, clears academic terms, and restores the standard quarter definitions.
 *
 * (non-Javadoc)
 * @see org.jasig.portal.io.xml.IDataImporterExporter#deleteData(java.lang.String)
 */
@Transactional("aggrEventsTransactionManager")
@Override
public ExternalEventAggregationConfiguration deleteData(String id) {
    //Snapshot the configuration before deleting so it can be returned to the caller
    final ExternalEventAggregationConfiguration snapshot = this.exportData(id);
    //Drop every interval config
    for (final AggregatedIntervalConfig intervalConfig : this.aggregationManagementDao.getAggregatedIntervalConfigs()) {
        this.aggregationManagementDao.deleteAggregatedIntervalConfig(intervalConfig);
    }
    //Drop every group config
    for (final AggregatedGroupConfig groupConfig : this.aggregationManagementDao.getAggregatedGroupConfigs()) {
        this.aggregationManagementDao.deleteAggregatedGroupConfig(groupConfig);
    }
    //Clear terms and reset quarters to the standard calendar quarters
    this.aggregationManagementDao.setAcademicTermDetails(Collections.<AcademicTermDetail>emptyList());
    this.aggregationManagementDao.setQuarterDetails(EventDateTimeUtils.createStandardQuarters());
    return snapshot;
}
}
| |
// Decompiled by Jad v1.5.8e. Copyright 2001 Pavel Kouznetsov.
// Jad home page: http://www.geocities.com/kpdus/jad.html
// Decompiler options: braces fieldsfirst space lnc
package android.support.v4.graphics.drawable;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapShader;
import android.graphics.Canvas;
import android.graphics.ColorFilter;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.drawable.Drawable;
import android.util.DisplayMetrics;
/**
 * Drawable that renders a bitmap with rounded corners (or as a circle) by drawing a
 * round-rect through a BitmapShader.
 * NOTE(review): this is machine-decompiled (Jad) code; the symbolic meaning of the raw
 * integer constants below is inferred and should be confirmed against the original source.
 */
public abstract class RoundedBitmapDrawable extends Drawable
{
//Presumably Paint.ANTI_ALIAS_FLAG | Paint.FILTER_BITMAP_FLAG (1|2) — TODO confirm
private static final int DEFAULT_PAINT_FLAGS = 3;
//True when bounds/gravity/density changed and mDstRect must be recomputed in updateDstRect()
private boolean mApplyGravity;
final Bitmap mBitmap;
//Bitmap dimensions scaled to mTargetDensity; -1 when there is no bitmap
private int mBitmapHeight;
private final BitmapShader mBitmapShader;
private int mBitmapWidth;
private float mCornerRadius;
//Destination rect (and its float copy) that the bitmap is drawn into
final Rect mDstRect = new Rect();
private final RectF mDstRectF = new RectF();
private int mGravity;
private boolean mIsCircular;
//NOTE(review): literal 3 repeats DEFAULT_PAINT_FLAGS' value; presumably the same flags
private final Paint mPaint = new Paint(3);
//Maps shader coordinates so the bitmap fills mDstRectF
private final Matrix mShaderMatrix = new Matrix();
private int mTargetDensity;
//resources may be null; bitmap may be null (drawable then draws nothing and reports -1 sizes)
RoundedBitmapDrawable(Resources resources, Bitmap bitmap)
{
//160 is presumably DisplayMetrics.DENSITY_DEFAULT; 119 presumably Gravity.FILL — TODO confirm
mTargetDensity = 160;
mGravity = 119;
mApplyGravity = true;
if (resources != null)
{
mTargetDensity = resources.getDisplayMetrics().densityDpi;
}
mBitmap = bitmap;
if (mBitmap != null)
{
computeBitmapSize();
mBitmapShader = new BitmapShader(mBitmap, android.graphics.Shader.TileMode.CLAMP, android.graphics.Shader.TileMode.CLAMP);
return;
} else
{
//No bitmap: mark sizes invalid and skip shader creation
mBitmapHeight = -1;
mBitmapWidth = -1;
mBitmapShader = null;
return;
}
}
//Caches the bitmap's width/height scaled to the current target density
private void computeBitmapSize()
{
mBitmapWidth = mBitmap.getScaledWidth(mTargetDensity);
mBitmapHeight = mBitmap.getScaledHeight(mTargetDensity);
}
//Treats radii at or below 0.05px as effectively zero
private static boolean isGreaterThanZero(float f)
{
return f > 0.05F;
}
//Circle radius is half the smaller scaled bitmap dimension (integer division, then widened)
private void updateCircularCornerRadius()
{
mCornerRadius = Math.min(mBitmapHeight, mBitmapWidth) / 2;
}
public void draw(Canvas canvas)
{
Bitmap bitmap = mBitmap;
if (bitmap == null)
{
//Nothing to render without a bitmap
return;
}
updateDstRect();
if (mPaint.getShader() == null)
{
//No shader installed (corner radius effectively zero): plain bitmap draw
canvas.drawBitmap(bitmap, null, mDstRect, mPaint);
return;
} else
{
//Shader installed: rounded corners are produced by the round-rect clip of the shader
canvas.drawRoundRect(mDstRectF, mCornerRadius, mCornerRadius, mPaint);
return;
}
}
public int getAlpha()
{
return mPaint.getAlpha();
}
/** Returns the bitmap this drawable renders, possibly null. */
public final Bitmap getBitmap()
{
return mBitmap;
}
public ColorFilter getColorFilter()
{
return mPaint.getColorFilter();
}
public float getCornerRadius()
{
return mCornerRadius;
}
public int getGravity()
{
return mGravity;
}
public int getIntrinsicHeight()
{
return mBitmapHeight;
}
public int getIntrinsicWidth()
{
return mBitmapWidth;
}
//-1/-3 are presumably PixelFormat.OPAQUE/PixelFormat.TRANSLUCENT — TODO confirm
public int getOpacity()
{
Bitmap bitmap;
if (mGravity == 119 && !mIsCircular)
{
//Opaque only when filling bounds, fully opaque paint, no alpha channel and no rounding
if ((bitmap = mBitmap) != null && !bitmap.hasAlpha() && mPaint.getAlpha() >= 255 && !isGreaterThanZero(mCornerRadius))
{
return -1;
}
}
return -3;
}
/** Exposes the paint used for drawing; mutations affect subsequent draws. */
public final Paint getPaint()
{
return mPaint;
}
//Gravity application is delegated to platform-specific subclasses
void gravityCompatApply(int i, int j, int k, Rect rect, Rect rect1)
{
throw new UnsupportedOperationException();
}
public boolean hasAntiAlias()
{
return mPaint.isAntiAlias();
}
//Mip-map support is delegated to platform-specific subclasses
public boolean hasMipMap()
{
throw new UnsupportedOperationException();
}
public boolean isCircular()
{
return mIsCircular;
}
protected void onBoundsChange(Rect rect)
{
super.onBoundsChange(rect);
if (mIsCircular)
{
updateCircularCornerRadius();
}
//Force destination rect recomputation on next draw
mApplyGravity = true;
}
public void setAlpha(int i)
{
//Only invalidate when the value actually changes
if (i != mPaint.getAlpha())
{
mPaint.setAlpha(i);
invalidateSelf();
}
}
public void setAntiAlias(boolean flag)
{
mPaint.setAntiAlias(flag);
invalidateSelf();
}
public void setCircular(boolean flag)
{
mIsCircular = flag;
mApplyGravity = true;
if (flag)
{
//Enable circular rendering: radius from bitmap size, shader installed
updateCircularCornerRadius();
mPaint.setShader(mBitmapShader);
invalidateSelf();
return;
} else
{
//Disable circular rendering by dropping back to a zero corner radius
setCornerRadius(0.0F);
return;
}
}
public void setColorFilter(ColorFilter colorfilter)
{
mPaint.setColorFilter(colorfilter);
invalidateSelf();
}
public void setCornerRadius(float f)
{
if (mCornerRadius == f)
{
//No change; avoid an unnecessary invalidate
return;
}
//An explicit radius always cancels circular mode
mIsCircular = false;
if (isGreaterThanZero(f))
{
mPaint.setShader(mBitmapShader);
} else
{
//Near-zero radius: plain bitmap draw path in draw()
mPaint.setShader(null);
}
mCornerRadius = f;
invalidateSelf();
}
public void setDither(boolean flag)
{
mPaint.setDither(flag);
invalidateSelf();
}
public void setFilterBitmap(boolean flag)
{
mPaint.setFilterBitmap(flag);
invalidateSelf();
}
public void setGravity(int i)
{
if (mGravity != i)
{
mGravity = i;
mApplyGravity = true;
invalidateSelf();
}
}
//Mip-map support is delegated to platform-specific subclasses
public void setMipMap(boolean flag)
{
throw new UnsupportedOperationException();
}
public void setTargetDensity(int i)
{
if (mTargetDensity != i)
{
//0 falls back to the default density (presumably DENSITY_DEFAULT = 160)
int j = i;
if (i == 0)
{
j = 160;
}
mTargetDensity = j;
if (mBitmap != null)
{
computeBitmapSize();
}
invalidateSelf();
}
}
public void setTargetDensity(Canvas canvas)
{
setTargetDensity(canvas.getDensity());
}
public void setTargetDensity(DisplayMetrics displaymetrics)
{
setTargetDensity(displaymetrics.densityDpi);
}
//Recomputes mDstRect/mDstRectF and the shader matrix when gravity/bounds changed
void updateDstRect()
{
if (mApplyGravity)
{
if (mIsCircular)
{
//Fit a square of the smaller bitmap dimension, then center it in the applied rect
int i = Math.min(mBitmapWidth, mBitmapHeight);
gravityCompatApply(mGravity, i, i, getBounds(), mDstRect);
i = Math.min(mDstRect.width(), mDstRect.height());
int j = Math.max(0, (mDstRect.width() - i) / 2);
int k = Math.max(0, (mDstRect.height() - i) / 2);
mDstRect.inset(j, k);
mCornerRadius = 0.5F * (float)i;
} else
{
gravityCompatApply(mGravity, mBitmapWidth, mBitmapHeight, getBounds(), mDstRect);
}
mDstRectF.set(mDstRect);
if (mBitmapShader != null)
{
//Scale/translate the shader so the bitmap exactly fills the destination rect
mShaderMatrix.setTranslate(mDstRectF.left, mDstRectF.top);
mShaderMatrix.preScale(mDstRectF.width() / (float)mBitmap.getWidth(), mDstRectF.height() / (float)mBitmap.getHeight());
mBitmapShader.setLocalMatrix(mShaderMatrix);
mPaint.setShader(mBitmapShader);
}
mApplyGravity = false;
}
}
}
| |
// Copyright 2020 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package build.buildfarm.instance.shard;
import build.bazel.remote.execution.v2.Digest;
import build.buildfarm.common.DigestUtil;
import build.buildfarm.common.ScanCount;
import build.buildfarm.common.redis.RedisClient;
import com.google.common.collect.ImmutableMap;
import java.io.IOException;
import java.util.Map;
import java.util.Set;
import redis.clients.jedis.JedisClusterPipeline;
/**
* @class JedisCasWorkerMap
* @brief A mapping from blob digest to the workers where the blobs reside.
* @details This is used to identify the location of blobs within the shard. {blob digest ->
* set(worker1,worker2)}.
*/
/**
 * Redis-backed {@link CasWorkerMap}: maps a blob digest to the set of workers expected to
 * hold that blob ({blob digest -> set(worker1, worker2)}). Each entry lives in a redis set
 * keyed by "&lt;name&gt;:&lt;digest&gt;" and expires automatically after the configured
 * number of seconds. All calls go directly to redis; there is no local caching.
 */
public class JedisCasWorkerMap implements CasWorkerMap {
  /**
   * Unique key prefix for this map in redis. Two maps constructed with the same name are
   * views of the same underlying redis data.
   */
  private final String name;
  /** Time-to-live, in seconds, applied to every key that is added or refreshed. */
  private final int keyExpiration_s;

  /**
   * Creates a map whose operations always go to redis (no caching).
   *
   * @param name global name of the map
   * @param keyExpiration_s key time-to-live in seconds
   */
  public JedisCasWorkerMap(String name, int keyExpiration_s) {
    this.name = name;
    this.keyExpiration_s = keyExpiration_s;
  }

  /**
   * Applies a batch of worker additions and removals for one blob and refreshes the key's
   * expiration.
   *
   * @param client redis client to run against
   * @param blobDigest digest whose worker set is adjusted
   * @param addWorkers workers to add
   * @param removeWorkers workers to remove
   */
  @Override
  public void adjust(
      RedisClient client, Digest blobDigest, Set<String> addWorkers, Set<String> removeWorkers)
      throws IOException {
    String setKey = redisCasKey(blobDigest);
    client.run(
        jedis -> {
          for (String added : addWorkers) {
            jedis.sadd(setKey, added);
          }
          for (String removed : removeWorkers) {
            jedis.srem(setKey, removed);
          }
          jedis.expire(setKey, keyExpiration_s);
        });
  }

  /**
   * Records that a worker holds a blob, creating the key if needed and refreshing its
   * expiration.
   */
  @Override
  public void add(RedisClient client, Digest blobDigest, String workerName) throws IOException {
    String setKey = redisCasKey(blobDigest);
    client.run(
        jedis -> {
          jedis.sadd(setKey, workerName);
          jedis.expire(setKey, keyExpiration_s);
        });
  }

  /**
   * Records that a worker holds each of the given blobs, pipelined into a single round trip.
   * Expirations are refreshed for every key touched.
   */
  @Override
  public void addAll(RedisClient client, Iterable<Digest> blobDigests, String workerName)
      throws IOException {
    client.run(
        jedis -> {
          JedisClusterPipeline pipeline = jedis.pipelined();
          for (Digest blobDigest : blobDigests) {
            String setKey = redisCasKey(blobDigest);
            pipeline.sadd(setKey, workerName);
            pipeline.expire(setKey, keyExpiration_s);
          }
          pipeline.sync();
        });
  }

  /**
   * Removes a worker from a blob's worker set. No effect when the key or member is already
   * absent.
   */
  @Override
  public void remove(RedisClient client, Digest blobDigest, String workerName) throws IOException {
    String setKey = redisCasKey(blobDigest);
    client.run(jedis -> jedis.srem(setKey, workerName));
  }

  /**
   * Removes a worker from each of the given blobs' worker sets, pipelined into a single
   * round trip. Missing keys or members are ignored.
   */
  @Override
  public void removeAll(RedisClient client, Iterable<Digest> blobDigests, String workerName)
      throws IOException {
    client.run(
        jedis -> {
          JedisClusterPipeline pipeline = jedis.pipelined();
          for (Digest blobDigest : blobDigests) {
            pipeline.srem(redisCasKey(blobDigest), workerName);
          }
          pipeline.sync();
        });
  }

  /**
   * Picks a random worker holding the blob (redis SRANDMEMBER).
   *
   * @return a worker name for the blob's location
   */
  @Override
  public String getAny(RedisClient client, Digest blobDigest) throws IOException {
    String setKey = redisCasKey(blobDigest);
    return client.call(jedis -> jedis.srandmember(setKey));
  }

  /**
   * Returns every worker recorded for the blob; empty when the blob's location is unknown.
   */
  @Override
  public Set<String> get(RedisClient client, Digest blobDigest) throws IOException {
    String setKey = redisCasKey(blobDigest);
    return client.call(jedis -> jedis.smembers(setKey));
  }

  /**
   * Builds a digest -> workers map for the given digests. Digests with no recorded workers
   * are omitted from the result.
   */
  @Override
  public Map<Digest, Set<String>> getMap(RedisClient client, Iterable<Digest> blobDigests)
      throws IOException {
    ImmutableMap.Builder<Digest, Set<String>> result = new ImmutableMap.Builder<>();
    client.run(
        jedis -> {
          for (Digest blobDigest : blobDigests) {
            Set<String> workers = jedis.smembers(redisCasKey(blobDigest));
            if (!workers.isEmpty()) {
              result.put(blobDigest, workers);
            }
          }
        });
    return result.build();
  }

  /**
   * Counts the keys owned by this map by scanning "&lt;name&gt;:*"; may be expensive since
   * it scans and deduplicates in memory.
   */
  // NOTE(review): unlike the other CasWorkerMap methods this one carries no @Override —
  // confirm whether size() is declared on the interface.
  public int size(RedisClient client) throws IOException {
    return client.call(jedis -> ScanCount.get(jedis, name + ":*", 1000));
  }

  /** Builds the redis key for a digest: "&lt;name&gt;:&lt;digest-string&gt;". */
  private String redisCasKey(Digest blobDigest) {
    return String.format("%s:%s", name, DigestUtil.toString(blobDigest));
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.compression;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import org.junit.Test;
import org.apache.geode.cache.DataPolicy;
import org.apache.geode.cache.Region;
import org.apache.geode.compression.Compressor;
import org.apache.geode.compression.SnappyCompressor;
import org.apache.geode.internal.cache.CachedDeserializableFactory;
import org.apache.geode.internal.cache.EntryEventImpl;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.SerializableCallable;
import org.apache.geode.test.dunit.SerializableRunnable;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.cache.internal.JUnit4CacheTestCase;
/**
* Tests basic region operations with compression enabled.
*/
public class CompressionRegionOperationsDUnitTest extends JUnit4CacheTestCase {
/**
* The name of our test region.
*/
public static final String REGION_NAME = "compressedRegion";
/**
* Test virtual machine number.
*/
public static final int TEST_VM = 0;
/**
* A key.
*/
public static final String KEY_1 = "key1";
/**
* Another key.
*/
public static final String KEY_2 = "key2";
/**
* Yet another key.
*/
public static final String KEY_3 = "key3";
/**
* A value.
*/
public static final String VALUE_1 =
"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aliquam auctor bibendum tempus. Suspendisse potenti. Ut enim neque, mattis et mattis ac, vulputate quis leo. Cras a metus metus, eget cursus ipsum. Aliquam sagittis condimentum massa aliquet rhoncus. Aliquam sed luctus neque. In hac habitasse platea dictumst.";
/**
* Another value.
*/
private static final String VALUE_2 =
"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Praesent sit amet lorem consequat est commodo lacinia. Duis tortor sem, facilisis quis tempus in, luctus lacinia metus. Vivamus augue justo, porttitor in vulputate accumsan, adipiscing sit amet sem. Quisque faucibus porta ipsum in pellentesque. Donec malesuada ultrices sapien sit amet tempus. Sed fringilla ipsum at tellus condimentum et hendrerit arcu pretium. Nulla non leo ligula. Etiam commodo tempor ligula non placerat. Vivamus vestibulum varius arcu a varius. Duis sit amet erat imperdiet dui mattis auctor et id orci. Suspendisse non elit augue. Quisque ac orci turpis, nec sollicitudin justo. Sed bibendum justo ut lacus aliquet lacinia et et neque. Proin hendrerit varius mauris vel lacinia. Proin pellentesque lacus vitae nisl euismod bibendum.";
/**
* Yet another value.
*/
private static final String VALUE_3 =
"In ut nisi nisi, eu malesuada mauris. Vestibulum nec tellus felis. Pellentesque mauris ligula, pretium nec consequat ut, adipiscing non lorem. Vivamus pulvinar viverra nisl, sit amet vestibulum tellus lobortis in. Pellentesque blandit ipsum sed neque rhoncus eu tristique risus porttitor. Vivamus molestie dapibus mi in lacinia. Suspendisse bibendum, purus at gravida accumsan, libero turpis elementum leo, eget posuere purus nibh ac dolor.";
/**
* A map of key, value pairs.
*/
private static Map<String, String> putAllMap = new HashMap<String, String>();
/**
* A map of key, value pairs.
*/
private static Map<String, Object> putAllMap2 = new HashMap<String, Object>();
/**
* A map of key, value pairs.
*/
private static Map<String, byte[]> putAllMap3 = new HashMap<String, byte[]>();
/**
* A collection of keys.
*/
private static Collection<String> getAllCollection = new HashSet<String>();
/**
 * Populates the put all map and key collection.
 */
static {
//Plain String values keyed for putAll tests
putAllMap.put(KEY_1, VALUE_1);
putAllMap.put(KEY_2, VALUE_2);
putAllMap.put(KEY_3, VALUE_3);
//Pre-serialized CachedDeserializable values; the null cache argument is presumably
//acceptable at class-initialization time — TODO confirm against CachedDeserializableFactory
putAllMap2.put(KEY_1,
CachedDeserializableFactory.create(EntryEventImpl.serialize(VALUE_1), null));
putAllMap2.put(KEY_2,
CachedDeserializableFactory.create(EntryEventImpl.serialize(VALUE_2), null));
putAllMap2.put(KEY_3,
CachedDeserializableFactory.create(EntryEventImpl.serialize(VALUE_3), null));
//Raw byte[] values for the byte-array variants of the tests
putAllMap3.put(KEY_1, VALUE_1.getBytes());
putAllMap3.put(KEY_2, VALUE_2.getBytes());
putAllMap3.put(KEY_3, VALUE_3.getBytes());
//Keys used by the getAll tests
getAllCollection.add(KEY_1);
getAllCollection.add(KEY_2);
getAllCollection.add(KEY_3);
}
//Creates the compressed region in the test VM before each test runs
@Override
public final void postSetUp() throws Exception {
createRegion();
}
/**
 * Creates the compressed test region on the test VM using the Snappy compressor.
 * Subclasses may override to use a different compressor or region shape.
 */
protected void createRegion() {
createCompressedRegionOnVm(getVM(TEST_VM), REGION_NAME, new SnappyCompressor());
}
/**
 * Runs the subclass tear-down hook and always cleans up the test VM afterwards, even when
 * the hook throws. Replaces the manual Error/Exception capture-and-rethrow with try/finally,
 * which is equivalent for Error and Exception and additionally guarantees cleanup for any
 * other Throwable (the original skipped cleanup in that case).
 */
@Override
public final void preTearDownCacheTestCase() throws Exception {
try {
preTearDownCompressionRegionOperationsDUnitTest();
} finally {
//Always destroy the compressed region in the test VM
cleanup(getVM(TEST_VM));
}
}
//Hook for subclasses to run extra tear-down work before the region cleanup; no-op by default
protected void preTearDownCompressionRegionOperationsDUnitTest() throws Exception {}
/**
 * Invokes basic get/put operations tests on the test vm.
 */
@Test
public void testGetPutOperations() {
//Delegates so subclasses/other VMs can reuse the assertion body
testGetPutOperationsOnVM(getVM(TEST_VM));
}
/**
* Tests the following operations on a region with compression enabled:
*
* <ul>
* <li>{@link Region#put(Object, Object)}</li>
* <li>{@link Region#putAll(Map)}</li>
* <li>{@link Region#putIfAbsent(Object, Object)}</li>
* <li>{@link Region#get(Object)}</li>
* <li>{@link Region#getAll(Collection)}</li>
* </ul>
*
* @param vm a test virtual machine.
*/
//Runs the put/putAll/putIfAbsent/get/getAll assertion sequence inside the given VM.
//The operations are order-dependent: each assertion relies on the region state left by
//the previous step.
private void testGetPutOperationsOnVM(final VM vm) {
vm.invoke(new SerializableRunnable() {
@Override
public void run() {
Region<String, String> region = getCache().getRegion(REGION_NAME);
//First put into an empty region returns no previous value
String oldValue = (String) region.put(KEY_1, VALUE_1);
assertNull(oldValue);
oldValue = region.get(KEY_1);
assertEquals(VALUE_1, oldValue);
//Overwrite; put may legitimately return null here — presumably the region does not
//always hand back the old value, so it is only checked when present (TODO confirm)
oldValue = region.put(KEY_1, VALUE_2);
if (null != oldValue) {
assertEquals(VALUE_1, oldValue);
}
oldValue = region.get(KEY_1);
assertEquals(VALUE_2, oldValue);
//putIfAbsent must not replace the existing mapping and returns the current value
oldValue = region.putIfAbsent(KEY_1, VALUE_3);
assertEquals(VALUE_2, oldValue);
//putAll installs all three key/value pairs, overwriting KEY_1
region.putAll(putAllMap);
oldValue = region.get(KEY_1);
assertEquals(VALUE_1, oldValue);
oldValue = region.get(KEY_2);
assertEquals(VALUE_2, oldValue);
oldValue = region.get(KEY_3);
assertEquals(VALUE_3, oldValue);
//getAll returns the decompressed values for every requested key
Map<String, String> getAllMap = region.getAll(getAllCollection);
assertTrue(getAllMap.containsValue(VALUE_1));
assertTrue(getAllMap.containsValue(VALUE_2));
assertTrue(getAllMap.containsValue(VALUE_3));
}
});
}
/**
* Invokes key, value operations using the test VM.
*/
//Delegates the key/value operations assertions to the test VM
@Test
public void testKeysAndValuesOperations() {
testKeysAndValuesOperationsOnVM(getVM(TEST_VM));
}
/**
* Tests the following region key, value operations:
*
* <ul>
* <li>{@link Region#invalidate(Object)}</li>
* <li>{@link Region#containsKey(Object)}</li>
* <li>{@link Region#containsValue(Object)}</li>
* <li>{@link Region#destroy(Object)}</li>
* <li>{@link Region#remove(Object)}</li>
* <li>{@link Region#remove(Object, Object)}</li>
* <li>{@link Region#replace(Object, Object)}</li>
* <li>{@link Region#replace(Object, Object, Object)}</li>
* <li>{@link Region#values()}</li>
* <li>{@link Region#keySet()}</li>
* </ul>
*
* @param vm a test virtual machine.
*/
//Runs invalidate/containsKey/containsValue/destroy/remove/replace/values/keySet assertions
//inside the given VM. The steps are order-dependent: each relies on the state left by the
//previous operation.
private void testKeysAndValuesOperationsOnVM(final VM vm) {
vm.invoke(new SerializableRunnable() {
@Override
public void run() {
Region<String, String> region = getCache().getRegion(REGION_NAME);
String oldValue = region.put(KEY_1, VALUE_1);
assertNull(oldValue);
oldValue = region.get(KEY_1);
assertEquals(VALUE_1, oldValue);
//invalidate keeps the key but drops the value
region.invalidate(KEY_1);
assertNull(region.get(KEY_1));
oldValue = region.put(KEY_1, VALUE_1);
assertNull(oldValue);
oldValue = region.get(KEY_1);
assertEquals(VALUE_1, oldValue);
assertTrue(region.containsKey(KEY_1));
assertTrue(region.containsValue(VALUE_1));
//destroy removes key and value
region.destroy(KEY_1);
assertNull(region.get(KEY_1));
oldValue = region.put(KEY_1, VALUE_1);
assertNull(oldValue);
oldValue = region.get(KEY_1);
assertEquals(VALUE_1, oldValue);
//remove(key) returns the removed value
oldValue = region.remove(KEY_1);
assertEquals(VALUE_1, oldValue);
oldValue = region.put(KEY_1, VALUE_1);
assertNull(oldValue);
//conditional remove succeeds only when the mapped value matches
assertTrue(region.remove(KEY_1, VALUE_1));
oldValue = region.put(KEY_1, VALUE_1);
assertNull(oldValue);
//replace returns the previous value
oldValue = region.replace(KEY_1, VALUE_2);
assertEquals(VALUE_1, oldValue);
oldValue = region.get(KEY_1);
assertEquals(VALUE_2, oldValue);
//conditional replace succeeds when the expected value matches
assertTrue(region.replace(KEY_1, VALUE_2, VALUE_3));
assertTrue(region.values().contains(VALUE_3));
assertTrue(region.keySet().contains(KEY_1));
}
});
}
/**
* Tests compressed put/get region operations using CachedDeserializable values.
*
* @see CompressionRegionOperationsDUnitTest#testGetPutOperations()
*/
@Test
public void testGetPutOperationsWithCachedDeserializable() {
testGetPutOperationsWithCachedDeserializableOnVM(getVM(TEST_VM));
}
/**
* Tests the following operations on a region with compression enabled using CachedDeserializable
* values:
*
* <ul>
* <li>{@link Region#put(Object, Object)}</li>
* <li>{@link Region#putAll(Map)}</li>
* <li>{@link Region#putIfAbsent(Object, Object)}</li>
* <li>{@link Region#get(Object)}</li>
* <li>{@link Region#getAll(Collection)}</li>
* </ul>
*
* @param vm a test virtual machine.
*/
  private void testGetPutOperationsWithCachedDeserializableOnVM(final VM vm) {
    vm.invoke(new SerializableRunnable() {
      @Override
      public void run() {
        Region<String, Object> region = getCache().getRegion(REGION_NAME);
        // Values are stored pre-serialized as CachedDeserializable wrappers; reads should
        // transparently deserialize back to the original String.
        String oldValue = (String) region.put(KEY_1,
            CachedDeserializableFactory.create(EntryEventImpl.serialize(VALUE_1), getCache()));
        assertNull(oldValue);
        oldValue = (String) region.get(KEY_1);
        assertEquals(VALUE_1, oldValue);
        oldValue = (String) region.put(KEY_1,
            CachedDeserializableFactory.create(EntryEventImpl.serialize(VALUE_2), getCache()));
        // The previous value may or may not be returned here; only check it when present.
        if (null != oldValue) {
          assertEquals(VALUE_1, oldValue);
        }
        oldValue = (String) region.get(KEY_1);
        assertEquals(VALUE_2, oldValue);
        // putIfAbsent() must not overwrite the existing value and returns it instead.
        oldValue = (String) region.putIfAbsent(KEY_1,
            CachedDeserializableFactory.create(EntryEventImpl.serialize(VALUE_3), getCache()));
        assertEquals(VALUE_2, oldValue);
        // putAllMap2 is a fixture defined elsewhere in this test class; presumably it maps
        // KEY_1..KEY_3 to VALUE_1..VALUE_3 wrappers -- verify against its declaration.
        region.putAll(putAllMap2);
        oldValue = (String) region.get(KEY_1);
        assertEquals(VALUE_1, oldValue);
        oldValue = (String) region.get(KEY_2);
        assertEquals(VALUE_2, oldValue);
        oldValue = (String) region.get(KEY_3);
        assertEquals(VALUE_3, oldValue);
        // getAll() over the shared key collection must surface every stored value.
        Map<String, Object> getAllMap = region.getAll(getAllCollection);
        assertTrue(getAllMap.containsValue(VALUE_1));
        assertTrue(getAllMap.containsValue(VALUE_2));
        assertTrue(getAllMap.containsValue(VALUE_3));
      }
    });
  }
/**
* Tests compressed put/get region operations using byte[] values.
*
* @see CompressionRegionOperationsDUnitTest#testGetPutOperations()
*/
@Test
public void testGetPutOperationsWithByteArrays() {
testGetPutOperationsWithByteArraysOnVM(getVM(TEST_VM));
}
/**
* Tests the following operations on a region with compression enabled using byte[] values:
*
* <ul>
* <li>{@link Region#put(Object, Object)}</li>
* <li>{@link Region#putAll(Map)}</li>
* <li>{@link Region#putIfAbsent(Object, Object)}</li>
* <li>{@link Region#get(Object)}</li>
* <li>{@link Region#getAll(Collection)}</li>
* </ul>
*
* @param vm a test virtual machine.
*/
  private void testGetPutOperationsWithByteArraysOnVM(final VM vm) {
    vm.invoke(new SerializableRunnable() {
      @Override
      public void run() {
        Region<String, byte[]> region = getCache().getRegion(REGION_NAME);
        // NOTE(review): getBytes()/new String(byte[]) use the platform default charset;
        // fine for the ASCII test values here, but confirm if non-ASCII values are added.
        byte[] oldValue = region.put(KEY_1, VALUE_1.getBytes());
        assertNull(oldValue);
        oldValue = region.get(KEY_1);
        assertEquals(VALUE_1, new String(oldValue));
        oldValue = region.put(KEY_1, VALUE_2.getBytes());
        // The previous value may or may not be returned here; only check it when present.
        if (null != oldValue) {
          assertEquals(VALUE_1, new String(oldValue));
        }
        oldValue = region.get(KEY_1);
        assertEquals(VALUE_2, new String(oldValue));
        // putIfAbsent() must not overwrite the existing value and returns it instead.
        oldValue = region.putIfAbsent(KEY_1, VALUE_3.getBytes());
        assertEquals(VALUE_2, new String(oldValue));
        // putAllMap3 is a fixture defined elsewhere in this test class; presumably it maps
        // KEY_1..KEY_3 to the corresponding byte[] values -- verify against its declaration.
        region.putAll(putAllMap3);
        oldValue = region.get(KEY_1);
        assertEquals(VALUE_1, new String(oldValue));
        oldValue = region.get(KEY_2);
        assertEquals(VALUE_2, new String(oldValue));
        oldValue = region.get(KEY_3);
        assertEquals(VALUE_3, new String(oldValue));
        // getAll() over the shared key collection must return each key's stored bytes.
        Map<String, byte[]> getAllMap = region.getAll(getAllCollection);
        oldValue = getAllMap.get(KEY_1);
        assertEquals(VALUE_1, new String(oldValue));
        oldValue = getAllMap.get(KEY_2);
        assertEquals(VALUE_2, new String(oldValue));
        oldValue = getAllMap.get(KEY_3);
        assertEquals(VALUE_3, new String(oldValue));
      }
    });
  }
/**
* Returns the VM for a given identifier.
*
* @param vm a virtual machine identifier.
*/
protected VM getVM(int vm) {
return Host.getHost(0).getVM(vm);
}
/**
* Removes created regions from a VM.
*
* @param vm the virtual machine to cleanup.
*/
private void cleanup(final VM vm) {
vm.invoke(new SerializableRunnable() {
@Override
public void run() {
getCache().getRegion(REGION_NAME).destroyRegion();
}
});
}
  /**
   * Creates a region and assigns a compressor.
   *
   * @param vm a virtual machine to create the region on.
   * @param name a region name.
   * @param compressor a compressor.
   * @return true if successfully created, otherwise false.
   */
  private boolean createCompressedRegionOnVm(final VM vm, final String name,
      final Compressor compressor) {
    // Convenience overload: delegates with on-heap storage (offHeap = false).
    return createCompressedRegionOnVm(vm, name, compressor, false);
  }
  /**
   * Creates a region with the given compressor on a VM, optionally using off-heap storage.
   *
   * @param vm a virtual machine to create the region on.
   * @param name a region name.
   * @param compressor a compressor.
   * @param offHeap whether the region stores its values off-heap.
   * @return true if successfully created; false if region creation threw
   *         {@link IllegalStateException} (e.g. an invalid compressor/configuration).
   */
  protected boolean createCompressedRegionOnVm(final VM vm, final String name,
      final Compressor compressor, final boolean offHeap) {
    return (Boolean) vm.invoke(new SerializableCallable() {
      @Override
      public Object call() throws Exception {
        try {
          createRegion(name, compressor, offHeap);
        } catch (IllegalStateException e) {
          // Expected failure mode for invalid configurations; report as "not created".
          return Boolean.FALSE;
        }
        return Boolean.TRUE;
      }
    });
  }
  /**
   * Creates a replicated region with cloning enabled and the given compressor assigned.
   *
   * @param name a region name.
   * @param compressor a compressor.
   * @param offHeap whether the region stores its values off-heap.
   * @return the newly created region (raw type preserved for existing callers).
   */
  private Region createRegion(String name, Compressor compressor, boolean offHeap) {
    // Cloning is enabled so compressed values are copied rather than mutated in place.
    return getCache().<String, String>createRegionFactory().setDataPolicy(DataPolicy.REPLICATE)
        .setCloningEnabled(true).setCompressor(compressor).setOffHeap(offHeap).create(name);
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.connector.elasticsearch.sink;
import org.apache.flink.annotation.PublicEvolving;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.java.ClosureCleaner;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.util.InstantiationUtil;
import org.apache.http.HttpHost;
import java.util.Arrays;
import java.util.List;
import static org.apache.flink.util.Preconditions.checkArgument;
import static org.apache.flink.util.Preconditions.checkNotNull;
import static org.apache.flink.util.Preconditions.checkState;
/**
 * Base builder to construct a {@link ElasticsearchSink}.
 *
 * <p>Concrete subclasses provide the {@link BulkProcessorBuilderFactory} matching a specific
 * Elasticsearch client version.
 *
 * @param <IN> type of the records converted to Elasticsearch actions
 * @param <B> the concrete builder type, so that fluent setters return the subclass
 */
@PublicEvolving
public abstract class ElasticsearchSinkBuilderBase<
        IN, B extends ElasticsearchSinkBuilderBase<IN, B>> {

    // Bulk flush defaults: flush every 1000 actions; size/interval/backoff disabled (-1 / NONE).
    private int bulkFlushMaxActions = 1000;
    private int bulkFlushMaxMb = -1;
    private long bulkFlushInterval = -1;
    private FlushBackoffType bulkFlushBackoffType = FlushBackoffType.NONE;
    private int bulkFlushBackoffRetries = -1;
    private long bulkFlushBackOffDelay = -1;
    private DeliveryGuarantee deliveryGuarantee = DeliveryGuarantee.NONE;
    private List<HttpHost> hosts;
    protected ElasticsearchEmitter<? super IN> emitter;
    private String username;
    private String password;
    private String connectionPathPrefix;
    private Integer connectionTimeout;
    private Integer connectionRequestTimeout;
    private Integer socketTimeout;

    protected ElasticsearchSinkBuilderBase() {}

    /** Returns this builder cast to the caller-requested self type. */
    @SuppressWarnings("unchecked")
    protected <S extends ElasticsearchSinkBuilderBase<?, ?>> S self() {
        return (S) this;
    }

    /**
     * Sets the emitter which is invoked on every record to convert it to Elasticsearch actions.
     *
     * @param emitter to process records into Elasticsearch actions.
     * @return this builder
     */
    public <T extends IN> ElasticsearchSinkBuilderBase<T, ?> setEmitter(
            ElasticsearchEmitter<? super T> emitter) {
        checkNotNull(emitter);
        checkState(
                InstantiationUtil.isSerializable(emitter),
                "The elasticsearch emitter must be serializable.");
        final ElasticsearchSinkBuilderBase<T, ?> self = self();
        self.emitter = emitter;
        return self;
    }

    /**
     * Sets the hosts where the Elasticsearch cluster nodes are reachable.
     *
     * @param hosts http addresses describing the node locations
     * @return this builder
     */
    public B setHosts(HttpHost... hosts) {
        checkNotNull(hosts);
        checkState(hosts.length > 0, "Hosts cannot be empty.");
        this.hosts = Arrays.asList(hosts);
        return self();
    }

    /**
     * Sets the wanted {@link DeliveryGuarantee}. The default delivery guarantee is {@link
     * DeliveryGuarantee#NONE}.
     *
     * @param deliveryGuarantee which describes the record emission behaviour
     * @return this builder
     */
    public B setDeliveryGuarantee(DeliveryGuarantee deliveryGuarantee) {
        checkState(
                deliveryGuarantee != DeliveryGuarantee.EXACTLY_ONCE,
                "Elasticsearch sink does not support the EXACTLY_ONCE guarantee.");
        this.deliveryGuarantee = checkNotNull(deliveryGuarantee);
        return self();
    }

    /**
     * Sets the maximum number of actions to buffer for each bulk request. You can pass -1 to
     * disable it. The default flush size is 1000.
     *
     * @param numMaxActions the maximum number of actions to buffer per bulk request.
     * @return this builder
     */
    public B setBulkFlushMaxActions(int numMaxActions) {
        checkState(
                numMaxActions == -1 || numMaxActions > 0,
                "Max number of buffered actions must be larger than 0.");
        this.bulkFlushMaxActions = numMaxActions;
        return self();
    }

    /**
     * Sets the maximum size of buffered actions, in mb, per bulk request. You can pass -1 to
     * disable it.
     *
     * @param maxSizeMb the maximum size of buffered actions, in mb.
     * @return this builder
     */
    public B setBulkFlushMaxSizeMb(int maxSizeMb) {
        checkState(
                maxSizeMb == -1 || maxSizeMb > 0,
                "Max size of buffered actions must be larger than 0.");
        this.bulkFlushMaxMb = maxSizeMb;
        return self();
    }

    /**
     * Sets the bulk flush interval, in milliseconds. You can pass -1 to disable it.
     *
     * @param intervalMillis the bulk flush interval, in milliseconds.
     * @return this builder
     */
    public B setBulkFlushInterval(long intervalMillis) {
        checkState(
                intervalMillis == -1 || intervalMillis >= 0,
                "Interval (in milliseconds) between each flush must be larger than "
                        + "or equal to 0.");
        this.bulkFlushInterval = intervalMillis;
        return self();
    }

    /**
     * Sets the type of back off to use when flushing bulk requests. The default bulk flush back off
     * type is {@link FlushBackoffType#NONE}.
     *
     * <p>Sets the amount of delay between each backoff attempt when flushing bulk requests, in
     * milliseconds.
     *
     * <p>Sets the maximum number of retries for a backoff attempt when flushing bulk requests.
     *
     * @param flushBackoffType the backoff type to use.
     * @param maxRetries the maximum number of backoff attempts; must be larger than 0.
     * @param delayMillis the delay between backoff attempts, in milliseconds; must be >= 0.
     * @return this builder
     */
    public B setBulkFlushBackoffStrategy(
            FlushBackoffType flushBackoffType, int maxRetries, long delayMillis) {
        checkNotNull(flushBackoffType);
        // Validate all arguments before mutating any field so a failed call does not leave
        // the builder in a partially configured state.
        checkState(
                flushBackoffType != FlushBackoffType.NONE,
                "FlushBackoffType#NONE does not require a configuration it is the default, retries and delay are ignored.");
        checkState(maxRetries > 0, "Max number of backoff attempts must be larger than 0.");
        checkState(
                delayMillis >= 0,
                "Delay (in milliseconds) between each backoff attempt must be larger "
                        + "than or equal to 0.");
        this.bulkFlushBackoffType = flushBackoffType;
        this.bulkFlushBackoffRetries = maxRetries;
        this.bulkFlushBackOffDelay = delayMillis;
        return self();
    }

    /**
     * Sets the username used to authenticate the connection with the Elasticsearch cluster.
     *
     * @param username of the Elasticsearch cluster user
     * @return this builder
     */
    public B setConnectionUsername(String username) {
        checkNotNull(username);
        this.username = username;
        return self();
    }

    /**
     * Sets the password used to authenticate the connection with the Elasticsearch cluster.
     *
     * @param password of the Elasticsearch cluster user
     * @return this builder
     */
    public B setConnectionPassword(String password) {
        checkNotNull(password);
        this.password = password;
        return self();
    }

    /**
     * Sets a prefix which is used for every REST communication to the Elasticsearch cluster.
     *
     * @param prefix for the communication
     * @return this builder
     */
    public B setConnectionPathPrefix(String prefix) {
        checkNotNull(prefix);
        this.connectionPathPrefix = prefix;
        return self();
    }

    /**
     * Sets the timeout for requesting the connection of the Elasticsearch cluster from the
     * connection manager.
     *
     * @param timeout for the connection request
     * @return this builder
     */
    public B setConnectionRequestTimeout(int timeout) {
        checkState(timeout >= 0, "Connection request timeout must be larger than or equal to 0.");
        this.connectionRequestTimeout = timeout;
        return self();
    }

    /**
     * Sets the timeout for establishing a connection of the Elasticsearch cluster.
     *
     * @param timeout for the connection
     * @return this builder
     */
    public B setConnectionTimeout(int timeout) {
        checkState(timeout >= 0, "Connection timeout must be larger than or equal to 0.");
        this.connectionTimeout = timeout;
        return self();
    }

    /**
     * Sets the timeout for waiting for data or, put differently, a maximum period of inactivity
     * between two consecutive data packets.
     *
     * @param timeout for the socket
     * @return this builder
     */
    public B setSocketTimeout(int timeout) {
        checkState(timeout >= 0, "Socket timeout must be larger than or equal to 0.");
        this.socketTimeout = timeout;
        return self();
    }

    /** Supplied by version-specific subclasses to build the bulk processor. */
    protected abstract BulkProcessorBuilderFactory getBulkProcessorBuilderFactory();

    /**
     * Constructs the {@link ElasticsearchSink} with the properties configured on this builder.
     *
     * @return {@link ElasticsearchSink}
     */
    public ElasticsearchSink<IN> build() {
        checkNotNull(emitter);
        checkNotNull(hosts);

        NetworkClientConfig networkClientConfig = buildNetworkClientConfig();
        BulkProcessorConfig bulkProcessorConfig = buildBulkProcessorConfig();

        BulkProcessorBuilderFactory bulkProcessorBuilderFactory = getBulkProcessorBuilderFactory();
        // Ensure the factory is serializable and free of accidental outer-class references.
        ClosureCleaner.clean(
                bulkProcessorBuilderFactory, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true);

        return new ElasticsearchSink<>(
                hosts,
                emitter,
                deliveryGuarantee,
                bulkProcessorBuilderFactory,
                bulkProcessorConfig,
                networkClientConfig);
    }

    private NetworkClientConfig buildNetworkClientConfig() {
        checkArgument(!hosts.isEmpty(), "Hosts cannot be empty.");
        return new NetworkClientConfig(
                username,
                password,
                connectionPathPrefix,
                connectionRequestTimeout,
                connectionTimeout,
                socketTimeout);
    }

    private BulkProcessorConfig buildBulkProcessorConfig() {
        return new BulkProcessorConfig(
                bulkFlushMaxActions,
                bulkFlushMaxMb,
                bulkFlushInterval,
                bulkFlushBackoffType,
                bulkFlushBackoffRetries,
                bulkFlushBackOffDelay);
    }

    @Override
    public String toString() {
        return "ElasticsearchSinkBuilder{"
                + "bulkFlushMaxActions="
                + bulkFlushMaxActions
                + ", bulkFlushMaxMb="
                + bulkFlushMaxMb
                + ", bulkFlushInterval="
                + bulkFlushInterval
                + ", bulkFlushBackoffType="
                + bulkFlushBackoffType
                + ", bulkFlushBackoffRetries="
                + bulkFlushBackoffRetries
                + ", bulkFlushBackOffDelay="
                + bulkFlushBackOffDelay
                + ", deliveryGuarantee="
                + deliveryGuarantee
                + ", hosts="
                + hosts
                + ", emitter="
                + emitter
                + ", username='"
                + username
                + '\''
                // Never expose the credential itself in logs/toString output.
                + ", password='"
                + (password == null ? null : "******")
                + '\''
                + ", connectionPathPrefix='"
                + connectionPathPrefix
                + '\''
                + '}';
    }
}
| |
/*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.apimgt.impl;
import org.compass.core.util.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.wso2.carbon.apimgt.api.APIManagementException;
import org.wso2.carbon.apimgt.api.model.AccessTokenRequest;
import org.wso2.carbon.apimgt.api.model.KeyManager;
import org.wso2.carbon.apimgt.api.model.KeyManagerConfiguration;
import org.wso2.carbon.apimgt.api.model.KeyManagerConnectorConfiguration;
import org.wso2.carbon.apimgt.api.model.OAuthApplicationInfo;
import org.wso2.carbon.apimgt.impl.factory.ModelKeyManagerForTest;
import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder;
import java.util.UUID;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
@RunWith(PowerMockRunner.class)
@PrepareForTest({ServiceReferenceHolder.class})
public class AbstractKeyManagerTestCase {
@Test
public void buildAccessTokenRequestFromJSONTest() throws APIManagementException {
String jsonPayload = "{ \"callbackUrl\": \"www.google.lk\", \"clientName\": \"rest_api_publisher\", " +
"\"tokenScope\": \"Production\", \"owner\": \"admin\", \"grantType\": \"password refresh_token\", " +
"\"saasApp\": true }";
AbstractKeyManager keyManager = new AMDefaultKeyManagerImpl();
// test AccessTokenRequest null scenario
AccessTokenRequest accessTokenRequest1 = keyManager.buildAccessTokenRequestFromJSON(jsonPayload, null);
Assert.notNull(accessTokenRequest1);
// test json payload without required parameters
AccessTokenRequest accessTokenRequest2 = keyManager.buildAccessTokenRequestFromJSON(jsonPayload,
accessTokenRequest1);
Assert.notNull(accessTokenRequest2);
assertNull(accessTokenRequest2.getClientId());
// test json payload null
assertNull(keyManager.buildAccessTokenRequestFromJSON(null, null));
String jsonPayload2 = "{ \"callbackUrl\": \"www.google.lk\", \"client_id\": \"XBPcXSfGK47WiEX7enchoP2Dcvga\"," +
"\"client_secret\": \"4UD8VX8NaQMtrHCwqzI1tHJLPoca\", \"owner\": \"admin\", \"grantType\": \"password" +
" refresh_token\", " +
"\"validityPeriod\": \"3600\" }";
AccessTokenRequest accessTokenRequest3 = keyManager.buildAccessTokenRequestFromJSON(jsonPayload2,
new AccessTokenRequest());
assertEquals("XBPcXSfGK47WiEX7enchoP2Dcvga", accessTokenRequest3.getClientId());
assertEquals("4UD8VX8NaQMtrHCwqzI1tHJLPoca", accessTokenRequest3.getClientSecret());
assertEquals(3600, accessTokenRequest3.getValidityPeriod());
//Error path with invalid json
try {
keyManager.buildAccessTokenRequestFromJSON("{dd}", null);
assertTrue(false);
} catch (APIManagementException e) {
assertEquals("Error occurred while parsing JSON String", e.getMessage());
}
//Error path with empty JSON
assertNull(keyManager.buildAccessTokenRequestFromJSON("{}", null));
keyManager.buildAccessTokenRequestFromJSON(null, new AccessTokenRequest());
}
@Test
public void buildFromJSONTest() throws APIManagementException {
AbstractKeyManager keyManager = new AMDefaultKeyManagerImpl();
KeyManagerConnectorConfiguration keyManagerConnectorConfiguration = Mockito
.mock(DefaultKeyManagerConnectorConfiguration.class);
ServiceReferenceHolder serviceReferenceHolder = PowerMockito.mock(ServiceReferenceHolder.class);
PowerMockito.mockStatic(ServiceReferenceHolder.class);
PowerMockito.when(ServiceReferenceHolder.getInstance()).thenReturn(serviceReferenceHolder);
Mockito.when(serviceReferenceHolder
.getKeyManagerConnectorConfiguration(APIConstants.KeyManager.DEFAULT_KEY_MANAGER_TYPE))
.thenReturn(keyManagerConnectorConfiguration);
// test with empty json payload
assertNotNull(keyManager.buildFromJSON(new OAuthApplicationInfo(), "{}"));
// test with valid json
String jsonPayload2 = "{ \"callbackUrl\": \"www.google.lk\", \"client_id\": \"XBPcXSfGK47WiEX7enchoP2Dcvga\"," +
"\"client_secret\": \"4UD8VX8NaQMtrHCwqzI1tHJLPoca\", \"owner\": \"admin\", \"grantType\": \"password" +
" refresh_token\", " +
"\"validityPeriod\": \"3600\" }";
OAuthApplicationInfo oAuthApplicationInfo1 = keyManager.buildFromJSON(new OAuthApplicationInfo(), jsonPayload2);
assertEquals("XBPcXSfGK47WiEX7enchoP2Dcvga", oAuthApplicationInfo1.getClientId());
//test with invalid json
try {
keyManager.buildFromJSON(new OAuthApplicationInfo(), "{invalid}");
assertTrue(false);
} catch (APIManagementException e) {
assertEquals("Error occurred while parsing JSON String", e.getMessage());
}
}
@Test
public void buildAccessTokenRequestFromOAuthAppTest() throws APIManagementException {
AbstractKeyManager keyManager = new AMDefaultKeyManagerImpl();
//test null flow
assertNull(keyManager.buildAccessTokenRequestFromOAuthApp(null, null));
// test without client id and secret
try {
keyManager.buildAccessTokenRequestFromOAuthApp(new OAuthApplicationInfo(), new AccessTokenRequest());
assertTrue(false);
} catch (APIManagementException e) {
assertEquals("Consumer key or Consumer Secret missing.", e.getMessage());
}
// test with all the parameters
OAuthApplicationInfo oAuthApplicationInfo = new OAuthApplicationInfo();
oAuthApplicationInfo.setClientId("XBPcXSfGK47WiEX7enchoP2Dcvga");
oAuthApplicationInfo.setClientSecret("4UD8VX8NaQMtrHCwqzI1tHJLPoca");
oAuthApplicationInfo.addParameter("tokenScope", new String[]{"view", "update"});
oAuthApplicationInfo.addParameter("validityPeriod", "1200");
AccessTokenRequest accessTokenRequest = keyManager.buildAccessTokenRequestFromOAuthApp(oAuthApplicationInfo,
null);
assertNotNull(accessTokenRequest);
assertEquals("XBPcXSfGK47WiEX7enchoP2Dcvga", accessTokenRequest.getClientId());
assertEquals("4UD8VX8NaQMtrHCwqzI1tHJLPoca", accessTokenRequest.getClientSecret());
assertEquals(1200, accessTokenRequest.getValidityPeriod());
}
@Test
public void testCanHandleToken() throws APIManagementException {
KeyManagerConfiguration keyManagerConfiguration = new KeyManagerConfiguration();
KeyManager keyManager = new ModelKeyManagerForTest();
keyManager.loadConfiguration(keyManagerConfiguration);
assertTrue(keyManager.canHandleToken(UUID.randomUUID().toString()));
}
@Test
public void testCanHandleTokenEmptyConfiguration() throws APIManagementException {
KeyManagerConfiguration keyManagerConfiguration = new KeyManagerConfiguration();
KeyManager keyManager = new ModelKeyManagerForTest();
keyManagerConfiguration.addParameter(APIConstants.KeyManager.TOKEN_FORMAT_STRING, "[]");
keyManager.loadConfiguration(keyManagerConfiguration);
assertTrue(keyManager.canHandleToken(UUID.randomUUID().toString()));
}
@Test
public void testCanHandleTokenWithConfiguration() throws APIManagementException {
KeyManagerConfiguration keyManagerConfiguration = new KeyManagerConfiguration();
keyManagerConfiguration.addParameter(APIConstants.KeyManager.TOKEN_FORMAT_STRING,
"[{\"enable\": true,\"type\": \"JWT\",\"value\": {\"body\": {\"iss\": \"https://localhost:9443\"}}}]");
KeyManager keyManager = new ModelKeyManagerForTest();
keyManager.loadConfiguration(keyManagerConfiguration);
assertFalse(keyManager.canHandleToken(UUID.randomUUID().toString()));
}
@Test
public void testCanHandleTokenWithConfigurationJWT() throws APIManagementException {
KeyManagerConfiguration keyManagerConfiguration = new KeyManagerConfiguration();
keyManagerConfiguration.addParameter(APIConstants.KeyManager.TOKEN_FORMAT_STRING,
"[{\"enable\": true,\"type\": \"JWT\",\"value\": {\"body\": {\"iss\": \"https://localhost:9443\"}}}]");
KeyManager keyManager = new ModelKeyManagerForTest();
keyManager.loadConfiguration(keyManagerConfiguration);
assertTrue(keyManager.canHandleToken(
"eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9" +
".eyJpc3MiOiJodHRwczovL2xvY2FsaG9zdDo5NDQzIiwiaWF0IjoxNTkwMTM0NzIyLCJleHAiOjE2MjE2NzA3MjAsImF1ZC" +
"I6Ind3dy5leGFtcGxlLmNvbSIsInN1YiI6Impyb2NrZXRAZXhhbXBsZS5jb20iLCJFbWFpbCI6ImJlZUBleGFtcGxlLmNvb" +
"SJ9.HIxL7_WqeLPkxYdROAwRyL0YEY1YNJRfLghsaHEc7C4"));
assertFalse(keyManager.canHandleToken(
"eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJodHRwczovL2xvY2FsaG9zdDo5NDQ0IiwiaWF0IjoxN" +
"TkwMTM0NzIyLCJleHAiOjE2MjE2NzA3MjAsImF1ZCI6Ind3dy5leGFtcGxlLmNvbSIsInN1YiI6Impyb2NrZXRAZXhhb" +
"XBsZS5jb20iLCJFbWFpbCI6ImJlZUBleGFtcGxlLmNvbSJ9.QjwcCl7Xs0zmioqsr85VQmW5lgRnkfba-v8OgKwhKyA"));
}
@Test
public void testCanHandleTokenWithConfigurationJWTMultipleClaim() throws APIManagementException {
KeyManagerConfiguration keyManagerConfiguration = new KeyManagerConfiguration();
keyManagerConfiguration.addParameter(APIConstants.KeyManager.TOKEN_FORMAT_STRING,
"[{\"enable\": true,\"type\": \"JWT\",\"value\": {\"body\": {\"iss\": \"https://localhost:9443\"," +
"\"domain\": \"abc.com\"}}}]");
KeyManager keyManager = new ModelKeyManagerForTest();
keyManager.loadConfiguration(keyManagerConfiguration);
assertTrue(keyManager.canHandleToken(
"eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJodHRwczovL2xvY2FsaG9zdDo5NDQ0IiwiaWF0IjoxN" +
"TkwMTM0NzIyLCJleHAiOjE2MjE2NzA3MjAsImF1ZCI6Ind3dy5leGFtcGxlLmNvbSIsInN1YiI6Impyb2NrZXRAZXhhbX" +
"BsZS5jb20iLCJkb21haW4iOiJhYmMuY29tIn0.pHI2MUhvdGjcOj2yJ-05cHMwtx5kanMhO71m0wFhjic"));
assertFalse(keyManager.canHandleToken(
"eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9" +
".eyJpc3MiOiJodHRwczovL2xvY2FsaG9zdDo5NDQzIiwiaWF0IjoxNTkwMTM0NzIyLCJleHAiOjE2MjE2NzA3MjAsImF1ZC" +
"I6Ind3dy5leGFtcGxlLmNvbSIsInN1YiI6Impyb2NrZXRAZXhhbXBsZS5jb20iLCJFbWFpbCI6ImJlZUBleGFtcGxlLmNvb" +
"SJ9.HIxL7_WqeLPkxYdROAwRyL0YEY1YNJRfLghsaHEc7C4"));
}
@Test
public void testCanHandleTokenWithConfigurationJWTAndOpaue() throws APIManagementException {
KeyManagerConfiguration keyManagerConfiguration = new KeyManagerConfiguration();
keyManagerConfiguration.addParameter(APIConstants.KeyManager.TOKEN_FORMAT_STRING,
"[{\"enable\": true,\"type\": \"JWT\",\"value\": {\"body\": {\"iss\": \"https://localhost:9443\"}}}," +
"{\"enable\": true,\"type\": \"REFERENCE\",\"value\": \"[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0" +
"-9a-fA-F]{3}-[89ab][0-9a-fA-F]{3}-[0-9a-fA-F]{12}\"}]");
KeyManager keyManager = new ModelKeyManagerForTest();
keyManager.loadConfiguration(keyManagerConfiguration);
assertTrue(keyManager.canHandleToken(
"eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9" +
".eyJpc3MiOiJodHRwczovL2xvY2FsaG9zdDo5NDQzIiwiaWF0IjoxNTkwMTM0NzIyLCJleHAiOjE2MjE2NzA3MjAsImF1ZC" +
"I6Ind3dy5leGFtcGxlLmNvbSIsInN1YiI6Impyb2NrZXRAZXhhbXBsZS5jb20iLCJFbWFpbCI6ImJlZUBleGFtcGxlLmNvb" +
"SJ9.HIxL7_WqeLPkxYdROAwRyL0YEY1YNJRfLghsaHEc7C4"));
assertTrue(keyManager.canHandleToken(UUID.randomUUID().toString()));
}
@Test
public void testCanHandleTokenWithConfigurationJWTAndOpaueDisableOne() throws APIManagementException {
KeyManagerConfiguration keyManagerConfiguration = new KeyManagerConfiguration();
keyManagerConfiguration.addParameter(APIConstants.KeyManager.TOKEN_FORMAT_STRING,
"[{\"enable\": true,\"type\": \"JWT\",\"value\": {\"body\": {\"iss\": \"https://localhost:9443\"}}}," +
"{\"enable\": false,\"type\": \"REFERENCE\",\"value\": " +
"\"[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0" +
"-9a-fA-F]{3}-[89ab][0-9a-fA-F]{3}-[0-9a-fA-F]{12}\"}]");
KeyManager keyManager = new ModelKeyManagerForTest();
keyManager.loadConfiguration(keyManagerConfiguration);
assertTrue(keyManager.canHandleToken(
"eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9" +
".eyJpc3MiOiJodHRwczovL2xvY2FsaG9zdDo5NDQzIiwiaWF0IjoxNTkwMTM0NzIyLCJleHAiOjE2MjE2NzA3MjAsImF1ZC" +
"I6Ind3dy5leGFtcGxlLmNvbSIsInN1YiI6Impyb2NrZXRAZXhhbXBsZS5jb20iLCJFbWFpbCI6ImJlZUBleGFtcGxlLmNvb" +
"SJ9.HIxL7_WqeLPkxYdROAwRyL0YEY1YNJRfLghsaHEc7C4"));
assertFalse(keyManager.canHandleToken(UUID.randomUUID().toString()));
}
@Test
public void testCanHandleTokenWithConfigurationJWTAndOpaueNegative() throws APIManagementException {
KeyManagerConfiguration keyManagerConfiguration = new KeyManagerConfiguration();
keyManagerConfiguration.addParameter(APIConstants.KeyManager.TOKEN_FORMAT_STRING,
"[{\"enable\": true,\"type\": \"JWT\",\"value\": {\"body\": {\"iss\": \"https://localhost:9443\"}}}," +
"{\"enable\": true,\"type\": \"REFERENCE\",\"value\": \"[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0" +
"-9a-fA-F]{3}-[89ab][0-9a-fA-F]{3}-[0-9a-fA-F]{12}\"}]");
KeyManager keyManager = new ModelKeyManagerForTest();
keyManager.loadConfiguration(keyManagerConfiguration);
assertFalse(keyManager.canHandleToken("avffr.erwrwrwr.ergrtyttwre"));
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterators;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.*;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.env.ShardLock;
import org.elasticsearch.index.aliases.IndexAliasesService;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.deletionpolicy.DeletionPolicyModule;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.settings.IndexSettingsService;
import org.elasticsearch.index.shard.*;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.index.store.IndexStore;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.store.StoreModule;
import org.elasticsearch.index.translog.TranslogService;
import org.elasticsearch.indices.IndicesLifecycle;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.InternalIndicesLifecycle;
import org.elasticsearch.indices.cache.query.IndicesQueryCache;
import org.elasticsearch.plugins.PluginsService;
import java.io.Closeable;
import java.io.IOException;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import static com.google.common.collect.Maps.newHashMap;
import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder;
/**
*
*/
public class IndexService extends AbstractIndexComponent implements IndexComponent, Iterable<IndexShard> {
    private final Injector injector;
    private final PluginsService pluginsService;
    private final InternalIndicesLifecycle indicesLifecycle;
    private final AnalysisService analysisService;
    private final MapperService mapperService;
    private final IndexQueryParserService queryParserService;
    private final SimilarityService similarityService;
    private final IndexAliasesService aliasesService;
    private final IndexCache indexCache;
    private final IndexFieldDataService indexFieldData;
    private final BitsetFilterCache bitsetFilterCache;
    private final IndexSettingsService settingsService;
    private final NodeEnvironment nodeEnv;
    private final IndicesService indicesServices;

    // Shard id -> (shard, its injector); replaced wholesale on change (copy-on-publish via
    // the volatile reference) so readers iterate a consistent snapshot without locking.
    private volatile ImmutableMap<Integer, IndexShardInjectorPair> shards = ImmutableMap.of();
private static class IndexShardInjectorPair {
private final IndexShard indexShard;
private final Injector injector;
public IndexShardInjectorPair(IndexShard indexShard, Injector injector) {
this.indexShard = indexShard;
this.injector = injector;
}
public IndexShard getIndexShard() {
return indexShard;
}
public Injector getInjector() {
return injector;
}
}
private final AtomicBoolean closed = new AtomicBoolean(false);
private final AtomicBoolean deleted = new AtomicBoolean(false);
@Inject
public IndexService(Injector injector, Index index, NodeEnvironment nodeEnv,
AnalysisService analysisService, MapperService mapperService, IndexQueryParserService queryParserService,
SimilarityService similarityService, IndexAliasesService aliasesService, IndexCache indexCache,
IndexSettingsService settingsService,
IndexFieldDataService indexFieldData, BitsetFilterCache bitSetFilterCache, IndicesService indicesServices) {
super(index, settingsService.getSettings());
this.injector = injector;
this.analysisService = analysisService;
this.mapperService = mapperService;
this.queryParserService = queryParserService;
this.similarityService = similarityService;
this.aliasesService = aliasesService;
this.indexCache = indexCache;
this.indexFieldData = indexFieldData;
this.settingsService = settingsService;
this.bitsetFilterCache = bitSetFilterCache;
this.pluginsService = injector.getInstance(PluginsService.class);
this.indicesServices = indicesServices;
this.indicesLifecycle = (InternalIndicesLifecycle) injector.getInstance(IndicesLifecycle.class);
// inject workarounds for cyclic dep
indexFieldData.setListener(new FieldDataCacheListener(this));
bitSetFilterCache.setListener(new BitsetCacheListener(this));
this.nodeEnv = nodeEnv;
}
public int numberOfShards() {
return shards.size();
}
public InternalIndicesLifecycle indicesLifecycle() {
return this.indicesLifecycle;
}
@Override
public Iterator<IndexShard> iterator() {
return Iterators.transform(shards.values().iterator(), new Function<IndexShardInjectorPair, IndexShard>() {
@Override
public IndexShard apply(IndexShardInjectorPair input) {
return input.getIndexShard();
}
});
}
public boolean hasShard(int shardId) {
return shards.containsKey(shardId);
}
/**
* Return the shard with the provided id, or null if there is no such shard.
*/
@Nullable
public IndexShard shard(int shardId) {
IndexShardInjectorPair indexShardInjectorPair = shards.get(shardId);
if (indexShardInjectorPair != null) {
return indexShardInjectorPair.getIndexShard();
}
return null;
}
/**
* Return the shard with the provided id, or throw an exception if it doesn't exist.
*/
public IndexShard shardSafe(int shardId) {
IndexShard indexShard = shard(shardId);
if (indexShard == null) {
throw new ShardNotFoundException(new ShardId(index, shardId));
}
return indexShard;
}
public Set<Integer> shardIds() {
return shards.keySet();
}
public Injector injector() {
return injector;
}
public IndexSettingsService settingsService() {
return this.settingsService;
}
public IndexCache cache() {
return indexCache;
}
public IndexFieldDataService fieldData() {
return indexFieldData;
}
public BitsetFilterCache bitsetFilterCache() {
return bitsetFilterCache;
}
public AnalysisService analysisService() {
return this.analysisService;
}
public MapperService mapperService() {
return mapperService;
}
public IndexQueryParserService queryParserService() {
return queryParserService;
}
public SimilarityService similarityService() {
return similarityService;
}
public IndexAliasesService aliasesService() {
return aliasesService;
}
public synchronized void close(final String reason, boolean delete) {
if (closed.compareAndSet(false, true)) {
deleted.compareAndSet(false, delete);
final Set<Integer> shardIds = shardIds();
for (final int shardId : shardIds) {
try {
removeShard(shardId, reason);
} catch (Throwable t) {
logger.warn("failed to close shard", t);
}
}
}
}
/**
* Return the shard injector for the provided id, or throw an exception if there is no such shard.
*/
public Injector shardInjectorSafe(int shardId) {
IndexShardInjectorPair indexShardInjectorPair = shards.get(shardId);
if (indexShardInjectorPair == null) {
throw new ShardNotFoundException(new ShardId(index, shardId));
}
return indexShardInjectorPair.getInjector();
}
public String indexUUID() {
return indexSettings().get(IndexMetaData.SETTING_INDEX_UUID, IndexMetaData.INDEX_UUID_NA_VALUE);
}
// NOTE: O(numShards) cost, but numShards should be smallish?
private long getAvgShardSizeInBytes() throws IOException {
long sum = 0;
int count = 0;
for(IndexShard indexShard : this) {
sum += indexShard.store().stats().sizeInBytes();
count++;
}
if (count == 0) {
return -1L;
} else {
return sum / count;
}
}
public synchronized IndexShard createShard(ShardRouting routing) {
final boolean primary = routing.primary();
final Settings indexSettings = indexSettings();
final ShardId shardId = routing.shardId();
/*
* TODO: we execute this in parallel but it's a synced method. Yet, we might
* be able to serialize the execution via the cluster state in the future. for now we just
* keep it synced.
*/
if (closed.get()) {
throw new IllegalStateException("Can't create shard " + shardId + ", closed");
}
ShardLock lock = null;
boolean success = false;
Injector shardInjector = null;
try {
lock = nodeEnv.shardLock(shardId, TimeUnit.SECONDS.toMillis(5));
indicesLifecycle.beforeIndexShardCreated(shardId, indexSettings);
ShardPath path;
try {
path = ShardPath.loadShardPath(logger, nodeEnv, shardId, indexSettings);
} catch (IllegalStateException ex) {
logger.warn("{} failed to load shard path, trying to remove leftover", shardId);
try {
ShardPath.deleteLeftoverShardDirectory(logger, nodeEnv, lock, indexSettings);
path = ShardPath.loadShardPath(logger, nodeEnv, shardId, indexSettings);
} catch (Throwable t) {
t.addSuppressed(ex);
throw t;
}
}
if (path == null) {
// TODO: we should, instead, hold a "bytes reserved" of how large we anticipate this shard will be, e.g. for a shard
// that's being relocated/replicated we know how large it will become once it's done copying:
// Count up how many shards are currently on each data path:
Map<Path,Integer> dataPathToShardCount = new HashMap<>();
for(IndexShard shard : this) {
Path dataPath = shard.shardPath().getRootStatePath();
Integer curCount = dataPathToShardCount.get(dataPath);
if (curCount == null) {
curCount = 0;
}
dataPathToShardCount.put(dataPath, curCount+1);
}
path = ShardPath.selectNewPathForShard(nodeEnv, shardId, indexSettings, routing.getExpectedShardSize() == ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE ? getAvgShardSizeInBytes() : routing.getExpectedShardSize(),
dataPathToShardCount);
logger.debug("{} creating using a new path [{}]", shardId, path);
} else {
logger.debug("{} creating using an existing path [{}]", shardId, path);
}
if (shards.containsKey(shardId.id())) {
throw new IndexShardAlreadyExistsException(shardId + " already exists");
}
logger.debug("creating shard_id {}", shardId);
// if we are on a shared FS we only own the shard (ie. we can safely delete it) if we are the primary.
final boolean canDeleteShardContent = IndexMetaData.isOnSharedFilesystem(indexSettings) == false ||
(primary && IndexMetaData.isOnSharedFilesystem(indexSettings));
ModulesBuilder modules = new ModulesBuilder();
// plugin modules must be added here, before others or we can get crazy injection errors...
for (Module pluginModule : pluginsService.shardModules(indexSettings)) {
modules.add(pluginModule);
}
modules.add(new IndexShardModule(shardId, primary, indexSettings));
modules.add(new StoreModule(injector.getInstance(IndexStore.class).shardDirectory(), lock,
new StoreCloseListener(shardId, canDeleteShardContent, new Closeable() {
@Override
public void close() throws IOException {
injector.getInstance(IndicesQueryCache.class).onClose(shardId);
}
}), path));
modules.add(new DeletionPolicyModule());
pluginsService.processModules(modules);
try {
shardInjector = modules.createChildInjector(injector);
} catch (CreationException e) {
ElasticsearchException ex = new ElasticsearchException("failed to create shard", Injectors.getFirstErrorFailure(e));
ex.setShard(shardId);
throw ex;
} catch (Throwable e) {
ElasticsearchException ex = new ElasticsearchException("failed to create shard", e);
ex.setShard(shardId);
throw ex;
}
IndexShard indexShard = shardInjector.getInstance(IndexShard.class);
indicesLifecycle.indexShardStateChanged(indexShard, null, "shard created");
indicesLifecycle.afterIndexShardCreated(indexShard);
indexShard.updateRoutingEntry(routing, true);
shards = newMapBuilder(shards).put(shardId.id(), new IndexShardInjectorPair(indexShard, shardInjector)).immutableMap();
success = true;
return indexShard;
} catch (IOException e) {
ElasticsearchException ex = new ElasticsearchException("failed to create shard", e);
ex.setShard(shardId);
throw ex;
} finally {
if (success == false) {
IOUtils.closeWhileHandlingException(lock);
if (shardInjector != null) {
IndexShard indexShard = shardInjector.getInstance(IndexShard.class);
closeShardInjector("initialization failed", shardId, shardInjector, indexShard);
}
}
}
}
public synchronized void removeShard(int shardId, String reason) {
final ShardId sId = new ShardId(index, shardId);
final Injector shardInjector;
final IndexShard indexShard;
if (shards.containsKey(shardId) == false) {
return;
}
logger.debug("[{}] closing... (reason: [{}])", shardId, reason);
HashMap<Integer, IndexShardInjectorPair> tmpShardsMap = newHashMap(shards);
IndexShardInjectorPair indexShardInjectorPair = tmpShardsMap.remove(shardId);
indexShard = indexShardInjectorPair.getIndexShard();
shardInjector = indexShardInjectorPair.getInjector();
shards = ImmutableMap.copyOf(tmpShardsMap);
closeShardInjector(reason, sId, shardInjector, indexShard);
logger.debug("[{}] closed (reason: [{}])", shardId, reason);
}
private void closeShardInjector(String reason, ShardId sId, Injector shardInjector, IndexShard indexShard) {
final int shardId = sId.id();
final Settings indexSettings = indexSettings();
try {
try {
indicesLifecycle.beforeIndexShardClosed(sId, indexShard, indexSettings);
} finally {
// close everything else even if the beforeIndexShardClosed threw an exception
for (Class<? extends Closeable> closeable : pluginsService.shardServices()) {
try {
shardInjector.getInstance(closeable).close();
} catch (Throwable e) {
logger.debug("[{}] failed to clean plugin shard service [{}]", e, shardId, closeable);
}
}
// now we can close the translog service, we need to close it before the we close the shard
// note the that the translog service is not there for shadow replicas
closeInjectorOptionalResource(sId, shardInjector, TranslogService.class);
// this logic is tricky, we want to close the engine so we rollback the changes done to it
// and close the shard so no operations are allowed to it
if (indexShard != null) {
try {
final boolean flushEngine = deleted.get() == false && closed.get(); // only flush we are we closed (closed index or shutdown) and if we are not deleted
indexShard.close(reason, flushEngine);
} catch (Throwable e) {
logger.debug("[{}] failed to close index shard", e, shardId);
// ignore
}
}
closeInjectorResource(sId, shardInjector,
StoreRecoveryService.class);
// call this before we close the store, so we can release resources for it
indicesLifecycle.afterIndexShardClosed(sId, indexShard, indexSettings);
}
} finally {
try {
shardInjector.getInstance(Store.class).close();
} catch (Throwable e) {
logger.warn("[{}] failed to close store on shard removal (reason: [{}])", e, shardId, reason);
}
}
}
/**
* This method gets an instance for each of the given classes passed and calls #close() on the returned instance.
* NOTE: this method swallows all exceptions thrown from the close method of the injector and logs them as debug log
*/
private void closeInjectorResource(ShardId shardId, Injector shardInjector, Class<? extends Closeable>... toClose) {
for (Class<? extends Closeable> closeable : toClose) {
if (closeInjectorOptionalResource(shardId, shardInjector, closeable) == false) {
logger.warn("[{}] no instance available for [{}], ignoring... ", shardId, closeable.getSimpleName());
}
}
}
/**
* Closes an optional resource. Returns true if the resource was found;
* NOTE: this method swallows all exceptions thrown from the close method of the injector and logs them as debug log
*/
private boolean closeInjectorOptionalResource(ShardId shardId, Injector shardInjector, Class<? extends Closeable> toClose) {
try {
final Closeable instance = shardInjector.getInstance(toClose);
if (instance == null) {
return false;
}
IOUtils.close(instance);
} catch (Throwable t) {
logger.debug("{} failed to close {}", t, shardId, Strings.toUnderscoreCase(toClose.getSimpleName()));
}
return true;
}
private void onShardClose(ShardLock lock, boolean ownsShard) {
if (deleted.get()) { // we remove that shards content if this index has been deleted
final Settings indexSettings = indexSettings();
try {
if (ownsShard) {
try {
indicesLifecycle.beforeIndexShardDeleted(lock.getShardId(), indexSettings);
} finally {
indicesServices.deleteShardStore("delete index", lock, indexSettings);
indicesLifecycle.afterIndexShardDeleted(lock.getShardId(), indexSettings);
}
}
} catch (IOException e) {
indicesServices.addPendingDelete(lock.getShardId(), indexSettings);
logger.debug("[{}] failed to delete shard content - scheduled a retry", e, lock.getShardId().id());
}
}
}
private class StoreCloseListener implements Store.OnClose {
private final ShardId shardId;
private final boolean ownsShard;
private final Closeable[] toClose;
public StoreCloseListener(ShardId shardId, boolean ownsShard, Closeable... toClose) {
this.shardId = shardId;
this.ownsShard = ownsShard;
this.toClose = toClose;
}
@Override
public void handle(ShardLock lock) {
try {
assert lock.getShardId().equals(shardId) : "shard id mismatch, expected: " + shardId + " but got: " + lock.getShardId();
onShardClose(lock, ownsShard);
} finally {
try {
IOUtils.close(toClose);
} catch (IOException ex) {
logger.debug("failed to close resource", ex);
}
}
}
}
@Override
public Settings indexSettings() {
return settingsService.getSettings();
}
private static final class BitsetCacheListener implements BitsetFilterCache.Listener {
final IndexService indexService;
private BitsetCacheListener(IndexService indexService) {
this.indexService = indexService;
}
@Override
public void onCache(ShardId shardId, Accountable accountable) {
if (shardId != null) {
final IndexShard shard = indexService.shard(shardId.id());
if (shard != null) {
long ramBytesUsed = accountable != null ? accountable.ramBytesUsed() : 0l;
shard.shardBitsetFilterCache().onCached(ramBytesUsed);
}
}
}
@Override
public void onRemoval(ShardId shardId, Accountable accountable) {
if (shardId != null) {
final IndexShard shard = indexService.shard(shardId.id());
if (shard != null) {
long ramBytesUsed = accountable != null ? accountable.ramBytesUsed() : 0l;
shard.shardBitsetFilterCache().onRemoval(ramBytesUsed);
}
}
}
}
private final class FieldDataCacheListener implements IndexFieldDataCache.Listener {
final IndexService indexService;
public FieldDataCacheListener(IndexService indexService) {
this.indexService = indexService;
}
@Override
public void onCache(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage) {
if (shardId != null) {
final IndexShard shard = indexService.shard(shardId.id());
if (shard != null) {
shard.fieldData().onCache(shardId, fieldNames, fieldDataType, ramUsage);
}
}
}
@Override
public void onRemoval(ShardId shardId, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
if (shardId != null) {
final IndexShard shard = indexService.shard(shardId.id());
if (shard != null) {
shard.fieldData().onRemoval(shardId, fieldNames, fieldDataType, wasEvicted, sizeInBytes);
}
}
}
}
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.replica.catalog.impl;
import org.apache.airavata.model.data.replica.DataProductModel;
import org.apache.airavata.model.data.replica.DataProductType;
import org.apache.airavata.model.data.replica.DataReplicaLocationModel;
import org.apache.airavata.registry.core.replica.catalog.model.DataProduct;
import org.apache.airavata.registry.core.replica.catalog.model.DataReplicaLocation;
import org.apache.airavata.registry.core.replica.catalog.utils.ReplicaCatalogJPAUtils;
import org.apache.airavata.registry.core.replica.catalog.utils.ThriftDataModelConversion;
import org.apache.airavata.registry.cpi.ReplicaCatalog;
import org.apache.airavata.registry.cpi.ReplicaCatalogException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.persistence.EntityManager;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.UUID;
/**
 * JPA-backed implementation of {@link ReplicaCatalog}: persists data products and their
 * replica locations via an {@link EntityManager} obtained from {@link ReplicaCatalogJPAUtils}.
 *
 * Every method follows the same transaction pattern: open an EntityManager, do the work,
 * and in {@code finally} roll back any still-active transaction before closing.
 * All persistence failures are logged and rethrown wrapped in {@link ReplicaCatalogException}.
 */
public class ReplicaCatalogImpl implements ReplicaCatalog {
    private final static Logger logger = LoggerFactory.getLogger(ReplicaCatalogImpl.class);

    /**
     * Registers a new data product, assigning it a generated product URI and stamping the
     * product and all of its replica locations with a single shared timestamp.
     *
     * Fix: previously the creation/last-modified times were set twice — once from the captured
     * {@code currentTime} and again from fresh {@code System.currentTimeMillis()} calls — so the
     * product's timestamps could drift from the replica locations' timestamps assigned in the
     * same call. The redundant second assignment is removed.
     *
     * @param productModel product to register; owner name and gateway id are required
     * @return the generated product URI
     * @throws ReplicaCatalogException if required fields are missing, the parent product is
     *         invalid, or persistence fails
     */
    @Override
    public String registerDataProduct(DataProductModel productModel) throws ReplicaCatalogException {
        if (productModel.getOwnerName() == null || productModel.getGatewayId() == null) {
            throw new ReplicaCatalogException("owner name and gateway id should be non empty");
        }
        // A parent, when given, must exist and be a COLLECTION.
        if (productModel.getParentProductUri() != null && (!isExists(productModel.getParentProductUri())
                || !getDataProduct(productModel.getParentProductUri()).getDataProductType().equals(DataProductType.COLLECTION))) {
            throw new ReplicaCatalogException("Parent Product does not exists or parent type is not Collection");
        }
        final long currentTime = System.currentTimeMillis();
        String productUri = ReplicaCatalog.schema + "://" + UUID.randomUUID().toString();
        productModel.setProductUri(productUri);
        // One shared timestamp keeps the product and its replica locations consistent.
        productModel.setCreationTime(currentTime);
        productModel.setLastModifiedTime(currentTime);
        if (productModel.getReplicaLocations() != null) {
            productModel.getReplicaLocations().forEach(r -> {
                r.setProductUri(productUri);
                r.setReplicaId(UUID.randomUUID().toString());
                r.setCreationTime(currentTime);
                r.setLastModifiedTime(currentTime);
            });
        }
        return createDataProduct(productModel);
    }

    /**
     * Persists the given product model in its own transaction.
     *
     * @return the persisted product's URI
     */
    private String createDataProduct(DataProductModel productModel) throws ReplicaCatalogException {
        DataProduct dataProduct = ThriftDataModelConversion.getDataProduct(productModel);
        EntityManager em = null;
        try {
            em = ReplicaCatalogJPAUtils.getEntityManager();
            em.getTransaction().begin();
            em.persist(dataProduct);
            em.getTransaction().commit();
            em.close();
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            throw new ReplicaCatalogException(e);
        } finally {
            if (em != null && em.isOpen()) {
                // commit did not complete — roll back before closing
                if (em.getTransaction().isActive()) {
                    em.getTransaction().rollback();
                }
                em.close();
            }
        }
        return dataProduct.getProductUri();
    }

    /**
     * Deletes the product with the given URI.
     *
     * @return true if the product existed and was removed, false if it was not found
     */
    @Override
    public boolean removeDataProduct(String productUri) throws ReplicaCatalogException {
        EntityManager em = null;
        try {
            em = ReplicaCatalogJPAUtils.getEntityManager();
            DataProduct dataProduct = em.find(DataProduct.class, productUri);
            if (dataProduct == null) {
                return false;
            }
            em.getTransaction().begin();
            em.remove(dataProduct);
            em.getTransaction().commit();
            em.close();
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            throw new ReplicaCatalogException(e);
        } finally {
            if (em != null && em.isOpen()) {
                if (em.getTransaction().isActive()) {
                    em.getTransaction().rollback();
                }
                em.close();
            }
        }
        return true;
    }

    /**
     * Updates an existing product; the stored creation time is preserved and the
     * last-modified time is refreshed.
     *
     * @return true if the product existed and was updated, false if it was not found
     */
    @Override
    public boolean updateDataProduct(DataProductModel productModel) throws ReplicaCatalogException {
        EntityManager em = null;
        try {
            em = ReplicaCatalogJPAUtils.getEntityManager();
            DataProduct dataProduct = em.find(DataProduct.class, productModel.getProductUri());
            if (dataProduct == null) {
                return false;
            }
            em.getTransaction().begin();
            // keep the original creation time; callers cannot overwrite it
            productModel.setCreationTime(dataProduct.getCreationTime().getTime());
            productModel.setLastModifiedTime(System.currentTimeMillis());
            em.merge(ThriftDataModelConversion.getUpdatedDataProduct(productModel, dataProduct));
            em.getTransaction().commit();
            em.close();
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            throw new ReplicaCatalogException(e);
        } finally {
            if (em != null && em.isOpen()) {
                if (em.getTransaction().isActive()) {
                    em.getTransaction().rollback();
                }
                em.close();
            }
        }
        return true;
    }

    /**
     * Fetches a product by URI, converted to its Thrift model.
     * Returns whatever the conversion yields for a missing entity (null product).
     */
    @Override
    public DataProductModel getDataProduct(String productUri) throws ReplicaCatalogException {
        EntityManager em = null;
        try {
            em = ReplicaCatalogJPAUtils.getEntityManager();
            DataProduct dataProduct = em.find(DataProduct.class, productUri);
            return ThriftDataModelConversion.getDataProductModel(dataProduct);
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            throw new ReplicaCatalogException(e);
        } finally {
            if (em != null && em.isOpen()) {
                if (em.getTransaction().isActive()) {
                    em.getTransaction().rollback();
                }
                em.close();
            }
        }
    }

    /** Returns true if a product with the given URI exists. */
    @Override
    public boolean isExists(String productUri) throws ReplicaCatalogException {
        EntityManager em = null;
        try {
            em = ReplicaCatalogJPAUtils.getEntityManager();
            DataProduct dataProduct = em.find(DataProduct.class, productUri);
            return dataProduct != null;
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            throw new ReplicaCatalogException(e);
        } finally {
            if (em != null && em.isOpen()) {
                if (em.getTransaction().isActive()) {
                    em.getTransaction().rollback();
                }
                em.close();
            }
        }
    }

    /**
     * Registers a replica location with a generated replica id.
     *
     * Fix: as in {@link #registerDataProduct}, the duplicated second pair of
     * creation/last-modified assignments is removed so a single timestamp is used.
     *
     * @return the generated replica id
     */
    @Override
    public String registerReplicaLocation(DataReplicaLocationModel dataReplicaLocationModel) throws ReplicaCatalogException {
        String replicaId = UUID.randomUUID().toString();
        dataReplicaLocationModel.setReplicaId(replicaId);
        long currentTime = System.currentTimeMillis();
        dataReplicaLocationModel.setCreationTime(currentTime);
        dataReplicaLocationModel.setLastModifiedTime(currentTime);
        DataReplicaLocation replicaLocation = ThriftDataModelConversion.getDataReplicaLocation(dataReplicaLocationModel);
        EntityManager em = null;
        try {
            em = ReplicaCatalogJPAUtils.getEntityManager();
            em.getTransaction().begin();
            em.persist(replicaLocation);
            em.getTransaction().commit();
            em.close();
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            throw new ReplicaCatalogException(e);
        } finally {
            if (em != null && em.isOpen()) {
                if (em.getTransaction().isActive()) {
                    em.getTransaction().rollback();
                }
                em.close();
            }
        }
        return replicaId;
    }

    /**
     * Deletes a replica location by id.
     *
     * @return true if it existed and was removed, false if it was not found
     */
    @Override
    public boolean removeReplicaLocation(String replicaId) throws ReplicaCatalogException {
        EntityManager em = null;
        try {
            em = ReplicaCatalogJPAUtils.getEntityManager();
            DataReplicaLocation replicaLocation = em.find(DataReplicaLocation.class, replicaId);
            if (replicaLocation == null) {
                return false;
            }
            em.getTransaction().begin();
            em.remove(replicaLocation);
            em.getTransaction().commit();
            em.close();
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            throw new ReplicaCatalogException(e);
        } finally {
            if (em != null && em.isOpen()) {
                if (em.getTransaction().isActive()) {
                    em.getTransaction().rollback();
                }
                em.close();
            }
        }
        return true;
    }

    /**
     * Updates an existing replica location; its stored creation time is preserved and the
     * last-modified time is refreshed.
     *
     * @return true if it existed and was updated, false if it was not found
     */
    @Override
    public boolean updateReplicaLocation(DataReplicaLocationModel dataReplicaLocationModel) throws ReplicaCatalogException {
        EntityManager em = null;
        try {
            em = ReplicaCatalogJPAUtils.getEntityManager();
            DataReplicaLocation dataReplicaLocation = em.find(DataReplicaLocation.class, dataReplicaLocationModel.getReplicaId());
            if (dataReplicaLocation == null) {
                return false;
            }
            em.getTransaction().begin();
            dataReplicaLocationModel.setCreationTime(dataReplicaLocation.getCreationTime().getTime());
            dataReplicaLocationModel.setLastModifiedTime(System.currentTimeMillis());
            em.merge(ThriftDataModelConversion.getUpdatedDataReplicaLocation(dataReplicaLocationModel, dataReplicaLocation));
            em.getTransaction().commit();
            em.close();
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            throw new ReplicaCatalogException(e);
        } finally {
            if (em != null && em.isOpen()) {
                if (em.getTransaction().isActive()) {
                    em.getTransaction().rollback();
                }
                em.close();
            }
        }
        return true;
    }

    /** Fetches a replica location by id, converted to its Thrift model. */
    @Override
    public DataReplicaLocationModel getReplicaLocation(String replicaId) throws ReplicaCatalogException {
        EntityManager em = null;
        try {
            em = ReplicaCatalogJPAUtils.getEntityManager();
            DataReplicaLocation replicaLocation = em.find(DataReplicaLocation.class, replicaId);
            return ThriftDataModelConversion.getDataReplicaLocationModel(replicaLocation);
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            throw new ReplicaCatalogException(e);
        } finally {
            if (em != null && em.isOpen()) {
                if (em.getTransaction().isActive()) {
                    em.getTransaction().rollback();
                }
                em.close();
            }
        }
    }

    /**
     * Lists all replica locations of a product.
     * NOTE: returns null (not an empty list) when the product does not exist — callers
     * depend on this contract, so it is documented rather than changed.
     */
    @Override
    public List<DataReplicaLocationModel> getAllReplicaLocations(String productUri) throws ReplicaCatalogException {
        EntityManager em = null;
        try {
            em = ReplicaCatalogJPAUtils.getEntityManager();
            DataProduct dataProduct = em.find(DataProduct.class, productUri);
            if (dataProduct == null) {
                return null;
            }
            ArrayList<DataReplicaLocationModel> dataReplicaLocationModels = new ArrayList<>();
            dataProduct.getDataReplicaLocations().forEach(rl -> dataReplicaLocationModels
                    .add(ThriftDataModelConversion.getDataReplicaLocationModel(rl)));
            return dataReplicaLocationModels;
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            throw new ReplicaCatalogException(e);
        } finally {
            if (em != null && em.isOpen()) {
                if (em.getTransaction().isActive()) {
                    em.getTransaction().rollback();
                }
                em.close();
            }
        }
    }

    /**
     * Returns the parent product of the given product, converted to its Thrift model.
     * NOTE(review): throws (wrapped NPE) if the product itself does not exist — behavior
     * kept as-is; confirm whether callers guard with {@link #isExists} first.
     */
    @Override
    public DataProductModel getParentDataProduct(String productUri) throws ReplicaCatalogException {
        EntityManager em = null;
        try {
            em = ReplicaCatalogJPAUtils.getEntityManager();
            DataProduct dataProduct = em.find(DataProduct.class, productUri);
            return ThriftDataModelConversion.getDataProductModel(dataProduct.getParentDataProduct());
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            throw new ReplicaCatalogException(e);
        } finally {
            if (em != null && em.isOpen()) {
                if (em.getTransaction().isActive()) {
                    em.getTransaction().rollback();
                }
                em.close();
            }
        }
    }

    /**
     * Returns the direct children of the given product, converted to Thrift models.
     * NOTE(review): like {@link #getParentDataProduct}, assumes the product exists.
     */
    @Override
    public List<DataProductModel> getChildDataProducts(String productUri) throws ReplicaCatalogException {
        EntityManager em = null;
        try {
            em = ReplicaCatalogJPAUtils.getEntityManager();
            DataProduct dataProduct = em.find(DataProduct.class, productUri);
            Collection<DataProduct> childProducts = dataProduct.getChildDataProducts();
            ArrayList<DataProductModel> returnModels = new ArrayList<>();
            childProducts.forEach(cp -> returnModels.add(ThriftDataModelConversion.getDataProductModel(cp)));
            return returnModels;
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            throw new ReplicaCatalogException(e);
        } finally {
            if (em != null && em.isOpen()) {
                if (em.getTransaction().isActive()) {
                    em.getTransaction().rollback();
                }
                em.close();
            }
        }
    }
}
| |
package com.mycompany.myapp.web.rest;
import com.mycompany.myapp.BookstoreApp;
import com.mycompany.myapp.domain.Authority;
import com.mycompany.myapp.domain.User;
import com.mycompany.myapp.repository.AuthorityRepository;
import com.mycompany.myapp.repository.UserRepository;
import com.mycompany.myapp.security.AuthoritiesConstants;
import com.mycompany.myapp.service.MailService;
import com.mycompany.myapp.service.UserService;
import com.mycompany.myapp.service.dto.UserDTO;
import com.mycompany.myapp.web.rest.vm.ManagedUserVM;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import javax.inject.Inject;
import javax.transaction.Transactional;
import java.util.*;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.when;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
/**
* Test class for the AccountResource REST controller.
*
* @see UserService
*/
@RunWith(SpringRunner.class)
@SpringBootTest(classes = BookstoreApp.class)
public class AccountResourceIntTest {
@Inject
private UserRepository userRepository;
@Inject
private AuthorityRepository authorityRepository;
@Inject
private UserService userService;
@Mock
private UserService mockUserService;
@Mock
private MailService mockMailService;
private MockMvc restUserMockMvc;
private MockMvc restMvc;
@Before
public void setup() {
    MockitoAnnotations.initMocks(this);
    doNothing().when(mockMailService).sendActivationEmail((User) anyObject(), anyString());

    // Resource wired with the real user service — used where real persistence matters.
    AccountResource realServiceResource = new AccountResource();
    ReflectionTestUtils.setField(realServiceResource, "userRepository", userRepository);
    ReflectionTestUtils.setField(realServiceResource, "userService", userService);
    ReflectionTestUtils.setField(realServiceResource, "mailService", mockMailService);
    this.restMvc = MockMvcBuilders.standaloneSetup(realServiceResource).build();

    // Resource wired with a mocked user service — used to script account lookups.
    AccountResource mockServiceResource = new AccountResource();
    ReflectionTestUtils.setField(mockServiceResource, "userRepository", userRepository);
    ReflectionTestUtils.setField(mockServiceResource, "userService", mockUserService);
    ReflectionTestUtils.setField(mockServiceResource, "mailService", mockMailService);
    this.restUserMockMvc = MockMvcBuilders.standaloneSetup(mockServiceResource).build();
}
@Test
public void testNonAuthenticatedUser() throws Exception {
    // With no remote user on the request, /api/authenticate answers 200 with an empty body.
    restUserMockMvc
        .perform(get("/api/authenticate").accept(MediaType.APPLICATION_JSON))
        .andExpect(status().isOk())
        .andExpect(content().string(""));
}
@Test
public void testAuthenticatedUser() throws Exception {
    // Attach a remote user to the request; the endpoint must echo the login back.
    restUserMockMvc
        .perform(get("/api/authenticate")
            .with(req -> {
                req.setRemoteUser("test");
                return req;
            })
            .accept(MediaType.APPLICATION_JSON))
        .andExpect(status().isOk())
        .andExpect(content().string("test"));
}
@Test
public void testGetExistingAccount() throws Exception {
    // Build an ADMIN user and have the mocked service return it.
    Authority adminAuthority = new Authority();
    adminAuthority.setName(AuthoritiesConstants.ADMIN);
    Set<Authority> grantedAuthorities = new HashSet<>();
    grantedAuthorities.add(adminAuthority);

    User account = new User();
    account.setLogin("test");
    account.setFirstName("john");
    account.setLastName("doe");
    account.setEmail("john.doe@jhipter.com");
    account.setAuthorities(grantedAuthorities);
    when(mockUserService.getUserWithAuthorities()).thenReturn(account);

    // /api/account must render every field of the current user as JSON.
    restUserMockMvc
        .perform(get("/api/account").accept(MediaType.APPLICATION_JSON))
        .andExpect(status().isOk())
        .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
        .andExpect(jsonPath("$.login").value("test"))
        .andExpect(jsonPath("$.firstName").value("john"))
        .andExpect(jsonPath("$.lastName").value("doe"))
        .andExpect(jsonPath("$.email").value("john.doe@jhipter.com"))
        .andExpect(jsonPath("$.authorities").value(AuthoritiesConstants.ADMIN));
}
@Test
public void testGetUnknownAccount() throws Exception {
    // With no current user available, the account endpoint reports an
    // internal server error rather than an empty payload.
    when(mockUserService.getUserWithAuthorities()).thenReturn(null);

    restUserMockMvc.perform(get("/api/account").accept(MediaType.APPLICATION_JSON))
        .andExpect(status().isInternalServerError());
}
@Test
@Transactional
public void testRegisterValid() throws Exception {
    // A well-formed registration payload must create the account.
    Set<String> roles = new HashSet<>(Arrays.asList(AuthoritiesConstants.USER));
    ManagedUserVM validUser = new ManagedUserVM(
        null,               // id
        "joe",              // login
        "password",         // password
        "Joe",              // firstName
        "Shmoe",            // lastName
        "joe@example.com",  // e-mail
        true,               // activated
        "en",               // langKey
        roles,
        null,               // createdBy
        null,               // createdDate
        null,               // lastModifiedBy
        null                // lastModifiedDate
    );

    restMvc.perform(post("/api/register")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(validUser)))
        .andExpect(status().isCreated());

    // The user is now present in the repository.
    assertThat(userRepository.findOneByLogin("joe").isPresent()).isTrue();
}
@Test
@Transactional
public void testRegisterInvalidLogin() throws Exception {
    // The login contains characters outside the allowed pattern, so bean
    // validation must reject the request before anything is persisted.
    Set<String> roles = new HashSet<>(Arrays.asList(AuthoritiesConstants.USER));
    ManagedUserVM invalidUser = new ManagedUserVM(
        null,                 // id
        "funky-log!n",        // login <-- invalid
        "password",           // password
        "Funky",              // firstName
        "One",                // lastName
        "funky@example.com",  // e-mail
        true,                 // activated
        "en",                 // langKey
        roles,
        null,                 // createdBy
        null,                 // createdDate
        null,                 // lastModifiedBy
        null                  // lastModifiedDate
    );

    restUserMockMvc.perform(post("/api/register")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(invalidUser)))
        .andExpect(status().isBadRequest());

    // Nothing was saved for the rejected registration.
    assertThat(userRepository.findOneByEmail("funky@example.com").isPresent()).isFalse();
}
@Test
@Transactional
public void testRegisterInvalidEmail() throws Exception {
    // A syntactically invalid e-mail address must fail bean validation
    // and leave no user behind.
    Set<String> roles = new HashSet<>(Arrays.asList(AuthoritiesConstants.USER));
    ManagedUserVM invalidUser = new ManagedUserVM(
        null,         // id
        "bob",        // login
        "password",   // password
        "Bob",        // firstName
        "Green",      // lastName
        "invalid",    // e-mail <-- invalid
        true,         // activated
        "en",         // langKey
        roles,
        null,         // createdBy
        null,         // createdDate
        null,         // lastModifiedBy
        null          // lastModifiedDate
    );

    restUserMockMvc.perform(post("/api/register")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(invalidUser)))
        .andExpect(status().isBadRequest());

    assertThat(userRepository.findOneByLogin("bob").isPresent()).isFalse();
}
@Test
@Transactional
public void testRegisterInvalidPassword() throws Exception {
    // A password shorter than the minimum length must fail validation
    // and leave no user behind.
    Set<String> roles = new HashSet<>(Arrays.asList(AuthoritiesConstants.USER));
    ManagedUserVM invalidUser = new ManagedUserVM(
        null,               // id
        "bob",              // login
        "123",              // password with only 3 digits
        "Bob",              // firstName
        "Green",            // lastName
        "bob@example.com",  // e-mail
        true,               // activated
        "en",               // langKey
        roles,
        null,               // createdBy
        null,               // createdDate
        null,               // lastModifiedBy
        null                // lastModifiedDate
    );

    restUserMockMvc.perform(post("/api/register")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(invalidUser)))
        .andExpect(status().isBadRequest());

    assertThat(userRepository.findOneByLogin("bob").isPresent()).isFalse();
}
@Test
@Transactional
public void testRegisterDuplicateLogin() throws Exception {
    // First registration: a fully valid user.
    ManagedUserVM validUser = new ManagedUserVM(
        null,                  // id
        "alice",               // login
        "password",            // password
        "Alice",               // firstName
        "Something",           // lastName
        "alice@example.com",   // e-mail
        true,                  // activated
        "en",                  // langKey
        new HashSet<>(Arrays.asList(AuthoritiesConstants.USER)),
        null,                  // createdBy
        null,                  // createdDate
        null,                  // lastModifiedBy
        null                   // lastModifiedDate
    );
    // Duplicate login, different e-mail.
    // Fixed: the first name is copied with getFirstName() (it was mistakenly
    // validUser.getLogin()), so the duplicate really differs only by e-mail.
    ManagedUserVM duplicatedUser = new ManagedUserVM(validUser.getId(), validUser.getLogin(), validUser.getPassword(), validUser.getFirstName(), validUser.getLastName(),
        "alicejr@example.com", true, validUser.getLangKey(), validUser.getAuthorities(), validUser.getCreatedBy(), validUser.getCreatedDate(), validUser.getLastModifiedBy(), validUser.getLastModifiedDate());

    // Good user registers successfully.
    restMvc.perform(
        post("/api/register")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(validUser)))
        .andExpect(status().isCreated());

    // The duplicate login is rejected with a client error...
    restMvc.perform(
        post("/api/register")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(duplicatedUser)))
        .andExpect(status().is4xxClientError());

    // ...and the second e-mail was never persisted.
    Optional<User> userDup = userRepository.findOneByEmail("alicejr@example.com");
    assertThat(userDup.isPresent()).isFalse();
}
@Test
@Transactional
public void testRegisterDuplicateEmail() throws Exception {
    // First registration: a fully valid user.
    ManagedUserVM validUser = new ManagedUserVM(
        null,                // id
        "john",              // login
        "password",          // password
        "John",              // firstName
        "Doe",               // lastName
        "john@example.com",  // e-mail
        true,                // activated
        "en",                // langKey
        new HashSet<>(Arrays.asList(AuthoritiesConstants.USER)),
        null,                // createdBy
        null,                // createdDate
        null,                // lastModifiedBy
        null                 // lastModifiedDate
    );
    // Duplicate e-mail, different login.
    // Fixed: the first name is copied with getFirstName() (it was mistakenly
    // validUser.getLogin()), so the duplicate really differs only by login.
    ManagedUserVM duplicatedUser = new ManagedUserVM(validUser.getId(), "johnjr", validUser.getPassword(), validUser.getFirstName(), validUser.getLastName(),
        validUser.getEmail(), true, validUser.getLangKey(), validUser.getAuthorities(), validUser.getCreatedBy(), validUser.getCreatedDate(), validUser.getLastModifiedBy(), validUser.getLastModifiedDate());

    // Good user registers successfully.
    restMvc.perform(
        post("/api/register")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(validUser)))
        .andExpect(status().isCreated());

    // The duplicate e-mail is rejected with a client error...
    restMvc.perform(
        post("/api/register")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(duplicatedUser)))
        .andExpect(status().is4xxClientError());

    // ...and the second login was never persisted.
    Optional<User> userDup = userRepository.findOneByLogin("johnjr");
    assertThat(userDup.isPresent()).isFalse();
}
@Test
@Transactional
public void testRegisterAdminIsIgnored() throws Exception {
    // Self-registration may not grant ADMIN: the requested authority is
    // discarded and the created account ends up with plain USER only.
    Set<String> requestedRoles = new HashSet<>(Arrays.asList(AuthoritiesConstants.ADMIN));
    ManagedUserVM validUser = new ManagedUserVM(
        null,                  // id
        "badguy",              // login
        "password",            // password
        "Bad",                 // firstName
        "Guy",                 // lastName
        "badguy@example.com",  // e-mail
        true,                  // activated
        "en",                  // langKey
        requestedRoles,
        null,                  // createdBy
        null,                  // createdDate
        null,                  // lastModifiedBy
        null                   // lastModifiedDate
    );

    restMvc.perform(post("/api/register")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(validUser)))
        .andExpect(status().isCreated());

    Optional<User> created = userRepository.findOneByLogin("badguy");
    assertThat(created.isPresent()).isTrue();
    assertThat(created.get().getAuthorities()).hasSize(1)
        .containsExactly(authorityRepository.findOne(AuthoritiesConstants.USER));
}
@Test
@Transactional
public void testSaveInvalidLogin() throws Exception {
    // Saving the account with a malformed login must fail validation
    // and persist nothing.
    UserDTO invalidUser = new UserDTO(
        "funky-log!n",        // login <-- invalid
        "Funky",              // firstName
        "One",                // lastName
        "funky@example.com",  // e-mail
        true,                 // activated
        "en",                 // langKey
        new HashSet<>(Arrays.asList(AuthoritiesConstants.USER))
    );

    restUserMockMvc.perform(post("/api/account")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(invalidUser)))
        .andExpect(status().isBadRequest());

    assertThat(userRepository.findOneByEmail("funky@example.com").isPresent()).isFalse();
}
}
| |
/* The MIT License (MIT)
*
* Copyright (c) 2015 Reinventing Geospatial, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.rgi.common.coordinate.referencesystem.profile;
import com.rgi.common.BoundingBox;
import com.rgi.common.coordinate.Coordinate;
import com.rgi.common.coordinate.CoordinateReferenceSystem;
import com.rgi.common.coordinate.CrsCoordinate;
import com.rgi.common.tile.TileOrigin;
import com.rgi.common.tile.scheme.TileMatrixDimensions;
import com.rgi.common.util.BoundsUtility;
/**
* Ellipsoidal Mercator implementation of a coordinate reference system profile
*
* @author Luke Lambert
*
*/
public class EllipsoidalMercatorCrsProfile implements CrsProfile
{
    /**
     * Creates the standard WGS 84 / World Mercator (EPSG:3395) profile with
     * an unscaled equatorial radius.
     */
    public EllipsoidalMercatorCrsProfile()
    {
        this(1.0,
             new CoordinateReferenceSystem("EPSG", 3395));
    }

    /**
     * Constructor used to build slight variants of the Ellipsoidal Mercator
     * projection.
     *
     * @param earthEquatorialRadiusScaleFactor
     *             The scale factor for the equatorial radius of the earth
     * @param coordinateReferenceSystem
     *             The coordinate reference system that corresponds to this
     *             variant of the Ellipsoidal Mercator projection
     */
    protected EllipsoidalMercatorCrsProfile(final double earthEquatorialRadiusScaleFactor, final CoordinateReferenceSystem coordinateReferenceSystem)
    {
        this.coordinateReferenceSystem        = coordinateReferenceSystem;
        this.earthEquatorialRadiusScaleFactor = earthEquatorialRadiusScaleFactor;
        this.scaledEarthEquatorialRadius      = UnscaledEarthEquatorialRadius * earthEquatorialRadiusScaleFactor;
        this.earthEquatorialCircumference     = 2.0 * Math.PI * this.scaledEarthEquatorialRadius;

        // Per memo #12: if the level-0 tile is square, the extreme y values
        // must equal +/- pi * scaledEarthEquatorialRadius, so the polar
        // radius is intentionally NOT used for the y extremes (the old value
        // used Math.PI * scaledEarthPolarRadius).
        this.crsBounds = new BoundingBox(-Math.PI * this.scaledEarthEquatorialRadius,
                                         -Math.PI * this.scaledEarthEquatorialRadius,
                                          Math.PI * this.scaledEarthEquatorialRadius,
                                          Math.PI * this.scaledEarthEquatorialRadius);
    }

    /**
     * Converts a CRS (meters) coordinate to the tile that contains it.
     * The work is delegated to {@link GlobalGeodeticCrsProfile}: both the
     * bounds and the coordinate are first transformed to latitude/longitude
     * and tiled there.
     */
    @Override
    public Coordinate<Integer> crsToTileCoordinate(final CrsCoordinate        coordinate,
                                                   final BoundingBox          bounds,
                                                   final TileMatrixDimensions dimensions,
                                                   final TileOrigin           tileOrigin)
    {
        if(coordinate == null)
        {
            throw new IllegalArgumentException("Meter coordinate may not be null");
        }
        if(bounds == null)
        {
            throw new IllegalArgumentException("Bounds may not be null");
        }
        if(dimensions == null)
        {
            throw new IllegalArgumentException("Tile matrix dimensions may not be null");
        }
        if(tileOrigin == null)
        {
            throw new IllegalArgumentException("Origin may not be null");
        }
        if(!coordinate.getCoordinateReferenceSystem().equals(this.getCoordinateReferenceSystem()))
        {
            throw new IllegalArgumentException("Coordinate's coordinate reference system does not match the tile profile's coordinate reference system");
        }

        // The containment check is done on values rounded to this profile's
        // precision so that round-trip error from the latitude/longitude
        // conversion (~9 decimal places) does not spuriously reject boundary
        // coordinates; the unrounded coordinate is still used for the actual
        // conversion below.
        if(!BoundsUtility.contains(roundBounds(bounds, this.getPrecision()), roundCoordinate(coordinate, this.getPrecision()), tileOrigin))
        {
            throw new IllegalArgumentException("Coordinate is outside the crsBounds of this coordinate reference system");
        }

        // Convert to geodetic (latitude and longitude) in order to do the tiling
        final BoundingBox        geodeticBounds     = this.getBoundsInLatitudeLongitude(bounds);
        final Coordinate<Double> geodeticCoordinate = this.toGlobalGeodetic(coordinate);

        final GlobalGeodeticCrsProfile globalGeodetic = new GlobalGeodeticCrsProfile();

        return globalGeodetic.crsToTileCoordinate(new CrsCoordinate(geodeticCoordinate,
                                                                    globalGeodetic.getCoordinateReferenceSystem()),
                                                  geodeticBounds,
                                                  dimensions,
                                                  tileOrigin);
    }

    /**
     * Rounds both corners of a bounding box to the given number of decimal
     * places.
     */
    private static BoundingBox roundBounds(final BoundingBox bounds, final int precision)
    {
        final Coordinate<Double> lowerLeft  = roundCoordinate(bounds.getBottomLeft(), precision);
        final Coordinate<Double> upperRight = roundCoordinate(bounds.getTopRight(),   precision);

        return new BoundingBox(lowerLeft.getX(),
                               lowerLeft.getY(),
                               upperRight.getX(),
                               upperRight.getY());
    }

    /**
     * Converts a tile (column, row) back to a CRS (meters) coordinate by
     * tiling in the global geodetic profile and transforming the result to
     * meters.
     */
    @Override
    public CrsCoordinate tileToCrsCoordinate(final int                  column,
                                             final int                  row,
                                             final BoundingBox          bounds,
                                             final TileMatrixDimensions dimensions,
                                             final TileOrigin           tileOrigin)
    {
        if(bounds == null)
        {
            throw new IllegalArgumentException("Bounds may not be null");
        }
        if(dimensions == null)
        {
            throw new IllegalArgumentException("Tile matrix dimensions may not be null");
        }
        // NOTE(review): unlike getTileBounds, the dimensions.contains(column,
        // row) check was deliberately commented out in the original source
        // and is preserved as absent here — confirm whether out-of-range
        // tiles are intended to be accepted by this method.
        if(tileOrigin == null)
        {
            throw new IllegalArgumentException("Origin may not be null");
        }

        final GlobalGeodeticCrsProfile geodeticCrs        = new GlobalGeodeticCrsProfile();
        final BoundingBox              geodeticBounds     = this.getBoundsInLatitudeLongitude(bounds);
        final CrsCoordinate            geodeticCoordinate = geodeticCrs.tileToCrsCoordinate(column, row, geodeticBounds, dimensions, tileOrigin);
        final Coordinate<Double>       metersCoordinate   = this.fromGlobalGeodetic(geodeticCoordinate);

        return new CrsCoordinate(metersCoordinate.getX(),
                                 metersCoordinate.getY(),
                                 this.getCoordinateReferenceSystem());
    }

    /**
     * Computes the bounds of a single tile, in meters, by computing them in
     * the global geodetic profile and transforming the corners to meters.
     */
    @Override
    public BoundingBox getTileBounds(final int                  column,
                                     final int                  row,
                                     final BoundingBox          bounds,
                                     final TileMatrixDimensions dimensions,
                                     final TileOrigin           tileOrigin)
    {
        if(bounds == null)
        {
            throw new IllegalArgumentException("Bounds may not be null");
        }
        if(dimensions == null)
        {
            throw new IllegalArgumentException("Tile matrix dimensions may not be null");
        }
        if(!dimensions.contains(column, row))
        {
            throw new IllegalArgumentException("The row and column must be within the tile matrix dimensions");
        }
        if(tileOrigin == null)
        {
            throw new IllegalArgumentException("Origin may not be null");
        }

        final GlobalGeodeticCrsProfile geodeticCrs    = new GlobalGeodeticCrsProfile();
        final BoundingBox              geodeticBounds = this.getBoundsInLatitudeLongitude(bounds);

        return this.fromGlobalGeodetic(geodeticCrs.getTileBounds(column, row, geodeticBounds, dimensions, tileOrigin));
    }

    @Override
    public CoordinateReferenceSystem getCoordinateReferenceSystem()
    {
        return this.coordinateReferenceSystem;
    }

    @Override
    public String getName()
    {
        return "World Mercator";
    }

    @Override
    public String getWellKnownText()
    {
        return "PROJCS[\"WGS 84 / World Mercator\",GEOGCS[\"WGS 84\",DATUM[\"WGS_1984\",SPHEROID[\"WGS 84\",6378137,298.257223563,AUTHORITY[\"EPSG\",\"7030\"]],AUTHORITY[\"EPSG\",\"6326\"]],PRIMEM[\"Greenwich\",0,AUTHORITY[\"EPSG\",\"8901\"]],UNIT[\"degree\",0.01745329251994328,AUTHORITY[\"EPSG\",\"9122\"]],AUTHORITY[\"EPSG\",\"4326\"]],UNIT[\"metre\",1,AUTHORITY[\"EPSG\",\"9001\"]],PROJECTION[\"Mercator_1SP\"],PARAMETER[\"central_meridian\",0],PARAMETER[\"scale_factor\",1],PARAMETER[\"false_easting\",0],PARAMETER[\"false_northing\",0],AUTHORITY[\"EPSG\",\"3395\"],AXIS[\"Easting\",EAST],AXIS[\"Northing\",NORTH]]";
    }

    @Override
    public String getDescription()
    {
        return "World (Ellipsoidal) Mercator";
    }

    @Override
    public BoundingBox getBounds()
    {
        return this.crsBounds;
    }

    /**
     * Transforms both corners of a meters bounding box to latitude/longitude.
     */
    private BoundingBox getBoundsInLatitudeLongitude(final BoundingBox bounds)
    {
        final Coordinate<Double> lowerLeftGeodetic  = this.toGlobalGeodetic(bounds.getBottomLeft());
        final Coordinate<Double> upperRightGeodetic = this.toGlobalGeodetic(bounds.getTopRight());

        return new BoundingBox(lowerLeftGeodetic.getX(),
                               lowerLeftGeodetic.getY(),
                               upperRightGeodetic.getX(),
                               upperRightGeodetic.getY());
    }

    /**
     * Transforms a meters (EPSG:3395) coordinate to global geodetic
     * (EPSG:4326) decimal degrees.
     */
    @Override
    public Coordinate<Double> toGlobalGeodetic(final Coordinate<Double> coordinate)
    {
        return new Coordinate<>(toLongitude(coordinate.getX()),
                                toLatitude (coordinate.getY()));
    }

    @Override
    public int getPrecision()
    {
        return 2;
    }

    /**
     * Transform a coordinate from Global Geodetic (EPSG:4326) to WGS 84
     * Ellipsoid World Mercator EPSG(3395).
     *
     * <b>This is a temporary stopgap</b> implemented in lieu of a general
     * coordinate transformation mechanism. This method will be deprecated and
     * removed in future releases.
     *
     * @param coordinate
     *             Coordinate in global geodetic (EPSG:4326) decimal degrees
     * @return Returns a coordinate in Ellipsoidal Mercator (EPSG:3395) meters
     */
    public Coordinate<Double> fromGlobalGeodetic(final Coordinate<Double> coordinate)
    {
        final double lonInRadian = Math.toRadians(coordinate.getX());
        final double latInRadian = Math.toRadians(coordinate.getY());

        final double xmeter = this.scaledEarthEquatorialRadius * lonInRadian;
        // Ellipsoidal Mercator northing:
        //   y = a * (atanh(sin(lat)) - e * atanh(e * sin(lat)))
        final double ymeter = this.scaledEarthEquatorialRadius * inverseHyperbolicTangent(Math.sin(latInRadian)) -
                              this.scaledEarthEquatorialRadius * Eccentricity * inverseHyperbolicTangent(Eccentricity * Math.sin(latInRadian));

        return new Coordinate<>(xmeter, ymeter);
    }

    /**
     * Transforms both corners of a geodetic bounding box to meters.
     */
    private BoundingBox fromGlobalGeodetic(final BoundingBox bounds)
    {
        final Coordinate<Double> bottomLeft = this.fromGlobalGeodetic(bounds.getBottomLeft());
        final Coordinate<Double> topRight   = this.fromGlobalGeodetic(bounds.getTopRight());

        return new BoundingBox(bottomLeft.getX(),
                               bottomLeft.getY(),
                               topRight  .getX(),
                               topRight  .getY());
    }

    /**
     * Converts a meters X coordinate of WGS 84 Ellipsoid World Mercator
     * EPSG(3395) to its corresponding longitude value in degrees.
     *
     * Formula: longitude (radians) = meters / UnscaledEarthEquatorialRadius
     *
     * @param meters
     *             Meters in the EPSG:3395 coordinate reference system
     * @return longitude in degrees
     */
    private static double toLongitude(final double meters)
    {
        return Math.toDegrees(meters/UnscaledEarthEquatorialRadius);
    }

    /**
     * Converts a meters Y coordinate of WGS 84 Ellipsoid World Mercator
     * EPSG(3395) to its corresponding latitude value in degrees, via the
     * iterative inverse mapping conversion.
     *
     * @param meters
     *             Meters in the EPSG:3395 coordinate reference system
     * @return latitude in degrees
     */
    private static double toLatitude(final double meters)
    {
        return Math.toDegrees(inverseMappingConversionMetersToLatitude(meters));
    }

    /**
     * Inverse hyperbolic tangent:
     *
     * <pre>
     * atanh(x) = 0.5 * ln[(1 + x) / (1 - x)]
     * </pre>
     *
     * (This single helper replaces the two previously duplicated,
     * byte-identical implementations {@code inverseHyperbolicTangent} and
     * {@code atanh}.)
     *
     * @param x value in (-1, 1)
     * @return the inverse hyperbolic tangent of x
     */
    private static double inverseHyperbolicTangent(final double x)
    {
        return 0.5 * Math.log((1.0 + x) / (1.0 - x));
    }

    /**
     * Converts a y in meters to the latitude in radians using the inverse
     * mapping equation.
     * <p>
     * Recursion formula:
     * <pre>
     * s(1)   = tanh(y/a)
     * s(n+1) = tanh[y/a + e*atanh(e*s(n))]   for n = 1, 2, 3...
     * </pre>
     * Latitude (radians) = arcsin(s(n+1))
     * <p>
     * The iteration stops when |s(n+1) - s(n)| drops to the epsilon declared
     * below (1e-20), the level of accuracy deemed acceptable for this
     * conversion.
     * <p>
     * atanh is the inverse hyperbolic tangent.
     *
     * @param meters y value, in meters, in EPSG:3395
     * @return latitude in radians
     */
    private static double inverseMappingConversionMetersToLatitude(final double meters)
    {
        // s(1) calculation, set to previous
        double previous = Math.tanh(meters/UnscaledEarthEquatorialRadius);

        // Arbitrary initializations of next and difference
        double next       = 0;
        double difference = Double.MAX_VALUE;

        final double epsilon = 0.00000000000000000001;

        // Iterate until successive estimates agree to within epsilon
        while(Math.abs(difference) > epsilon)
        {
            // s(n+1) calculated by the recursion formula from s(n)
            next       = Math.tanh(((meters/UnscaledEarthEquatorialRadius) + (Eccentricity*inverseHyperbolicTangent(Eccentricity*previous))));
            difference = next - previous; // Calculate the conversion factor
            previous   = next;            // Set s(n) to s(n+1)
        }

        // Latitude = arcsin(s(n+1))
        return Math.asin(next);
    }

    /**
     * Rounds a coordinate's components to the given number of decimal places.
     */
    private static Coordinate<Double> roundCoordinate(final Coordinate<Double> value, final int precision)
    {
        final double divisor = Math.pow(10, precision);
        return new Coordinate<>(Math.round(value.getX()*divisor)/divisor, Math.round(value.getY()*divisor)/divisor);
    }

    /**
     * Datum's (WGS 84) spheroid's semi-major axis (radius of earth) in meters
     */
    public static final double UnscaledEarthEquatorialRadius = 6378137.0;

    /**
     * Datum's (WGS 84) spheroid's inverse flattening in meters
     */
    public static final double InverseFlattening = 298.257223563;

    /**
     * Flattening in meters
     */
    public static final double Flattening = 1.0/InverseFlattening;

    /**
     * Earth's (unscaled) polar radius.
     *
     * The datum's (WGS 84) spheroid is specified by the equatorial radius (a)
     * and the inverse flattening (1/f). The polar radius (b) is derived by
     * the relation of the equatorial radius to the inverse flattening:
     * <pre>
     *     a - b
     * f = -----
     *       a
     *
     *         a
     * 1/f = -----
     *       a - b
     *
     * :math:
     *
     *           a
     * b = a - ----
     *          1/f
     * </pre>
     */
    public static final double UnscaledEarthPolarRadius = UnscaledEarthEquatorialRadius - (UnscaledEarthEquatorialRadius/InverseFlattening);

    /**
     * Ellipsoidal eccentricity
     *
     * Defined by its relationship to the ellipsoidal flattening (f):
     * <pre>
     * e^2 = f(2 - f)
     *
     * or
     *        ________
     * e = +- \/f(2 - f)
     * </pre>
     *
     * @see <a href="https://en.wikipedia.org/wiki/Flattening#Identities_involving_flattening">Identities involving flattening</a>
     */
    public static final double Eccentricity = Math.sqrt(Flattening * (2 - Flattening));

    /**
     * Used to unify calculations for scaled and unscaled ellipsoidal mercator projections
     */
    @SuppressWarnings("unused")
    private final double earthEquatorialRadiusScaleFactor;

    /**
     * Scaled earth radius. Use this for all calculations that use the radius of the earth.
     */
    private final double scaledEarthEquatorialRadius;

    private final BoundingBox crsBounds;

    /**
     * Earth's equatorial circumference (based on the datum's spheroid's semi-major axis, radius) in meters
     */
    @SuppressWarnings("unused")
    private final double earthEquatorialCircumference;

    private final CoordinateReferenceSystem coordinateReferenceSystem;
}
| |
/*=========================================================================
* Copyright (c) 2010-2014 Pivotal Software, Inc. All Rights Reserved.
* This product is protected by U.S. and international copyright
* and intellectual property laws. Pivotal products are covered by
* one or more patents listed at http://www.pivotal.io/patents.
*=========================================================================
*/
/**
* Author: dschneider
* @since 8.1
*/
package com.gemstone.gemfire.internal.cache.tier.sockets.command;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import com.gemstone.gemfire.cache.DynamicRegionFactory;
import com.gemstone.gemfire.cache.RegionDestroyedException;
import com.gemstone.gemfire.cache.ResourceException;
import com.gemstone.gemfire.cache.client.internal.PutAllOp;
import com.gemstone.gemfire.cache.operations.RemoveAllOperationContext;
import com.gemstone.gemfire.distributed.internal.DistributionStats;
import com.gemstone.gemfire.internal.cache.EventID;
import com.gemstone.gemfire.internal.cache.LocalRegion;
import com.gemstone.gemfire.internal.cache.PartitionedRegion;
import com.gemstone.gemfire.internal.cache.PutAllPartialResultException;
import com.gemstone.gemfire.internal.cache.ha.ThreadIdentifier;
import com.gemstone.gemfire.internal.cache.tier.CachedRegionHelper;
import com.gemstone.gemfire.internal.cache.tier.Command;
import com.gemstone.gemfire.internal.cache.tier.MessageType;
import com.gemstone.gemfire.internal.cache.tier.sockets.BaseCommand;
import com.gemstone.gemfire.internal.cache.tier.sockets.CacheServerStats;
import com.gemstone.gemfire.internal.cache.tier.sockets.ChunkedMessage;
import com.gemstone.gemfire.internal.cache.tier.sockets.Message;
import com.gemstone.gemfire.internal.cache.tier.sockets.Part;
import com.gemstone.gemfire.internal.cache.tier.sockets.ServerConnection;
import com.gemstone.gemfire.internal.cache.tier.sockets.VersionedObjectList;
import com.gemstone.gemfire.internal.cache.versions.VersionTag;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.internal.logging.log4j.LocalizedMessage;
import com.gemstone.gemfire.internal.security.AuthorizeRequest;
import com.gemstone.gemfire.internal.util.Breadcrumbs;
public class RemoveAll extends BaseCommand {
// Single shared instance; clients obtain it through getCommand().
private final static RemoveAll singleton = new RemoveAll();

/**
 * Returns the singleton {@code RemoveAll} command instance.
 *
 * @return the shared command instance
 */
public static Command getCommand() {
    return singleton;
}

// Protected so the class is instantiated only via the singleton above,
// while still allowing subclassing.
protected RemoveAll() {
}
/**
 * Executes a client removeAll request: decodes the message parts (region
 * name, event id, flags, callback argument, keys, optional timeout),
 * performs authorization, applies the bulk remove to the region, and writes
 * a chunked reply — with refreshed PR metadata when the operation detected a
 * network hop.
 *
 * Message layout (parts): 0 = region name, 1 = event id, 2 = flags,
 * 3 = callback argument, 4 = number of keys, 5..5+n-1 = keys,
 * optional 5+n = request-specific timeout.
 *
 * @param msg      the removeAll request message from the client
 * @param servConn the server-side connection the request arrived on
 * @param startp   statistic start timestamp supplied by the dispatcher
 * @throws IOException          on failure writing the response
 * @throws InterruptedException if the executing thread is interrupted
 */
@Override
public void cmdExecute(Message msg, ServerConnection servConn, long startp)
    throws IOException, InterruptedException {
  long start = startp; // copy this since we need to modify it
  Part regionNamePart = null, numberOfKeysPart = null, keyPart = null;
  String regionName = null;
  int numberOfKeys = 0;
  Object key = null;
  Part eventPart = null;
  boolean replyWithMetaData = false;
  VersionedObjectList response = null;
  StringBuffer errMessage = new StringBuffer();
  CachedRegionHelper crHelper = servConn.getCachedRegionHelper();
  CacheServerStats stats = servConn.getCacheServerStats();

  // Test hook: artificially delay the server while remaining
  // cancellation-aware and preserving the thread's interrupt status.
  if (crHelper.emulateSlowServer() > 0) {
    boolean interrupted = Thread.interrupted();
    try {
      Thread.sleep(crHelper.emulateSlowServer());
    }
    catch (InterruptedException ugh) {
      interrupted = true;
      servConn.getCachedRegionHelper().getCache().getCancelCriterion()
          .checkCancelInProgress(ugh);
    }
    finally {
      if (interrupted) {
        Thread.currentThread().interrupt();
      }
    }
  }
  servConn.setAsTrue(REQUIRES_RESPONSE);
  servConn.setAsTrue(REQUIRES_CHUNKED_RESPONSE);
  // Record the time spent reading the request before processing begins.
  {
    long oldStart = start;
    start = DistributionStats.getStatTime();
    stats.incReadRemoveAllRequestTime(start - oldStart);
  }
  try {
    // Retrieve the data from the message parts
    // part 0: region name
    regionNamePart = msg.getPart(0);
    regionName = regionNamePart.getString();

    if (regionName == null) {
      String txt = LocalizedStrings.RemoveAll_THE_INPUT_REGION_NAME_FOR_THE_REMOVEALL_REQUEST_IS_NULL.toLocalizedString();
      logger.warn(LocalizedMessage.create(LocalizedStrings.TWO_ARG_COLON, new Object[] {servConn.getName(), txt}));
      errMessage.append(txt);
      writeChunkedErrorResponse(msg, MessageType.PUT_DATA_ERROR,
          errMessage.toString(), servConn);
      servConn.setAsTrue(RESPONDED);
      return;
    }

    LocalRegion region = (LocalRegion)crHelper.getRegion(regionName);
    if (region == null) {
      String reason = " was not found during removeAll request";
      writeRegionDestroyedEx(msg, regionName, reason, servConn);
      servConn.setAsTrue(RESPONDED);
      return;
    }

    // part 1: eventID — thread id and sequence id are read back-to-back
    // from the same optimized byte array.
    eventPart = msg.getPart(1);
    ByteBuffer eventIdPartsBuffer = ByteBuffer.wrap(eventPart
        .getSerializedForm());
    long threadId = EventID
        .readEventIdPartsFromOptmizedByteArray(eventIdPartsBuffer);
    long sequenceId = EventID
        .readEventIdPartsFromOptmizedByteArray(eventIdPartsBuffer);
    EventID eventId = new EventID(servConn.getEventMemberIDByteArray(),
        threadId, sequenceId);
    Breadcrumbs.setEventId(eventId);

    // part 2: flags — removeAll reuses the putAll flag constants.
    int flags = msg.getPart(2).getInt();
    boolean clientIsEmpty = (flags & PutAllOp.FLAG_EMPTY) != 0;
    boolean clientHasCCEnabled = (flags & PutAllOp.FLAG_CONCURRENCY_CHECKS) != 0;

    // part 3: callbackArg
    Object callbackArg = msg.getPart(3).getObject();

    // part 4: number of keys
    numberOfKeysPart = msg.getPart(4);
    numberOfKeys = numberOfKeysPart.getInt();

    if (logger.isDebugEnabled()) {
      StringBuilder buffer = new StringBuilder();
      buffer
        .append(servConn.getName())
        .append(": Received removeAll request from ")
        .append(servConn.getSocketString())
        .append(" for region ")
        .append(regionName)
        .append(callbackArg != null ? (" callbackArg " + callbackArg) : "")
        .append(" with ")
        .append(numberOfKeys)
        .append(" keys.");
      logger.debug(buffer);
    }
    // Collect the keys and, for retried ops, the version tags recorded for
    // the original attempt (one slot per key; null when not a retry).
    ArrayList<Object> keys = new ArrayList<Object>(numberOfKeys);
    ArrayList<VersionTag> retryVersions = new ArrayList<VersionTag>(numberOfKeys);
    for (int i=0; i<numberOfKeys; i++) {
      keyPart = msg.getPart(5+i);
      key = keyPart.getStringOrObject();
      if (key == null) {
        String txt = LocalizedStrings.RemoveAll_ONE_OF_THE_INPUT_KEYS_FOR_THE_REMOVEALL_REQUEST_IS_NULL.toLocalizedString();
        logger.warn(LocalizedMessage.create(LocalizedStrings.TWO_ARG_COLON, new Object[] {servConn.getName(), txt}));
        errMessage.append(txt);
        writeChunkedErrorResponse(msg, MessageType.PUT_DATA_ERROR,
            errMessage.toString(), servConn);
        servConn.setAsTrue(RESPONDED);
        return;
      }

      if (msg.isRetry()) {
        //Constuct the thread id/sequence id information for this element of the bulk op
        //The sequence id is constructed from the base sequence id and the offset
        EventID entryEventId= new EventID(eventId, i);

        //For PRs, the thread id assigned as a fake thread id.
        if(region instanceof PartitionedRegion) {
          PartitionedRegion pr = (PartitionedRegion) region;
          int bucketId = pr.getKeyInfo(key).getBucketId();
          long entryThreadId = ThreadIdentifier.createFakeThreadIDForBulkOp(bucketId, entryEventId.getThreadID());
          entryEventId = new EventID(entryEventId.getMembershipID(), entryThreadId, entryEventId.getSequenceID());
        }

        VersionTag tag = findVersionTagsForRetriedBulkOp(region, entryEventId);
        retryVersions.add(tag);
        //FIND THE VERSION TAG FOR THIS KEY - but how? all we have is the
        // removeAll eventId, not individual eventIds for entries, right?
      } else {
        retryVersions.add(null);
      }
      keys.add(key);
    } // for

    // One extra part beyond the keys means an optional per-request timeout
    // was appended by the client.
    if ( msg.getNumberOfParts() == ( 5 + numberOfKeys + 1) ) {
      int timeout = msg.getPart(5 + numberOfKeys).getInt();
      servConn.setRequestSpecificTimeout(timeout);
    }

    AuthorizeRequest authzRequest = servConn.getAuthzRequest();
    if (authzRequest != null) {
      // TODO SW: This is to handle DynamicRegionFactory create
      // calls. Rework this when the semantics of DynamicRegionFactory
      // are cleaned up.
      if (DynamicRegionFactory.regionIsDynamicRegionList(regionName)) {
        authzRequest.createRegionAuthorize(regionName);
      }
      else {
        // Authorization may substitute a different callback argument.
        RemoveAllOperationContext removeAllContext = authzRequest.removeAllAuthorize(regionName, keys, callbackArg);
        callbackArg = removeAllContext.getCallbackArg();
      }
    }

    // Apply the bulk remove; the returned list carries per-key versions.
    response = region.basicBridgeRemoveAll(keys, retryVersions, servConn.getProxyID(), eventId, callbackArg);
    if (!region.getConcurrencyChecksEnabled() || clientIsEmpty || !clientHasCCEnabled) {
      // the client only needs this if versioning is being used and the client
      // has storage
      if (logger.isTraceEnabled()) {
        logger.trace("setting removeAll response to null. region-cc-enabled={}; clientIsEmpty={}; client-cc-enabled={}",
            region.getConcurrencyChecksEnabled(), clientIsEmpty, clientHasCCEnabled);
      }
      response = null;
    }

    // A network hop on a partitioned region means the client's metadata is
    // stale: send the reply with refreshed metadata and reset the flags.
    if (region instanceof PartitionedRegion) {
      PartitionedRegion pr = (PartitionedRegion)region;
      if (pr.isNetworkHop().byteValue() != 0) {
        writeReplyWithRefreshMetadata(msg, response, servConn, pr, pr.isNetworkHop());
        pr.setIsNetworkHop(Byte.valueOf((byte)0));
        pr.setMetadataVersion(Byte.valueOf((byte)0));
        replyWithMetaData = true;
      }
    }
  }
  catch (RegionDestroyedException rde) {
    writeChunkedException(msg, rde, false, servConn);
    servConn.setAsTrue(RESPONDED);
    return;
  }
  catch (ResourceException re) {
    writeChunkedException(msg, re, false, servConn);
    servConn.setAsTrue(RESPONDED);
    return;
  }
  catch (PutAllPartialResultException pre) {
    writeChunkedException(msg, pre, false, servConn);
    servConn.setAsTrue(RESPONDED);
    return;
  }
  catch (Exception ce) {
    // If an interrupted exception is thrown, rethrow it
    checkForInterrupt(servConn, ce);

    // If an exception occurs during the op, preserve the connection
    writeChunkedException(msg, ce, false, servConn);
    servConn.setAsTrue(RESPONDED);
    logger.warn(LocalizedMessage.create(LocalizedStrings.Generic_0_UNEXPECTED_EXCEPTION, servConn.getName()), ce);
    return;
  }
  finally {
    // Record the processing time even on the error paths above.
    long oldStart = start;
    start = DistributionStats.getStatTime();
    stats.incProcessRemoveAllTime(start - oldStart);
  }
  if (logger.isDebugEnabled()) {
    logger.debug("{}: Sending removeAll response back to {} for region {}{}", servConn.getName(),
        servConn.getSocketString(), regionName, (logger.isTraceEnabled()? ": " + response : ""));
  }

  // Increment statistics and write the reply (unless it already went out
  // with refreshed metadata above).
  if (!replyWithMetaData) {
    writeReply(msg, response, servConn);
  }
  servConn.setAsTrue(RESPONDED);
  stats.incWriteRemoveAllResponseTime(DistributionStats.getStatTime() - start);
}
@Override
protected void writeReply(Message origMsg, ServerConnection servConn)
    throws IOException {
  // RemoveAll replies always carry a VersionedObjectList; callers must use
  // writeReply(Message, VersionedObjectList, ServerConnection) instead.
  throw new UnsupportedOperationException();
}
/**
 * Writes the removeAll reply as a chunked response: a plain header first,
 * then the version list streamed in chunks via its {@code Chunker}, so a
 * large list never has to be serialized into a single message part.
 *
 * @param origMsg  request message; supplies the transaction id echoed back
 * @param response version information for the removed entries, or null when
 *                 versioning is disabled or the client stores nothing
 * @param servConn connection to write the reply to
 * @throws IOException if writing a chunk to the socket fails
 */
protected void writeReply(Message origMsg, VersionedObjectList response,
    ServerConnection servConn) throws IOException {
  servConn.getCache().getCancelCriterion().checkCancelInProgress(null);
  ChunkedMessage replyMsg = servConn.getChunkedResponseMessage();
  replyMsg.setMessageType(MessageType.RESPONSE);
  replyMsg.setTransactionId(origMsg.getTransactionId());
  int listSize = (response == null) ? 0 : response.size();
  if (response != null) {
    // the client already knows the keys it sent; drop them from the reply
    response.setKeys(null);
  }
  if (logger.isDebugEnabled()) {
    logger.debug("sending chunked response header. version list size={}{}", listSize,
        (logger.isTraceEnabled()? " list=" + response : ""));
  }
  replyMsg.sendHeader();
  if (listSize > 0) {
    int chunkSize = 2*maximumChunkSize;
    // Chunker will stream over the list in its toData method
    VersionedObjectList.Chunker chunk = new VersionedObjectList.Chunker(response, chunkSize, false, false);
    for (int i=0; i<listSize; i+=chunkSize) {
      boolean lastChunk = (i+chunkSize >= listSize);
      replyMsg.setNumberOfParts(1); // resets the message for reuse on the next chunk
      replyMsg.setMessageType(MessageType.RESPONSE);
      replyMsg.setLastChunk(lastChunk);
      replyMsg.setTransactionId(origMsg.getTransactionId());
      replyMsg.addObjPart(chunk);
      if (logger.isDebugEnabled()) {
        logger.debug("sending chunk at index {} last chunk={} numParts={}",i , lastChunk, replyMsg.getNumberOfParts());
      }
      replyMsg.sendChunk(servConn);
    }
  } else {
    if (logger.isDebugEnabled()) {
      logger.debug("sending only header");
    }
    // empty list: still send one (null) chunk so the client sees a complete message
    replyMsg.addObjPart(null);
    replyMsg.setLastChunk(true);
    replyMsg.sendChunk(servConn);
  }
  servConn.setAsTrue(RESPONDED);
  if (logger.isTraceEnabled()) {
    logger.trace("{}: rpl tx: {}", servConn.getName(), origMsg.getTransactionId());
  }
}
@Override
protected void writeReplyWithRefreshMetadata(Message origMsg,
    ServerConnection servConn, PartitionedRegion pr, byte nwHop) throws IOException {
  // Not used by removeAll; the private overload taking a VersionedObjectList
  // response is invoked instead.
  throw new UnsupportedOperationException();
}
/**
 * Variant of {@code writeReply} used when the operation detected a network
 * hop on a partitioned region. The first chunk carries the PR metadata
 * version and the hop byte so the client can refresh its bucket metadata;
 * the version list (if any) follows in subsequent chunks.
 *
 * @param origMsg  request message; supplies the transaction id echoed back
 * @param response version information for the removed entries, or null
 * @param servConn connection to write the reply to
 * @param pr       partitioned region whose metadata version is sent
 * @param nwHop    network-hop indicator byte forwarded to the client
 * @throws IOException if writing a chunk to the socket fails
 */
private void writeReplyWithRefreshMetadata(Message origMsg,
    VersionedObjectList response, ServerConnection servConn,
    PartitionedRegion pr, byte nwHop) throws IOException {
  servConn.getCache().getCancelCriterion().checkCancelInProgress(null);
  ChunkedMessage replyMsg = servConn.getChunkedResponseMessage();
  replyMsg.setMessageType(MessageType.RESPONSE);
  replyMsg.setTransactionId(origMsg.getTransactionId());
  replyMsg.sendHeader();
  int listSize = (response == null) ? 0 : response.size();
  if (logger.isDebugEnabled()) {
    logger.debug("sending chunked response header with metadata refresh status. Version list size = {}{}",
        listSize, (logger.isTraceEnabled()? "; list=" + response : ""));
  }
  if (response != null) {
    // the client already knows the keys it sent; drop them from the reply
    response.setKeys(null);
  }
  replyMsg.setNumberOfParts(1);
  replyMsg.setTransactionId(origMsg.getTransactionId());
  // first part: metadata version + network-hop byte for client-side refresh
  replyMsg.addBytesPart(new byte[]{pr.getMetadataVersion().byteValue(), nwHop});
  if (listSize > 0) {
    replyMsg.setLastChunk(false);
    replyMsg.sendChunk(servConn);
    int chunkSize = 2*maximumChunkSize; // maximumChunkSize
    // Chunker will stream over the list in its toData method
    VersionedObjectList.Chunker chunk = new VersionedObjectList.Chunker(response, chunkSize, false, false);
    for (int i=0; i<listSize; i+=chunkSize) {
      boolean lastChunk = (i+chunkSize >= listSize);
      replyMsg.setNumberOfParts(1); // resets the message
      replyMsg.setMessageType(MessageType.RESPONSE);
      replyMsg.setLastChunk(lastChunk);
      replyMsg.setTransactionId(origMsg.getTransactionId());
      replyMsg.addObjPart(chunk);
      if (logger.isDebugEnabled()) {
        logger.debug("sending chunk at index {} last chunk={} numParts={}", i, lastChunk, replyMsg.getNumberOfParts());
      }
      replyMsg.sendChunk(servConn);
    }
  } else {
    replyMsg.setLastChunk(true);
    if (logger.isDebugEnabled()) {
      logger.debug("sending first and only part of chunked message");
    }
    replyMsg.sendChunk(servConn);
  }
  pr.getPrStats().incPRMetaDataSentCount();
  if (logger.isTraceEnabled()) {
    // FIX: corrected typo in trace message (was "REFRESH_METADAT")
    logger.trace("{}: rpl with REFRESH_METADATA tx: {}", servConn.getName(), origMsg.getTransactionId());
  }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.extensions.markup.html.form.palette.component;
import java.util.Iterator;
import java.util.Map;
import org.apache.wicket.core.util.string.JavaScriptUtils;
import org.apache.wicket.extensions.markup.html.form.palette.Palette;
import org.apache.wicket.markup.ComponentTag;
import org.apache.wicket.markup.MarkupStream;
import org.apache.wicket.markup.html.form.FormComponent;
import org.apache.wicket.markup.html.form.IChoiceRenderer;
import org.apache.wicket.util.convert.IConverter;
import org.apache.wicket.util.string.Strings;
import org.apache.wicket.util.value.IValueMap;
/**
 * Renders the html {@code <option>} elements of a palette listbox, based on the
 * iterator returned by {@link #getOptionsIterator()} and the
 * {@link IChoiceRenderer} supplied by the owning palette.
 *
 * @param <T>
 * @author Igor Vaynberg ( ivaynberg )
 */
public abstract class AbstractOptions<T> extends FormComponent<T>
{
	private static final long serialVersionUID = 1L;

	/** palette that owns this options listbox */
	private final Palette<T> palette;

	/**
	 * @param id
	 *            component id
	 * @param palette
	 *            parent palette
	 */
	public AbstractOptions(final String id, final Palette<T> palette)
	{
		super(id);
		this.palette = palette;
		setOutputMarkupId(true);
	}

	protected Palette<T> getPalette()
	{
		return palette;
	}

	/**
	 * @return iterator over the choices to render as options
	 */
	protected abstract Iterator<T> getOptionsIterator();

	/**
	 * Builds the {@code <option>} markup for every choice and replaces the tag
	 * body with it.
	 *
	 * {@inheritDoc}
	 */
	@Override
	public void onComponentTagBody(final MarkupStream markupStream, final ComponentTag openTag)
	{
		final StringBuilder markup = new StringBuilder(128);
		final IChoiceRenderer<? super T> renderer = getPalette().getChoiceRenderer();
		// evaluated once up front, matching a single call per render
		final boolean localize = localizeDisplayValues();

		for (Iterator<T> it = getOptionsIterator(); it.hasNext();)
		{
			final T choice = it.next();

			// resolve id and display value before emitting any markup
			final CharSequence optionId = optionIdFor(choice, renderer);
			final CharSequence optionBody = displayValueFor(choice, renderer, localize);

			markup.append("\n<option value=\"").append(optionId).append("\"");

			final Map<String, String> extraAttributes = getAdditionalAttributes(choice);
			if (extraAttributes != null)
			{
				for (Map.Entry<String, String> attribute : extraAttributes.entrySet())
				{
					markup.append(' ')
						.append(attribute.getKey())
						.append("=\"")
						.append(attribute.getValue())
						.append("\"");
				}
			}
			markup.append(">").append(optionBody).append("</option>");
		}
		markup.append("\n");
		replaceComponentTagBody(markupStream, openTag, markup);
	}

	/**
	 * @return the option's id value, markup-escaped when escaping is enabled
	 */
	private CharSequence optionIdFor(final T choice, final IChoiceRenderer<? super T> renderer)
	{
		final String raw = renderer.getIdValue(choice, 0);
		return getEscapeModelStrings() ? Strings.escapeMarkup(raw) : raw;
	}

	/**
	 * @return the converted (and optionally localized / escaped) display value
	 */
	private CharSequence displayValueFor(final T choice, final IChoiceRenderer<? super T> renderer,
		final boolean localize)
	{
		final Object displayValue = renderer.getDisplayValue(choice);
		final Class<?> displayClass = displayValue == null ? null : displayValue.getClass();
		@SuppressWarnings("unchecked")
		final IConverter<Object> converter = (IConverter<Object>)getConverter(displayClass);
		String display = converter.convertToString(displayValue, getLocale());
		if (localize)
		{
			display = getLocalizer().getString(display, this, display);
		}
		return getEscapeModelStrings() ? Strings.escapeMarkup(display) : display;
	}

	/**
	 * Should display values be localized.
	 *
	 * @return default {@code true}
	 */
	protected boolean localizeDisplayValues()
	{
		return true;
	}

	/**
	 * @param choice
	 * @return map of attribute/value pairs (String/String)
	 */
	protected Map<String, String> getAdditionalAttributes(final T choice)
	{
		return null;
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	protected void onComponentTag(final ComponentTag tag)
	{
		checkComponentTag(tag, "select");
		super.onComponentTag(tag);

		final IValueMap attributes = tag.getAttributes();
		attributes.put("multiple", "multiple");
		attributes.put("size", getPalette().getRows());
		if (!getPalette().isPaletteEnabled())
		{
			attributes.put("disabled", "disabled");
		}
		avoidAjaxSerialization();
	}

	/**
	 * A piece of javascript to avoid serializing the options during AJAX serialization.
	 */
	protected void avoidAjaxSerialization()
	{
		getResponse().write(
			JavaScriptUtils.SCRIPT_OPEN_TAG +
				"if (typeof(Wicket) != \"undefined\" && typeof(Wicket.Form) != \"undefined\")" +
				" Wicket.Form.excludeFromAjaxSerialization." + getMarkupId() + "='true';" +
				JavaScriptUtils.SCRIPT_CLOSE_TAG);
	}

	/**
	 * No-op: this component does not write any model state.
	 *
	 * {@inheritDoc}
	 */
	@Override
	public void updateModel()
	{
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	protected String getModelValue()
	{
		return null;
	}
}
| |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.packager;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.codec.binary.Base64;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.DSpaceObject;
import org.dspace.content.crosswalk.AbstractPackagerWrappingCrosswalk;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.content.crosswalk.CrosswalkObjectNotSupported;
import org.dspace.content.crosswalk.MetadataValidationException;
import org.dspace.content.crosswalk.IngestionCrosswalk;
import org.dspace.content.crosswalk.StreamIngestionCrosswalk;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.PluginManager;
import org.jdom.Document;
import org.jdom.Content;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.Namespace;
import org.jdom.input.SAXBuilder;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;
import org.jdom.xpath.XPath;
/**
* <P>
* Manage the METS manifest document for METS importer classes,
* such as the package importer <code>org.dspace.content.packager.MetsSubmission</code>
* and the federated importer <code>org.dspace.app.mets.FederatedMETSImport</code>
* </P>
* <P>
* It can parse the METS document, build an internal model, and give the importers
* access to that model. It also crosswalks
* all of the descriptive and administrative metadata in the METS
* manifest into the target DSpace Item, under control of the importer.
* </P>
*
* <P>
* It reads the following DSpace Configuration entries:
* </P>
* <UL>
* <LI>Local XML schema (XSD) declarations, in the general format:
* <br><code>mets.xsd.<em>identifier</em> = <em>namespace</em> <em>xsd-URL</em></code>
* <br> eg. <code>mets.xsd.dc = http://purl.org/dc/elements/1.1/ dc.xsd</code>
* <br>Add a separate config entry for each schema.
* </LI>
* <p><LI>Crosswalk plugin mappings:
* These tell it the name of the crosswalk plugin to invoke for metadata sections
* with a particular value of <code>MDTYPE</code> (or <code>OTHERMDTYPE</code>)
* By default, the crosswalk mechanism will look for a plugin with the
* same name as the metadata type (e.g. <code>"MODS"</code>,
* <code>"DC"</code>). This example line invokes the <code>QDC</code>
* plugin when <code>MDTYPE="DC"</code>
* <br><code>mets.submission.crosswalk.DC = QDC </code>
* <br> general format is:
* <br><code>mets.submission.crosswalk.<em>mdType</em> = <em>pluginName</em> </code>
* </LI>
* </UL>
*
*
* @author Robert Tansley
* @author WeiHua Huang
* @author Rita Lee
* @author Larry Stone
* @see org.dspace.content.packager.MetsSubmission
* @see org.dspace.app.mets.FederatedMETSImport
*/
public class METSManifest
{
/**
 * Callback interface to retrieve data streams in mdRef elements.
 * "Package" or file reader returns an input stream for the
 * given relative path, e.g. to dereference <code>mdRef</code> elements.
 */
public interface Mdref
{
    /**
     * Make the contents of an external resource mentioned in
     * an <code>mdRef</code> element available as an <code>InputStream</code>.
     * The implementation must use the information in the
     * <code>mdRef</code> element, and the state in the object that
     * implements this interface, to find the actual metadata content.
     * <p>
     * For example, an implementation that ingests a directory of
     * files on the local filesystem would get a relative pathname
     * out of the <code>mdRef</code> and open that file.
     *
     * @param mdRef JDOM element of mdRef in the METS manifest.
     * @return stream containing the metadata mentioned in mdRef; never null.
     * @throws MetadataValidationException if the mdRef is unacceptable or missing required information.
     * @throws PackageValidationException if the referenced package resource fails validation.
     * @throws IOException if it is returned by services called by this method.
     * @throws SQLException if it is returned by services called by this method.
     * @throws AuthorizeException if it is returned by services called by this method.
     */
    public InputStream getInputStream(Element mdRef)
        throws MetadataValidationException, PackageValidationException,
               IOException, SQLException, AuthorizeException;
}
/** log4j category */
private static final Logger log = Logger.getLogger(METSManifest.class);

/** Canonical filename of METS manifest within a package or as a bitstream. */
public static final String MANIFEST_FILE = "mets.xml";

/** Prefix of DSpace configuration lines that map METS metadata type to
 * crosswalk plugin names.
 */
public static final String CONFIG_METS_PREFIX = "mets.";

/** Prefix of config lines identifying local XML Schema (XSD) files. */
private static final String CONFIG_XSD_PREFIX = CONFIG_METS_PREFIX+"xsd.";

/** Dublin Core element namespace. */
private static final Namespace dcNS = Namespace
    .getNamespace("http://purl.org/dc/elements/1.1/");

/** Dublin Core terms namespace (for qualified DC). */
private static final Namespace dcTermNS = Namespace
    .getNamespace("http://purl.org/dc/terms/");

/** METS namespace -- includes "mets" prefix for use in XPaths. */
public static final Namespace metsNS = Namespace
    .getNamespace("mets", "http://www.loc.gov/METS/");

/** XLink namespace -- includes "xlink" prefix for use in XPaths. */
public static final Namespace xlinkNS = Namespace
    .getNamespace("xlink", "http://www.w3.org/1999/xlink");

/** Root element of the current METS manifest. */
private Element mets = null;

/** All mdRef elements in the manifest; lazily built and cached by getMdFiles(). */
private List mdFiles = null;

/** &lt;file&gt; elements in "original" file group (bundle); lazily built and cached by getContentFiles(). */
private List<Element> contentFiles = null;

/** Builder to use for mdRef streams, inherited from create(). */
private SAXBuilder parser = null;

/** Name of packager who created this manifest object, for looking up configuration entries. */
private String configName;

// Space-separated "{namespace} {local-xsd-url}" pairs for the parser's
// external-schemaLocation property; built once in the static initializer
// below, since it depends only on the DSpace configuration.
private static String localSchemas;
// Build the localSchemas list once at class-load time, since it depends
// only on the static DSpace configuration.
static
{
    String dspace_dir = ConfigurationManager.getProperty("dspace.dir");
    File xsdPath1 = new File(dspace_dir+"/config/schemas/");
    File xsdPath2 = new File(dspace_dir+"/config/");

    @SuppressWarnings("unchecked")
    Enumeration<String> pe = (Enumeration<String>)ConfigurationManager.propertyNames();
    // StringBuilder: this runs single-threaded at class load, so the
    // synchronized StringBuffer is unnecessary.
    StringBuilder result = new StringBuilder();
    while (pe.hasMoreElements())
    {
        // config lines have the format:
        //  mets.xsd.{identifier} = {namespace} {xsd-URL}
        // e.g.
        //  mets.xsd.dc = http://purl.org/dc/elements/1.1/ dc.xsd
        // (filename is relative to {dspace_dir}/config/schemas/)
        String key = pe.nextElement();
        if (key.startsWith(CONFIG_XSD_PREFIX))
        {
            String spec = ConfigurationManager.getProperty(key);
            String val[] = spec.trim().split("\\s+");
            if (val.length == 2)
            {
                // look in config/schemas first, then fall back to config
                File xsd = new File(xsdPath1, val[1]);
                if (!xsd.exists())
                {
                    xsd = new File(xsdPath2, val[1]);
                }
                if (!xsd.exists())
                {
                    log.warn("Schema file not found for config entry=\"" + spec + "\"");
                }
                else
                {
                    try
                    {
                        // FIX: File.toURI().toURL() replaces the deprecated
                        // File.toURL(), which did not escape special
                        // characters (e.g. spaces) in the path.
                        String u = xsd.toURI().toURL().toString();
                        if (result.length() > 0)
                        {
                            result.append(" ");
                        }
                        result.append(val[0]).append(" ").append(u);
                    }
                    catch (java.net.MalformedURLException e)
                    {
                        log.warn("Skipping badly formed XSD URL: "+e.toString());
                    }
                }
            }
            else
            {
                log.warn("Schema config entry has wrong format, entry=\"" + spec + "\"");
            }
        }
    }
    localSchemas = result.toString();
    if (log.isDebugEnabled())
    {
        log.debug("Got local schemas = \"" + localSchemas + "\"");
    }
}
/**
 * Internal constructor; instances are obtained through {@link #create}.
 *
 * @param builder XML parser (for parsing mdRef'd files and binData)
 * @param mets parsed METS document's root element
 * @param configName name of the packager that created this manifest,
 *        used for configuration lookups
 */
private METSManifest(SAXBuilder builder, Element mets, String configName)
{
    this.parser = builder;
    this.mets = mets;
    this.configName = configName;
}
/**
 * Create a new manifest object from a serialized METS XML document.
 * Parse document read from the input stream, optionally validating.
 * @param is input stream containing serialized XML
 * @param validate if true, enable XML validation using schemas
 *   in document.  Also validates any sub-documents.
 * @param configName name of the packager, used to look up configuration entries
 * @throws MetadataValidationException if there is any error parsing
 *          or validating the METS.
 * @throws IOException if reading the stream fails.
 * @return new METSManifest object.
 */
public static METSManifest create(InputStream is, boolean validate, String configName)
        throws IOException,
               MetadataValidationException
{
    SAXBuilder builder = new SAXBuilder(validate);
    builder.setIgnoringElementContentWhitespace(true);
    // Set validation feature
    if (validate)
    {
        builder.setFeature("http://apache.org/xml/features/validation/schema", true);
    }
    // Tell the parser where local copies of schemas are, to speed up
    // validation.  Local XSDs are identified in the configuration file.
    if (localSchemas.length() > 0)
    {
        builder.setProperty("http://apache.org/xml/properties/schema/external-schemaLocation", localSchemas);
    }
    // Parse the METS file
    Document metsDocument;
    try
    {
        metsDocument = builder.build(is);
        /*** XXX leave commented out except if needed for
         *** viewing the METS document that actually gets read.
         *
         * XMLOutputter outputPretty = new XMLOutputter(Format.getPrettyFormat());
         * log.debug("Got METS DOCUMENT:");
         * log.debug(outputPretty.outputString(metsDocument));
         ****/
    }
    catch (JDOMException je)
    {
        // NOTE(review): is.toString() rarely identifies the stream usefully;
        // consider threading a source description through instead.
        throw new MetadataValidationException("Error validating METS in "
            + is.toString(), je);
    }
    return new METSManifest(builder, metsDocument.getRootElement(), configName);
}
/**
 * Gets name of the profile to which this METS document conforms.
 * @return value of the PROFILE attribute of the mets element, or null if none.
 */
public String getProfile()
{
    return mets.getAttributeValue("PROFILE");
}
/**
 * Return the OBJID attribute of the METS manifest.
 * This is where the Handle URI/URN of the object can be found.
 *
 * @return OBJID attribute of METS manifest, or null if none.
 */
public String getObjID()
{
    return mets.getAttributeValue("OBJID");
}
/**
 * Gets all <code>file</code> elements which make up
 * the item's content.  The result is computed on first call
 * and cached for subsequent calls.
 * @return a List of <code>Element</code>s.
 */
public List<Element> getContentFiles()
    throws MetadataValidationException
{
    if (contentFiles == null)
    {
        List<Element> files = new ArrayList<Element>();
        Element fileSec = mets.getChild("fileSec", metsNS);
        if (fileSec != null)
        {
            // collect every <file> under every <fileGrp>
            for (Iterator fgi = fileSec.getChildren("fileGrp", metsNS).iterator(); fgi.hasNext();)
            {
                Element fileGrp = (Element)fgi.next();
                for (Iterator fi = fileGrp.getChildren("file", metsNS).iterator(); fi.hasNext();)
                {
                    files.add((Element)fi.next());
                }
            }
        }
        contentFiles = files;
    }
    return contentFiles;
}
/**
 * Gets list of all <code>mdRef</code> elements in the METS
 * document.  Used by ingester to e.g. check that all
 * required files are present.  The result is computed on first
 * call and cached for subsequent calls.
 * @return a List of <code>Element</code>s.
 */
public List getMdFiles()
    throws MetadataValidationException
{
    if (mdFiles == null)
    {
        try
        {
            // Use a special namespace with known prefix
            // so we get the right prefix.
            XPath xpath = XPath.newInstance("descendant::mets:mdRef");
            xpath.addNamespace(metsNS);
            mdFiles = xpath.selectNodes(mets);
        }
        catch (JDOMException je)
        {
            throw new MetadataValidationException("Failed while searching for mdRef elements in manifest: ", je);
        }
    }
    return mdFiles;
}
/**
 * Get the "original" file element for a derived file.
 * Finds the original from which this was derived by matching the GROUPID
 * attribute that binds it to its original.  For instance, the file for
 * a thumbnail image would have the same GROUPID as its full-size version.
 * <p>
 * NOTE: This pattern of relating derived files through the GROUPID
 * attribute is peculiar to the DSpace METS SIP profile, and may not be
 * generally useful with other sorts of METS documents.
 * @param file METS file element of derived file
 * @return file path of original or null if none found.
 */
public String getOriginalFilePath(Element file)
{
    String groupID = file.getAttributeValue("GROUPID");
    if (groupID == null || groupID.equals(""))
    {
        // no GROUPID means there is no recorded original for this file
        return null;
    }
    try
    {
        // NOTE(review): groupID is interpolated straight into the XPath;
        // this assumes GROUPID values never contain a double-quote.
        XPath xpath = XPath.newInstance(
            "mets:fileSec/mets:fileGrp[@USE=\"CONTENT\"]/mets:file[@GROUPID=\""+groupID+"\"]");
        xpath.addNamespace(metsNS);
        List oFiles = xpath.selectNodes(mets);
        if (oFiles.size() > 0)
        {
            if (log.isDebugEnabled())
            {
                log.debug("Got ORIGINAL file for derived=" + file.toString());
            }
            // the FLocat child carries the actual file reference (xlink:href)
            Element flocat = ((Element)oFiles.get(0)).getChild("FLocat", metsNS);
            if (flocat != null)
            {
                return flocat.getAttributeValue("href", xlinkNS);
            }
        }
        return null;
    }
    catch (JDOMException je)
    {
        // an XPath failure is treated as "no original found"
        log.warn("Got exception on XPATH looking for Original file, "+je.toString());
        return null;
    }
}
// Translate a bundle name from METS to DSpace conventions: "CONTENT" maps
// to the DSpace content bundle and "MANIFESTMD" to the metadata bundle;
// every other name passes through unchanged.
private static String normalizeBundleName(String in)
{
    if (in.equals("CONTENT"))
    {
        return Constants.CONTENT_BUNDLE_NAME;
    }
    if (in.equals("MANIFESTMD"))
    {
        return Constants.METADATA_BUNDLE_NAME;
    }
    return in;
}
/**
 * Get the DSpace bundle name corresponding to the <code>USE</code>
 * attribute of the file group enclosing this <code>file</code> element.
 *
 * @param file a <code>file</code> element inside a <code>fileGrp</code>.
 * @return DSpace bundle name
 * @throws MetadataValidationException when there is no USE attribute on the enclosing fileGrp.
 */
public static String getBundleName(Element file)
    throws MetadataValidationException
{
    String use = file.getParentElement().getAttributeValue("USE");
    if (use == null)
    {
        throw new MetadataValidationException("Invalid METS Manifest: every fileGrp element must have a USE attribute.");
    }
    return normalizeBundleName(use);
}
/**
 * Get the "local" file name of this <code>file</code> or <code>mdRef</code> element.
 * By "local" we mean the reference to the actual resource containing
 * the data for this file, e.g. a relative path within a Zip or tar archive
 * if the METS is serving as a manifest for that sort of package.
 * @param file a <code>file</code> or <code>mdRef</code> element.
 * @return "local" file name (i.e. relative to package or content
 *  directory) corresponding to this <code>file</code> or <code>mdRef</code> element.
 * @throws MetadataValidationException when there is not enough information to find a resource identifier.
 */
public static String getFileName(Element file)
    throws MetadataValidationException
{
    Element ref;
    if (file.getName().equals("file"))
    {
        ref = file.getChild("FLocat", metsNS);
        if (ref == null)
        {
            // check for forbidden FContent child first:
            if (file.getChild("FContent", metsNS) == null)
            {
                throw new MetadataValidationException("Invalid METS Manifest: Every file element must have FLocat child.");
            }
            else
            {
                throw new MetadataValidationException("Invalid METS Manifest: file element has forbidden FContent child, only FLocat is allowed.");
            }
        }
    }
    else if (file.getName().equals("mdRef"))
    {
        // an mdRef element carries its own locator attributes
        ref = file;
    }
    else
    {
        // FIX: message previously read "called with recognized element type",
        // which inverted its meaning -- the element type is NOT recognized.
        throw new MetadataValidationException("getFileName() called with unrecognized element type: " + file.toString());
    }
    String loctype = ref.getAttributeValue("LOCTYPE");
    if (loctype != null && loctype.equals("URL"))
    {
        String result = ref.getAttributeValue("href", xlinkNS);
        if (result == null)
        {
            throw new MetadataValidationException("Invalid METS Manifest: FLocat/mdRef is missing the required xlink:href attribute.");
        }
        return result;
    }
    throw new MetadataValidationException("Invalid METS Manifest: FLocat/mdRef does not have LOCTYPE=\"URL\" attribute.");
}
/**
 * Returns file element corresponding to primary bitstream.
 * There is <i>ONLY</i> a primary bitstream if the first {@code div} under
 * first {@code structMap} has an {@code fptr}.
 *
 * @return file element of Item's primary bitstream, or null if there is none.
 * @throws MetadataValidationException if the fptr or its target file is malformed.
 */
public Element getPrimaryOrLogoBitstream()
    throws MetadataValidationException
{
    Element fptr = getObjStructDiv().getChild("fptr", metsNS);
    if (fptr == null)
    {
        // no fptr => no primary bitstream declared
        return null;
    }
    String fileId = fptr.getAttributeValue("FILEID");
    if (fileId == null)
    {
        throw new MetadataValidationException("fptr for Primary Bitstream is missing the required FILEID attribute.");
    }
    Element file = getElementByXPath("descendant::mets:file[@ID=\""+fileId+"\"]", false);
    if (file == null)
    {
        throw new MetadataValidationException("Cannot find file element for Primary Bitstream: looking for ID=" + fileId);
    }
    return file;
}
/**
 * Get the metadata type from within a *mdSec element.
 * The type is taken from MDTYPE, or from OTHERMDTYPE when MDTYPE is "OTHER".
 * @param mdSec a *mdSec element with an mdRef or mdWrap child.
 * @return metadata type name.
 * @throws MetadataValidationException when no type can be determined.
 */
public String getMdType(Element mdSec)
    throws MetadataValidationException
{
    Element md = mdSec.getChild("mdRef", metsNS);
    if (md == null)
    {
        md = mdSec.getChild("mdWrap", metsNS);
        if (md == null)
        {
            throw new MetadataValidationException("Invalid METS Manifest: ?mdSec element has neither mdRef nor mdWrap child.");
        }
    }
    String type = md.getAttributeValue("MDTYPE");
    if ("OTHER".equals(type))
    {
        // "OTHER" defers to the OTHERMDTYPE attribute for the real name
        type = md.getAttributeValue("OTHERMDTYPE");
    }
    if (type == null)
    {
        throw new MetadataValidationException("Invalid METS Manifest: " + md.getName() + " has no MDTYPE or OTHERMDTYPE attribute.");
    }
    return type;
}
/**
 * Returns MIME type of metadata content, if available.
 * @param mdSec a *mdSec element with an mdRef or mdWrap child.
 * @return MIMEtype word, or null if none is available.
 */
public String getMdContentMimeType(Element mdSec)
    throws MetadataValidationException
{
    Element mdWrap = mdSec.getChild("mdWrap", metsNS);
    if (mdWrap == null)
    {
        // fall back to the MIMETYPE declared on an mdRef, if present
        Element mdRef = mdSec.getChild("mdRef", metsNS);
        return (mdRef == null) ? null : mdRef.getAttributeValue("MIMETYPE");
    }
    String mimeType = mdWrap.getAttributeValue("MIMETYPE");
    if (mimeType == null && mdWrap.getChild("xmlData", metsNS) != null)
    {
        // a bare xmlData section implies XML content even without MIMETYPE
        mimeType = "text/xml";
    }
    return mimeType;
}
/**
 * Return contents of *md element as List of XML Element objects.
 * Gets content, dereferencing mdRef if necessary, or decoding and parsing
 * a binData that contains XML.
 * @param mdSec the *mdSec element whose content is wanted.
 * @param callback used to dereference an mdRef to an external resource.
 * @return contents of metadata section, or empty list if no XML content is available.
 * @throws MetadataValidationException if METS is invalid, or there is an error parsing the XML.
 */
public List<Element> getMdContentAsXml(Element mdSec, Mdref callback)
    throws MetadataValidationException, PackageValidationException,
           IOException, SQLException, AuthorizeException
{
    try
    {
        // XXX sanity check: if this has more than one child, consider it
        // an error since we cannot deal with more than one mdRef|mdWrap
        // child.  This may be considered a bug and need to be fixed,
        // so it's best to bring it to the attention of users.
        List mdc = mdSec.getChildren();
        if (mdc.size() > 1)
        {
            // XXX scaffolding for debugging diagnosis; at least one
            // XML parser stupidly includes newlines in prettyprinting
            // as text content objects..
            String id = mdSec.getAttributeValue("ID");
            StringBuffer sb = new StringBuffer();
            for (Iterator mi = mdc.iterator(); mi.hasNext();)
            {
                sb.append(", ").append(((Content)mi.next()).toString());
            }
            // NOTE(review): message concatenation has no separator before
            // "Kids=" -- output reads "...ID=xKids=..."; consider ", Kids=".
            throw new MetadataValidationException("Cannot parse METS with "+mdSec.getQualifiedName()+" element that contains more than one child, size="+String.valueOf(mdc.size())+", ID="+id+"Kids="+sb.toString());
        }
        Element mdRef = null;
        Element mdWrap = mdSec.getChild("mdWrap", metsNS);
        if (mdWrap != null)
        {
            Element xmlData = mdWrap.getChild("xmlData", metsNS);
            if (xmlData == null)
            {
                Element bin = mdWrap.getChild("binData", metsNS);
                if (bin == null)
                {
                    throw new MetadataValidationException("Invalid METS Manifest: mdWrap element with neither xmlData nor binData child.");
                }
                // if binData is actually XML, return it; otherwise ignore.
                else
                {
                    String mimeType = mdWrap.getAttributeValue("MIMETYPE");
                    if (mimeType != null && mimeType.equalsIgnoreCase("text/xml"))
                    {
                        // binData is base64-encoded; decode then parse as XML
                        byte value[] = Base64.decodeBase64(bin.getText().getBytes());
                        Document mdd = parser.build(new ByteArrayInputStream(value));
                        List<Element> result = new ArrayList<Element>(1);
                        result.add(mdd.getRootElement());
                        return result;
                    }
                    else
                    {
                        log.warn("Ignoring binData section because MIMETYPE is not XML, but: "+mimeType);
                        return new ArrayList<Element>(0);
                    }
                }
            }
            else
            {
                return xmlData.getChildren();
            }
        }
        else
        {
            mdRef = mdSec.getChild("mdRef", metsNS);
            if (mdRef != null)
            {
                String mimeType = mdRef.getAttributeValue("MIMETYPE");
                if (mimeType != null && mimeType.equalsIgnoreCase("text/xml"))
                {
                    // dereference the external metadata via the caller-supplied callback
                    Document mdd = parser.build(callback.getInputStream(mdRef));
                    List<Element> result = new ArrayList<Element>(1);
                    result.add(mdd.getRootElement());
                    return result;
                }
                else
                {
                    log.warn("Ignoring mdRef section because MIMETYPE is not XML, but: "+mimeType);
                    return new ArrayList<Element>(0);
                }
            }
            else
            {
                throw new MetadataValidationException("Invalid METS Manifest: ?mdSec element with neither mdRef nor mdWrap child.");
            }
        }
    }
    catch (JDOMException je)
    {
        throw new MetadataValidationException("Error parsing or validating metadata section in mdRef or binData within "+mdSec.toString(), je);
    }
}
/**
 * Return contents of *md element as stream.
 * Gets content, dereferencing mdRef if necessary, or decoding
 * a binData element if necessary.
 * @param mdSec the *mdSec element whose content is wanted.
 * @param callback used to dereference an mdRef to an external resource.
 * @return Stream containing contents of metadata section. Never returns null.
 * @throws MetadataValidationException if METS format does not contain any metadata.
 */
public InputStream getMdContentAsStream(Element mdSec, Mdref callback)
    throws MetadataValidationException, PackageValidationException,
           IOException, SQLException, AuthorizeException
{
    Element mdWrap = mdSec.getChild("mdWrap", metsNS);
    if (mdWrap == null)
    {
        // no inline wrapper: metadata must be referenced externally
        Element mdRef = mdSec.getChild("mdRef", metsNS);
        if (mdRef == null)
        {
            throw new MetadataValidationException("Invalid METS Manifest: ?mdSec element with neither mdRef nor mdWrap child.");
        }
        return callback.getInputStream(mdRef);
    }

    Element xmlData = mdWrap.getChild("xmlData", metsNS);
    if (xmlData != null)
    {
        // serialize the inline XML back into a byte stream
        XMLOutputter outputPretty = new XMLOutputter(Format.getPrettyFormat());
        return new ByteArrayInputStream(
            outputPretty.outputString(xmlData.getChildren()).getBytes());
    }

    Element bin = mdWrap.getChild("binData", metsNS);
    if (bin == null)
    {
        throw new MetadataValidationException("Invalid METS Manifest: mdWrap element with neither xmlData nor binData child.");
    }
    // binData content is base64-encoded
    return new ByteArrayInputStream(Base64.decodeBase64(bin.getText().getBytes()));
}
/**
 * Return the {@code <div>} which describes this DSpace Object (and its contents)
 * from the {@code <structMap>}. In all cases, this is the first {@code <div>}
 * in the first {@code <structMap>}.
 *
 * @return Element which is the DSpace Object Contents {@code <div>}
 * @throws MetadataValidationException when the structMap or its first div is missing.
 */
public Element getObjStructDiv()
    throws MetadataValidationException
{
    Element structMap = mets.getChild("structMap", metsNS);
    if (structMap == null)
    {
        throw new MetadataValidationException("METS document is missing the required structMap element.");
    }
    Element objDiv = structMap.getChild("div", metsNS);
    if (objDiv == null)
    {
        throw new MetadataValidationException("METS document is missing the required first div element in first structMap.");
    }
    if (log.isDebugEnabled())
    {
        log.debug("Got getObjStructDiv result=" + objDiv.toString());
    }
    return objDiv;
}
/**
 * Get an array of child object {@code <div>}s from the METS Manifest {@code <structMap>}.
 * These {@code <div>}s reference the location of any child objects METS manifests.
 *
 * @return a List of {@code Element}s, each a {@code <div>}. May be empty but NOT null.
 * @throws MetadataValidationException
 */
public List getChildObjDivs()
    throws MetadataValidationException
{
    // The object's own <div> contains one child <div> per child object,
    // each referencing that child's METS manifest.
    return getObjStructDiv().getChildren("div", metsNS);
}
/**
 * Retrieve the file paths for the children objects' METS Manifest files.
 * These file paths are located in the {@code <mptr>} where @LOCTYPE=URL
 *
 * @return a list of Strings, corresponding to relative file paths of children METS manifests
 * @throws MetadataValidationException
 */
public String[] getChildMetsFilePaths()
    throws MetadataValidationException
{
    List childObjDivs = getChildObjDivs();
    List<String> childPathList = new ArrayList<String>();
    if (childObjDivs != null)
    {
        // For each child <div>, look through its <mptr> children for the
        // one with @LOCTYPE=URL; its @xlink:href is the manifest file path.
        for (Object divObj : childObjDivs)
        {
            Element childDiv = (Element) divObj;
            List childMptrs = childDiv.getChildren("mptr", metsNS);
            if (childMptrs == null)
            {
                continue;
            }
            for (Object mptrObj : childMptrs)
            {
                Element mptr = (Element) mptrObj;
                // Constant-first equals() makes the null check on LOCTYPE implicit.
                if ("URL".equals(mptr.getAttributeValue("LOCTYPE")))
                {
                    String filePath = mptr.getAttributeValue("href", xlinkNS);
                    if (filePath != null && filePath.length() > 0)
                    {
                        childPathList.add(filePath);
                    }
                }
            }
        }
    }
    // toArray with a correctly-sized array returns the same typed array.
    return childPathList.toArray(new String[childPathList.size()]);
}
/**
 * Return the reference to the Parent Object from the "Parent" {@code <structMap>}.
 * This parent object is the owner of current object.
 *
 * @return Link to the Parent Object (this is the Handle of that Parent), or null if not found
 * @throws MetadataValidationException
 */
public String getParentOwnerLink()
    throws MetadataValidationException
{
    // Scan every <structMap> for the one labelled 'Parent' (case-insensitive).
    List<Element> structMaps = mets.getChildren("structMap", metsNS);
    Element parentStructMap = null;
    for (Element structMap : structMaps)
    {
        // Constant-first comparison tolerates a missing LABEL attribute.
        if ("Parent".equalsIgnoreCase(structMap.getAttributeValue("LABEL")))
        {
            parentStructMap = structMap;
            break;
        }
    }
    if (parentStructMap == null)
    {
        throw new MetadataValidationException("METS document is missing the required structMap[@LABEL='Parent'] element.");
    }
    // The first <div> of the Parent map holds the link.
    Element linkDiv = parentStructMap.getChild("div", metsNS);
    if (linkDiv == null)
    {
        throw new MetadataValidationException("METS document is missing the required first div element in structMap[@LABEL='Parent'].");
    }
    // The parent handle lives in the <mptr>'s @xlink:href attribute;
    // null is returned when no <mptr> is present.
    Element mptr = linkDiv.getChild("mptr", metsNS);
    return (mptr == null) ? null : mptr.getAttributeValue("href", xlinkNS);
}
// Return a single Element node found by a one-off XPath expression.
// Use only when the path varies each time you call it (the compiled
// XPath is not cached).
//
// @param path   XPath expression, evaluated with the mets and xlink namespaces bound
// @param nullOk when true, a non-matching path returns null instead of throwing
// @throws MetadataValidationException if the path fails to compile/evaluate,
//         or matches nothing (with nullOk false) or a non-Element node
private Element getElementByXPath(String path, boolean nullOk)
    throws MetadataValidationException
{
    try
    {
        XPath xpath = XPath.newInstance(path);
        // Bind the namespaces the manifest expressions rely on.
        xpath.addNamespace(metsNS);
        xpath.addNamespace(xlinkNS);
        Object result = xpath.selectSingleNode(mets);
        if (result == null && nullOk)
        {
            return null;
        }
        else if (result instanceof Element)
        {
            return (Element) result;
        }
        else
        {
            // Either no match (with nullOk false) or a non-Element node type.
            throw new MetadataValidationException("METSManifest: Failed to resolve XPath, path=\"" + path + "\"");
        }
    }
    catch (JDOMException je)
    {
        throw new MetadataValidationException("METSManifest: Failed to resolve XPath, path=\""+path+"\"", je);
    }
}
// Find crosswalk for the indicated metadata type (e.g. "DC", "MODS").
// Configuration may remap the metadata type to a different crosswalk
// name, first per-packager, then for METS in general:
//   mets.<packagerName>.ingest.crosswalk.MDNAME = XWALKNAME
//   mets.default.ingest.crosswalk.MDNAME = XWALKNAME
// If neither key is set, the type itself names the plugin.
private Object getCrosswalk(String type, Class clazz)
{
    String xwalkName = ConfigurationManager.getProperty(
            CONFIG_METS_PREFIX + configName + ".ingest.crosswalk." + type);
    if (xwalkName == null)
    {
        xwalkName = ConfigurationManager.getProperty(
                CONFIG_METS_PREFIX + "default.ingest.crosswalk." + type);
    }
    if (xwalkName == null)
    {
        xwalkName = type;
    }
    return PluginManager.getNamedPlugin(clazz, xwalkName);
}
/**
 * Gets all dmdSec elements containing metadata for the DSpace Item.
 *
 * @return array of Elements, each a dmdSec. May be empty but NOT null.
 * @throws MetadataValidationException if the METS is missing a reference to item-wide
 * DMDs in the correct place.
 */
public Element[] getItemDmds()
    throws MetadataValidationException
{
    // div@DMDID is an IDREFS attribute: a space-separated list of IDs.
    String dmds = getObjStructDiv().getAttributeValue("DMDID");
    if (dmds == null)
    {
        throw new MetadataValidationException("Invalid METS: Missing reference to Item descriptive metadata, first div on first structmap must have a DMDID attribute.");
    }
    return getDmdElements(dmds);
}
/**
 * Gets all dmdSec elements from a space separated list
 *
 * @param dmdList space-separated list of DMDIDs
 * @return array of Elements, each a dmdSec. May be empty but NOT null.
 * @throws MetadataValidationException if a referenced dmdSec cannot be resolved.
 */
public Element[] getDmdElements(String dmdList)
    throws MetadataValidationException
{
    // Guard clause: no IDs means an empty (never null) result.
    if (dmdList == null || dmdList.isEmpty())
    {
        return new Element[0];
    }
    String[] ids = dmdList.split("\\s+");
    Element[] result = new Element[ids.length];
    for (int i = 0; i < ids.length; i++)
    {
        // nullOk=false: a dangling DMDID reference throws rather than yielding null.
        result[i] = getElementByXPath("mets:dmdSec[@ID=\"" + ids[i] + "\"]", false);
    }
    return result;
}
/**
 * Return rights metadata section(s) relevant to item as a whole.
 * @return array of rightsMd elements, possibly empty but never null.
 * @throws MetadataValidationException if METS is invalid, e.g. referenced amdSec is missing.
 */
public Element[] getItemRightsMD()
    throws MetadataValidationException
{
    // div@ADMID is actually IDREFS, a space-separated list of IDs:
    Element objDiv = getObjStructDiv();
    String amds = objDiv.getAttributeValue("ADMID");
    if (amds == null)
    {
        if (log.isDebugEnabled())
        {
            log.debug("getItemRightsMD: No ADMID references found.");
        }
        return new Element[0];
    }
    List<Element> resultList = new ArrayList<Element>();
    for (String amdID : amds.split("\\s+"))
    {
        // nullOk=false: a dangling ADMID reference throws instead of returning null.
        List rmds = getElementByXPath("mets:amdSec[@ID=\"" + amdID + "\"]", false)
                .getChildren("rightsMD", metsNS);
        // addAll on an empty list is a no-op; the original size()>0 guard was redundant.
        resultList.addAll(rmds);
    }
    return resultList.toArray(new Element[resultList.size()]);
}
/**
 * Invokes appropriate crosswalks on Item-wide descriptive metadata.
 * Thin wrapper: descriptive (dmdSec) sections use the same dispatch
 * logic as every other metadata section, implemented in crosswalkXmd.
 */
public void crosswalkItemDmd(Context context, PackageParameters params,
                             DSpaceObject dso,
                             Element dmdSec, Mdref callback)
    throws MetadataValidationException, PackageValidationException,
           CrosswalkException, IOException, SQLException, AuthorizeException
{
    crosswalkXmd(context, params, dso, dmdSec, callback);
}
/**
 * Crosswalk all technical and source metadata sections that belong
 * to the whole object.
 * @throws MetadataValidationException if METS is invalid, e.g. referenced amdSec is missing.
 */
public void crosswalkObjectOtherAdminMD(Context context, PackageParameters params,
                                        DSpaceObject dso, Mdref callback)
    throws MetadataValidationException, PackageValidationException,
           CrosswalkException, IOException, SQLException, AuthorizeException
{
    // The three section kinds are processed in this fixed order within
    // each amdSec, matching the original copy-pasted loops.
    final String[] mdSections = { "techMD", "digiprovMD", "rightsMD" };
    for (String amdID : getAmdIDs())
    {
        Element amdSec = getElementByXPath("mets:amdSec[@ID=\"" + amdID + "\"]", false);
        for (String sectionName : mdSections)
        {
            for (Iterator ti = amdSec.getChildren(sectionName, metsNS).iterator(); ti.hasNext();)
            {
                crosswalkXmd(context, params, dso, (Element) ti.next(), callback);
            }
        }
    }
}
/**
 * Just crosswalk the sourceMD sections; used to set the handle and parent of AIP.
 * @return true if any metadata section was actually crosswalked, false otherwise
 */
public boolean crosswalkObjectSourceMD(Context context, PackageParameters params,
                                       DSpaceObject dso, Mdref callback)
    throws MetadataValidationException, PackageValidationException,
           CrosswalkException, IOException, SQLException, AuthorizeException
{
    boolean crosswalked = false;
    for (String amdID : getAmdIDs())
    {
        Element amdSec = getElementByXPath("mets:amdSec[@ID=\"" + amdID + "\"]", false);
        // Every <sourceMD> child is crosswalked; the flag records that at
        // least one section was processed.
        for (Object smd : amdSec.getChildren("sourceMD", metsNS))
        {
            crosswalkXmd(context, params, dso, (Element) smd, callback);
            crosswalked = true;
        }
    }
    return crosswalked;
}
/**
 * Get an array of all ADMID values for this object.
 *
 * @return the ADMID values referenced by the object's div; empty (never null) if none
 * @throws MetadataValidationException if the object's structMap div cannot be found
 */
private String[] getAmdIDs()
    throws MetadataValidationException
{
    // div@ADMID is actually IDREFS, a space-separated list of IDs:
    Element objDiv = getObjStructDiv();
    String amds = objDiv.getAttributeValue("ADMID");
    if (amds == null)
    {
        if (log.isDebugEnabled())
        {
            // Fixed stale message: it previously named "crosswalkObjectTechMD",
            // a method this code no longer belongs to.
            log.debug("getAmdIDs: No ADMID references found.");
        }
        return new String[0];
    }
    return amds.split("\\s+");
}
// Crosswalk *any* kind of metadata section - techMD, rightsMD, etc.
// Dispatch order: an XML-based IngestionCrosswalk is tried first; if none
// is configured for the type, a StreamIngestionCrosswalk is fed from the
// section's <mdRef> (external file) or <mdWrap>/<binData> (embedded base64).
private void crosswalkXmd(Context context, PackageParameters params,
                          DSpaceObject dso,
                          Element xmd, Mdref callback)
    throws MetadataValidationException, PackageValidationException,
           CrosswalkException, IOException, SQLException, AuthorizeException
{
    String type = getMdType(xmd);
    //First, try to find the IngestionCrosswalk to use
    IngestionCrosswalk xwalk = (IngestionCrosswalk)getCrosswalk(type, IngestionCrosswalk.class);
    // If metadata is not simply applicable to object,
    // let it go with a warning.
    try
    {
        // If we found the IngestionCrosswalk, crosswalk our XML-based content
        if (xwalk != null)
        {
            // Check if our Crosswalk actually wraps another Packager Plugin
            if(xwalk instanceof AbstractPackagerWrappingCrosswalk)
            {
                // If this crosswalk wraps another Packager Plugin, we can pass it our Packaging Parameters
                // (which essentially allow us to customize the ingest process of the crosswalk)
                AbstractPackagerWrappingCrosswalk wrapper = (AbstractPackagerWrappingCrosswalk) xwalk;
                wrapper.setPackagingParameters(params);
            }
            xwalk.ingest(context, dso, getMdContentAsXml(xmd,callback));
        }
        // Otherwise, try stream-based crosswalk
        else
        {
            StreamIngestionCrosswalk sxwalk =
                (StreamIngestionCrosswalk)getCrosswalk(type, StreamIngestionCrosswalk.class);
            if (sxwalk != null)
            {
                // Check if our Crosswalk actually wraps another Packager Plugin
                if(sxwalk instanceof AbstractPackagerWrappingCrosswalk)
                {
                    // If this crosswalk wraps another Packager Plugin, we can pass it our Packaging Parameters
                    // (which essentially allow us to customize the ingest process of the crosswalk)
                    AbstractPackagerWrappingCrosswalk wrapper = (AbstractPackagerWrappingCrosswalk) sxwalk;
                    wrapper.setPackagingParameters(params);
                }
                // If we found a Stream-based crosswalk that matches, we now want to
                // locate the stream we are crosswalking. This stream should be
                // references in METS via an <mdRef> element
                // (which is how METS references external files)
                Element mdRef = xmd.getChild("mdRef", metsNS);
                if (mdRef != null)
                {
                    InputStream in = null;
                    try
                    {
                        in = callback.getInputStream(mdRef);
                        sxwalk.ingest(context, dso, in,
                                      mdRef.getAttributeValue("MIMETYPE"));
                    }
                    finally
                    {
                        // Always close the dereferenced stream, even on ingest failure.
                        if (in != null)
                        {
                            in.close();
                        }
                    }
                } // If we couldn't find an <mdRef>, then we'll try an <mdWrap>
                // with a <binData> element instead.
                // (this is how METS wraps embedded base64-encoded content streams)
                else
                {
                    Element mdWrap = xmd.getChild("mdWrap", metsNS);
                    if (mdWrap != null)
                    {
                        Element bin = mdWrap.getChild("binData", metsNS);
                        if (bin == null)
                        {
                            throw new MetadataValidationException("Invalid METS Manifest: mdWrap element for streaming crosswalk without binData child.");
                        }
                        else
                        {
                            // Decode the embedded base64 payload and feed it to the crosswalk.
                            byte value[] = Base64.decodeBase64(bin.getText().getBytes());
                            sxwalk.ingest(context, dso,
                                          new ByteArrayInputStream(value),
                                          mdWrap.getAttributeValue("MIMETYPE"));
                        }
                    }
                    else
                    {
                        throw new MetadataValidationException("Cannot process METS Manifest: " +
                            "Metadata of type=" + type + " requires a reference to a stream (mdRef), which was not found in " + xmd.getName());
                    }
                }
            }
            else
            {
                throw new MetadataValidationException("Cannot process METS Manifest: " +
                    "No crosswalk found for contents of " + xmd.getName() + " element, MDTYPE=" + type);
            }
        }
    }
    catch (CrosswalkObjectNotSupported e)
    {
        // Deliberately non-fatal: metadata inapplicable to this object type is skipped.
        log.warn("Skipping metadata section "+xmd.getName()+", type="+type+" inappropriate for this type of object: Object="+dso.toString()+", error="+e.toString());
    }
}
/**
 * Crosswalk the metadata associated with a particular <code>file</code>
 * element into the bitstream it corresponds to.
 * @param context a dspace context.
 * @param params any PackageParameters which may affect how bitstreams are crosswalked
 * @param bitstream bitstream target of the crosswalk
 * @param fileId value of ID attribute in the file element responsible
 *   for the contents of that bitstream.
 * @param callback resolver used to dereference any mdRef elements
 */
public void crosswalkBitstream(Context context, PackageParameters params,
                               Bitstream bitstream,
                               String fileId, Mdref callback)
    throws MetadataValidationException, PackageValidationException,
           CrosswalkException, IOException, SQLException, AuthorizeException
{
    Element file = getElementByXPath("descendant::mets:file[@ID=\"" + fileId + "\"]", false);
    if (file == null)
    {
        throw new MetadataValidationException("Failed in Bitstream crosswalk, Could not find file element with ID=" + fileId);
    }
    // In DSpace METS SIP spec, admin metadata is only "highly
    // recommended", not "required", so it is OK if there is no ADMID.
    String amds = file.getAttributeValue("ADMID");
    if (amds == null)
    {
        log.warn("Got no bitstream ADMID, file@ID=" + fileId);
        return;
    }
    for (String amdID : amds.split("\\s+"))
    {
        Element amdSec = getElementByXPath("mets:amdSec[@ID=\"" + amdID + "\"]", false);
        // techMD sections first, then sourceMD, per amdSec.
        for (Object md : amdSec.getChildren("techMD", metsNS))
        {
            crosswalkXmd(context, params, bitstream, (Element) md, callback);
        }
        for (Object md : amdSec.getChildren("sourceMD", metsNS))
        {
            crosswalkXmd(context, params, bitstream, (Element) md, callback);
        }
    }
}
/**
 * @return root element of METS document (live reference, not a copy).
 */
public Element getMets()
{
    return mets;
}
/**
 * Return entire METS document as an inputStream
 *
 * @return entire METS document as a stream
 */
public InputStream getMetsAsStream()
{
    // Pretty-print the document and expose the resulting bytes as a stream.
    XMLOutputter pretty = new XMLOutputter(Format.getPrettyFormat());
    byte[] bytes = pretty.outputString(mets).getBytes();
    return new ByteArrayInputStream(bytes);
}
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.devtestlabs.v2018_09_15;
import com.microsoft.azure.arm.model.HasInner;
import com.microsoft.azure.management.devtestlabs.v2018_09_15.implementation.NotificationChannelInner;
import com.microsoft.azure.arm.model.Indexable;
import com.microsoft.azure.arm.model.Refreshable;
import com.microsoft.azure.arm.model.Updatable;
import com.microsoft.azure.arm.model.Appliable;
import com.microsoft.azure.arm.model.Creatable;
import com.microsoft.azure.arm.resources.models.HasManager;
import com.microsoft.azure.management.devtestlabs.v2018_09_15.implementation.DevTestLabsManager;
import java.util.Map;
import java.util.List;
import org.joda.time.DateTime;
/**
 * Type representing NotificationChannel.
 * Immutable read view plus fluent definition/update stage interfaces
 * (AutoRest-generated; regenerate rather than hand-edit the structure).
 */
public interface NotificationChannel extends HasInner<NotificationChannelInner>, Indexable, Refreshable<NotificationChannel>, Updatable<NotificationChannel.Update>, HasManager<DevTestLabsManager> {
    /**
     * @return the createdDate value.
     */
    DateTime createdDate();

    /**
     * @return the description value.
     */
    String description();

    /**
     * @return the emailRecipient value.
     */
    String emailRecipient();

    /**
     * @return the events value.
     */
    List<Event> events();

    /**
     * @return the id value.
     */
    String id();

    /**
     * @return the location value.
     */
    String location();

    /**
     * @return the name value.
     */
    String name();

    /**
     * @return the notificationLocale value.
     */
    String notificationLocale();

    /**
     * @return the provisioningState value.
     */
    String provisioningState();

    /**
     * @return the tags value.
     */
    Map<String, String> tags();

    /**
     * @return the type value.
     */
    String type();

    /**
     * @return the uniqueIdentifier value.
     */
    String uniqueIdentifier();

    /**
     * @return the webHookUrl value.
     */
    String webHookUrl();

    /**
     * The entirety of the NotificationChannel definition.
     */
    interface Definition extends DefinitionStages.Blank, DefinitionStages.WithLab, DefinitionStages.WithCreate {
    }

    /**
     * Grouping of NotificationChannel definition stages.
     */
    interface DefinitionStages {
        /**
         * The first stage of a NotificationChannel definition.
         */
        interface Blank extends WithLab {
        }

        /**
         * The stage of the notificationchannel definition allowing to specify Lab.
         */
        interface WithLab {
           /**
            * Specifies resourceGroupName, labName.
            * @param resourceGroupName The name of the resource group
            * @param labName The name of the lab
            * @return the next definition stage
            */
            WithCreate withExistingLab(String resourceGroupName, String labName);
        }

        /**
         * The stage of the notificationchannel definition allowing to specify Description.
         */
        interface WithDescription {
            /**
             * Specifies description.
             * @param description Description of the notification
             * @return the next definition stage
             */
            WithCreate withDescription(String description);
        }

        /**
         * The stage of the notificationchannel definition allowing to specify EmailRecipient.
         */
        interface WithEmailRecipient {
            /**
             * Specifies emailRecipient.
             * @param emailRecipient The email recipient to send notifications to (can be a list of semi-colon separated email addresses)
             * @return the next definition stage
             */
            WithCreate withEmailRecipient(String emailRecipient);
        }

        /**
         * The stage of the notificationchannel definition allowing to specify Events.
         */
        interface WithEvents {
            /**
             * Specifies events.
             * @param events The list of events for which this notification is enabled
             * @return the next definition stage
             */
            WithCreate withEvents(List<Event> events);
        }

        /**
         * The stage of the notificationchannel definition allowing to specify Location.
         */
        interface WithLocation {
            /**
             * Specifies location.
             * @param location The location of the resource
             * @return the next definition stage
             */
            WithCreate withLocation(String location);
        }

        /**
         * The stage of the notificationchannel definition allowing to specify NotificationLocale.
         */
        interface WithNotificationLocale {
            /**
             * Specifies notificationLocale.
             * @param notificationLocale The locale to use when sending a notification (fallback for unsupported languages is EN)
             * @return the next definition stage
             */
            WithCreate withNotificationLocale(String notificationLocale);
        }

        /**
         * The stage of the notificationchannel definition allowing to specify Tags.
         */
        interface WithTags {
            /**
             * Specifies tags.
             * @param tags The tags of the resource
             * @return the next definition stage
             */
            WithCreate withTags(Map<String, String> tags);
        }

        /**
         * The stage of the notificationchannel definition allowing to specify WebHookUrl.
         */
        interface WithWebHookUrl {
            /**
             * Specifies webHookUrl.
             * @param webHookUrl The webhook URL to send notifications to
             * @return the next definition stage
             */
            WithCreate withWebHookUrl(String webHookUrl);
        }

        /**
         * The stage of the definition which contains all the minimum required inputs for
         * the resource to be created (via {@link WithCreate#create()}), but also allows
         * for any other optional settings to be specified.
         */
        interface WithCreate extends Creatable<NotificationChannel>, DefinitionStages.WithDescription, DefinitionStages.WithEmailRecipient, DefinitionStages.WithEvents, DefinitionStages.WithLocation, DefinitionStages.WithNotificationLocale, DefinitionStages.WithTags, DefinitionStages.WithWebHookUrl {
        }
    }

    /**
     * The template for a NotificationChannel update operation, containing all the settings that can be modified.
     */
    interface Update extends Appliable<NotificationChannel>, UpdateStages.WithDescription, UpdateStages.WithEmailRecipient, UpdateStages.WithEvents, UpdateStages.WithNotificationLocale, UpdateStages.WithTags, UpdateStages.WithWebHookUrl {
    }

    /**
     * Grouping of NotificationChannel update stages.
     */
    interface UpdateStages {
        /**
         * The stage of the notificationchannel update allowing to specify Description.
         */
        interface WithDescription {
            /**
             * Specifies description.
             * @param description Description of the notification
             * @return the next update stage
             */
            Update withDescription(String description);
        }

        /**
         * The stage of the notificationchannel update allowing to specify EmailRecipient.
         */
        interface WithEmailRecipient {
            /**
             * Specifies emailRecipient.
             * @param emailRecipient The email recipient to send notifications to (can be a list of semi-colon separated email addresses)
             * @return the next update stage
             */
            Update withEmailRecipient(String emailRecipient);
        }

        /**
         * The stage of the notificationchannel update allowing to specify Events.
         * Note the update path takes EventFragment objects rather than Event.
         */
        interface WithEvents {
            /**
             * Specifies events.
             * @param events The list of events for which this notification is enabled
             * @return the next update stage
             */
            Update withEventFragments(List<EventFragment> events);
        }

        /**
         * The stage of the notificationchannel update allowing to specify NotificationLocale.
         */
        interface WithNotificationLocale {
            /**
             * Specifies notificationLocale.
             * @param notificationLocale The locale to use when sending a notification (fallback for unsupported languages is EN)
             * @return the next update stage
             */
            Update withNotificationLocale(String notificationLocale);
        }

        /**
         * The stage of the notificationchannel update allowing to specify Tags.
         */
        interface WithTags {
            /**
             * Specifies tags.
             * @param tags The tags of the resource
             * @return the next update stage
             */
            Update withTags(Map<String, String> tags);
        }

        /**
         * The stage of the notificationchannel update allowing to specify WebHookUrl.
         */
        interface WithWebHookUrl {
            /**
             * Specifies webHookUrl.
             * @param webHookUrl The webhook URL to send notifications to
             * @return the next update stage
             */
            Update withWebHookUrl(String webHookUrl);
        }
    }
}
| |
/**
Copyright 2015, James G. Willmore
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package net.ljcomputing.people.domain;
import java.util.UUID;
import net.ljcomputing.core.domain.AbstractDomain;
/**
 * Class representing a person.
 *
 * @author James G. Willmore
 */
public class Person extends AbstractDomain {

  /** The Constant serialVersionUID. */
  private static final long serialVersionUID = 4172334299699269349L;

  /** The salutation. NOTE: spelled "saluation" historically; kept for API/serialization compatibility. */
  private SalutationType saluation = SalutationType.NONE;

  /** The first name. */
  private String firstName;

  /** The middle name. */
  private String middleName;

  /** The last name. */
  private String lastName;

  /** The suffix. */
  private String suffix;

  /** The cached full name, refreshed whenever a name component changes. */
  private String fullName;

  /**
   * Instantiates a new person.
   */
  public Person() {}

  /**
   * Instantiates a new person.
   *
   * @param salutation the salutation
   * @param firstName the first name
   * @param middleName the middle name
   * @param lastName the last name
   * @param suffix the suffix
   * @param uuid the uuid; a fresh one is generated when null
   */
  public Person(SalutationType salutation, String firstName, String middleName, String lastName, String suffix,
      UUID uuid) {
    if (null == uuid) {
      createUuid();
    } else {
      // Bug fix: a non-null uuid used to be silently discarded, so the copy
      // constructor produced a copy without the original's identity.
      this.uuid = uuid;
    }
    setSaluation(salutation);
    setFirstName(firstName);
    setMiddleName(middleName);
    setLastName(lastName);
    setSuffix(suffix);
    createFullName();
  }

  /**
   * Instantiates a new person (copy constructor).
   *
   * @param person the person to copy
   */
  public Person(Person person) {
    this(person.getSaluation(), person.getFirstName(), person.getMiddleName(), person.getLastName(),
        person.getSuffix(), person.getUuid());
  }

  /**
   * Gets the saluation.
   *
   * @return the saluation
   */
  public SalutationType getSaluation() {
    return saluation;
  }

  /**
   * Sets the saluation.
   *
   * @param saluation the new saluation
   */
  public void setSaluation(SalutationType saluation) {
    this.saluation = saluation;
    // Consistency fix: keep the cached fullName in sync, like the name setters do.
    createFullName();
  }

  /**
   * Gets the first name.
   *
   * @return the first name
   */
  public String getFirstName() {
    return firstName;
  }

  /**
   * Sets the first name.
   *
   * @param firstName the new first name
   */
  public void setFirstName(String firstName) {
    this.firstName = firstName;
    createFullName();
  }

  /**
   * Gets the middle name.
   *
   * @return the middle name
   */
  public String getMiddleName() {
    return middleName;
  }

  /**
   * Sets the middle name.
   *
   * @param middleName the new middle name
   */
  public void setMiddleName(String middleName) {
    this.middleName = middleName;
    createFullName();
  }

  /**
   * Gets the last name.
   *
   * @return the last name
   */
  public String getLastName() {
    return lastName;
  }

  /**
   * Sets the last name.
   *
   * @param lastName the new last name
   */
  public void setLastName(String lastName) {
    this.lastName = lastName;
    createFullName();
  }

  /**
   * Gets the suffix.
   *
   * @return the suffix
   */
  public String getSuffix() {
    return suffix;
  }

  /**
   * Sets the suffix.
   *
   * @param suffix the new suffix
   */
  public void setSuffix(String suffix) {
    this.suffix = suffix;
    // Bug fix: previously the cached fullName was not refreshed here,
    // unlike the other name-component setters.
    createFullName();
  }

  /**
   * Gets the full name, computed from salutation, names and suffix.
   * A SalutationType.NONE salutation is omitted.
   *
   * @return the full name
   */
  public String getFullName() {
    // StringBuilder: local, single-threaded use needs no synchronization.
    StringBuilder buf = new StringBuilder();
    buf.append(
        (null != saluation && !SalutationType.NONE.equals(saluation)) ? saluation.getDescription() + " " : "");
    buf.append((null != firstName) ? firstName + " " : "");
    buf.append((null != middleName) ? middleName + " " : "");
    buf.append((null != lastName) ? lastName + " " : "");
    buf.append((null != suffix) ? suffix : "");
    // NOTE(review): the result keeps a trailing space when suffix is null;
    // preserved as-is since stored fullName values may depend on it.
    return buf.toString();
  }

  /**
   * Refreshes the cached full name from the current name components.
   */
  public void createFullName() {
    fullName = getFullName();
  }

  /**
   * Checks if person is valid: both first and last name must be present.
   *
   * @return Boolean.TRUE when firstName and lastName are non-null
   */
  public Boolean isValid() {
    return null != firstName && null != lastName;
  }

  @Override
  public String toString() {
    return "Person [saluation=" + saluation + ", firstName=" + firstName + ", middleName=" + middleName
        + ", lastName=" + lastName + ", suffix=" + suffix + ", fullName=" + fullName + ", uuid=" + uuid + "]";
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.queries.TermFilter;
import org.apache.lucene.search.Filter;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.codec.docvaluesformat.DocValuesFormatProvider;
import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
import static org.elasticsearch.index.mapper.MapperBuilders.booleanField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
/**
*
*/
// TODO this can be made better, maybe storing a byte for it?
public class BooleanFieldMapper extends AbstractFieldMapper<Boolean> {
public static final String CONTENT_TYPE = "boolean";
public static class Defaults extends AbstractFieldMapper.Defaults {
    // Index-time defaults for boolean fields: norms off, only doc IDs
    // indexed (no term frequencies/positions), and never tokenized.
    public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);

    static {
        FIELD_TYPE.setOmitNorms(true);
        FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_ONLY);
        FIELD_TYPE.setTokenized(false);
        // Freeze so the shared default instance cannot be mutated.
        FIELD_TYPE.freeze();
    }

    // By default no substitute value is indexed for null fields.
    public static final Boolean NULL_VALUE = null;
}
public static class Values {
    // Booleans are stored in the index as single-character terms:
    // "T" for true, "F" for false.
    public final static BytesRef TRUE = new BytesRef("T");
    public final static BytesRef FALSE = new BytesRef("F");
}
public static class Builder extends AbstractFieldMapper.Builder<Builder, BooleanFieldMapper> {

    // Value substituted when the document's field is null; null means "index nothing".
    private Boolean nullValue = Defaults.NULL_VALUE;

    public Builder(String name) {
        // Copy of the frozen default FieldType so per-field settings can be applied.
        super(name, new FieldType(Defaults.FIELD_TYPE));
        this.builder = this;
    }

    public Builder nullValue(boolean nullValue) {
        this.nullValue = nullValue;
        return this;
    }

    @Override
    public Builder tokenized(boolean tokenized) {
        // Boolean terms are single tokens by construction; reject attempts to tokenize.
        if (tokenized) {
            throw new ElasticsearchIllegalArgumentException("bool field can't be tokenized");
        }
        return super.tokenized(tokenized);
    }

    @Override
    public BooleanFieldMapper build(BuilderContext context) {
        return new BooleanFieldMapper(buildNames(context), boost, fieldType, nullValue, postingsProvider,
                docValuesProvider, similarity, normsLoading, fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
    }
}
public static class TypeParser implements Mapper.TypeParser {
    // Builds a BooleanFieldMapper.Builder from the mapping definition node,
    // handling the generic field settings plus the boolean-specific "null_value".
    @Override
    public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
        BooleanFieldMapper.Builder builder = booleanField(name);
        // Common field properties (index, store, boost, ...) are parsed by the shared helper.
        parseField(builder, name, node, parserContext);
        for (Map.Entry<String, Object> entry : node.entrySet()) {
            String propName = Strings.toUnderscoreCase(entry.getKey());
            Object propNode = entry.getValue();
            if (propName.equals("null_value")) {
                // An explicit null here is ambiguous (absent vs null), so it is rejected.
                if (propNode == null) {
                    throw new MapperParsingException("Property [null_value] cannot be null.");
                }
                builder.nullValue(nodeBooleanValue(propNode));
            }
        }
        return builder;
    }
}
private Boolean nullValue;
protected BooleanFieldMapper(Names names, float boost, FieldType fieldType, Boolean nullValue, PostingsFormatProvider postingsProvider,
DocValuesFormatProvider docValuesProvider, SimilarityProvider similarity, Loading normsLoading,
@Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(names, boost, fieldType, null, Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER, postingsProvider, docValuesProvider, similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo);
this.nullValue = nullValue;
}
@Override
public FieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
}
@Override
public FieldDataType defaultFieldDataType() {
// TODO have a special boolean type?
return new FieldDataType("string");
}
@Override
public boolean useTermQueryWithQueryString() {
return true;
}
@Override
public Boolean value(Object value) {
if (value == null) {
return Boolean.FALSE;
}
String sValue = value.toString();
if (sValue.length() == 0) {
return Boolean.FALSE;
}
if (sValue.length() == 1 && sValue.charAt(0) == 'F') {
return Boolean.FALSE;
}
if (Booleans.parseBoolean(sValue, false)) {
return Boolean.TRUE;
}
return Boolean.FALSE;
}
@Override
public Object valueForSearch(Object value) {
return value(value);
}
@Override
public BytesRef indexedValueForSearch(Object value) {
if (value == null) {
return Values.FALSE;
}
if (value instanceof Boolean) {
return ((Boolean) value) ? Values.TRUE : Values.FALSE;
}
String sValue;
if (value instanceof BytesRef) {
sValue = ((BytesRef) value).utf8ToString();
} else {
sValue = value.toString();
}
if (sValue.length() == 0) {
return Values.FALSE;
}
if (sValue.length() == 1 && sValue.charAt(0) == 'F') {
return Values.FALSE;
}
if (Booleans.parseBoolean(sValue, false)) {
return Values.TRUE;
}
return Values.FALSE;
}
@Override
public Filter nullValueFilter() {
if (nullValue == null) {
return null;
}
return new TermFilter(names().createIndexNameTerm(nullValue ? Values.TRUE : Values.FALSE));
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
if (!fieldType().indexed() && !fieldType().stored()) {
return;
}
Boolean value = context.parseExternalValue(Boolean.class);
if (value == null) {
XContentParser.Token token = context.parser().currentToken();
if (token == XContentParser.Token.VALUE_NULL) {
if (nullValue != null) {
value = nullValue;
}
} else {
value = context.parser().booleanValue();
}
}
if (value == null) {
return;
}
fields.add(new Field(names.indexName(), value ? "T" : "F", fieldType));
}
@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
super.merge(mergeWith, mergeContext);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
}
if (!mergeContext.mergeFlags().simulate()) {
this.nullValue = ((BooleanFieldMapper) mergeWith).nullValue;
}
}
@Override
protected String contentType() {
return CONTENT_TYPE;
}
@Override
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);
if (includeDefaults || nullValue != null) {
builder.field("null_value", nullValue);
}
}
@Override
public boolean hasDocValues() {
return false;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.syncope.client.console.pages;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.syncope.client.console.commons.Constants;
import org.apache.syncope.client.console.commons.PreferenceManager;
import org.apache.syncope.common.lib.search.SearchableFields;
import org.apache.syncope.common.lib.to.UserTO;
import org.apache.wicket.PageReference;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.markup.html.form.AjaxButton;
import org.apache.wicket.extensions.ajax.markup.html.IndicatingAjaxButton;
import org.apache.wicket.extensions.ajax.markup.html.modal.ModalWindow;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.Check;
import org.apache.wicket.markup.html.form.CheckGroup;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.list.ListItem;
import org.apache.wicket.markup.html.list.ListView;
import org.apache.wicket.markup.html.panel.Fragment;
import org.apache.wicket.model.CompoundPropertyModel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.LoadableDetachableModel;
import org.apache.wicket.model.PropertyModel;
import org.apache.wicket.model.ResourceModel;
import org.apache.wicket.spring.injection.annot.SpringBean;
/**
* Modal window with Display attributes form.
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
public class DisplayAttributesModalPage extends BaseModalPage {

    private static final long serialVersionUID = -4274117450918385110L;

    /**
     * Max allowed selections.
     */
    private static final int MAX_SELECTIONS = 9;

    public static final String[] DEFAULT_SELECTION = { "key", "username", "status" };

    @SpringBean
    private PreferenceManager prefMan;

    // The four categories of displayable columns; bound to the check groups below
    // through CompoundPropertyModel / PropertyModel and persisted as preferences.
    private final List<String> selectedDetails;

    private final List<String> selectedPlainSchemas;

    private final List<String> selectedDerSchemas;

    private final List<String> selectedVirSchemas;

    /**
     * Builds the modal content: one check group per attribute category (user details,
     * plain, derived and virtual schemas) plus submit/cancel buttons.
     *
     * @param pageRef reference to the caller page; its modal result flag is set on submit
     * @param window the modal window, closed on submit or cancel
     * @param schemaNames plain schema names to offer
     * @param dSchemaNames derived schema names to offer
     * @param vSchemaNames virtual schema names to offer
     */
    public DisplayAttributesModalPage(final PageReference pageRef, final ModalWindow window,
            final List<String> schemaNames, final List<String> dSchemaNames, final List<String> vSchemaNames) {

        super();

        // Searchable, non-schema user fields ("details").
        final IModel<List<String>> fnames = new LoadableDetachableModel<List<String>>() {

            private static final long serialVersionUID = 5275935387613157437L;

            @Override
            protected List<String> load() {
                return SearchableFields.get(UserTO.class);
            }
        };
        final IModel<List<String>> names = new LoadableDetachableModel<List<String>>() {

            private static final long serialVersionUID = 5275935387613157437L;

            @Override
            protected List<String> load() {
                return schemaNames;
            }
        };
        final IModel<List<String>> dsnames = new LoadableDetachableModel<List<String>>() {

            private static final long serialVersionUID = 5275935387613157437L;

            @Override
            protected List<String> load() {
                return dSchemaNames;
            }
        };
        final IModel<List<String>> vsnames = new LoadableDetachableModel<List<String>>() {

            private static final long serialVersionUID = 5275935387613157437L;

            @Override
            protected List<String> load() {
                return vSchemaNames;
            }
        };

        // Form fields resolve against this page's selected* lists via the compound model.
        final Form form = new Form(FORM);
        form.setModel(new CompoundPropertyModel(this));

        // Pre-select whatever the user chose last time (stored via PreferenceManager).
        selectedDetails = prefMan.getList(getRequest(), Constants.PREF_USERS_DETAILS_VIEW);
        selectedPlainSchemas = prefMan.getList(getRequest(), Constants.PREF_USERS_ATTRIBUTES_VIEW);
        selectedDerSchemas = prefMan.getList(getRequest(), Constants.PREF_USERS_DERIVED_ATTRIBUTES_VIEW);
        selectedVirSchemas = prefMan.getList(getRequest(), Constants.PREF_USERS_VIRTUAL_ATTRIBUTES_VIEW);

        final CheckGroup dgroup = new CheckGroup("dCheckGroup", new PropertyModel(this, "selectedDetails"));
        form.add(dgroup);

        final ListView<String> details = new ListView<String>("details", fnames) {

            private static final long serialVersionUID = 9101744072914090143L;

            @Override
            protected void populateItem(final ListItem<String> item) {
                item.add(new Check("dcheck", item.getModel()));
                item.add(new Label("dname", new ResourceModel(item.getModelObject(), item.getModelObject())));
            }
        };
        dgroup.add(details);

        // For each schema category: render an empty fragment (and drop stale selections)
        // when no schemas exist, otherwise a check group listing the available names.
        if (names.getObject() == null || names.getObject().isEmpty()) {
            final Fragment fragment = new Fragment("plainSchemas", "emptyFragment", form);
            form.add(fragment);
            selectedPlainSchemas.clear();
        } else {
            final Fragment fragment = new Fragment("plainSchemas", "sfragment", form);
            form.add(fragment);

            final CheckGroup sgroup = new CheckGroup("psCheckGroup", new PropertyModel(this, "selectedPlainSchemas"));
            fragment.add(sgroup);

            final ListView<String> schemas = new ListView<String>("plainSchemas", names) {

                private static final long serialVersionUID = 9101744072914090143L;

                @Override
                protected void populateItem(final ListItem<String> item) {
                    item.add(new Check("scheck", item.getModel()));
                    item.add(new Label("sname", new ResourceModel(item.getModelObject(), item.getModelObject())));
                }
            };
            sgroup.add(schemas);
        }

        if (dsnames.getObject() == null || dsnames.getObject().isEmpty()) {
            final Fragment fragment = new Fragment("dschemas", "emptyFragment", form);
            form.add(fragment);
            selectedDerSchemas.clear();
        } else {
            final Fragment fragment = new Fragment("dschemas", "dsfragment", form);
            form.add(fragment);

            final CheckGroup dsgroup = new CheckGroup("dsCheckGroup", new PropertyModel(this, "selectedDerSchemas"));
            fragment.add(dsgroup);

            final ListView<String> derSchemas = new ListView<String>("derSchemas", dsnames) {

                private static final long serialVersionUID = 9101744072914090143L;

                @Override
                protected void populateItem(ListItem<String> item) {
                    item.add(new Check("dscheck", item.getModel()));
                    item.add(new Label("dsname", new ResourceModel(item.getModelObject(), item.getModelObject())));
                }
            };
            dsgroup.add(derSchemas);
        }

        if (vsnames.getObject() == null || vsnames.getObject().isEmpty()) {
            final Fragment fragment = new Fragment("vschemas", "emptyFragment", form);
            form.add(fragment);
            selectedVirSchemas.clear();
        } else {
            final Fragment fragment = new Fragment("vschemas", "vsfragment", form);
            form.add(fragment);

            final CheckGroup vsgroup = new CheckGroup("vsCheckGroup", new PropertyModel(this, "selectedVirSchemas"));
            fragment.add(vsgroup);

            final ListView<String> virSchemas = new ListView<String>("virSchemas", vsnames) {

                private static final long serialVersionUID = 9101744072914090143L;

                @Override
                protected void populateItem(ListItem<String> item) {
                    item.add(new Check("vscheck", item.getModel()));
                    item.add(new Label("vsname", new ResourceModel(item.getModelObject(), item.getModelObject())));
                }
            };
            vsgroup.add(virSchemas);
        }

        // On submit: enforce the MAX_SELECTIONS cap, persist the selections as
        // preferences, flag the caller page and close the modal.
        final AjaxButton submit = new IndicatingAjaxButton(SUBMIT, new ResourceModel(SUBMIT)) {

            private static final long serialVersionUID = -4804368561204623354L;

            @Override
            protected void onSubmit(final AjaxRequestTarget target, final Form<?> form) {
                if (selectedDetails.size() + selectedPlainSchemas.size() + selectedVirSchemas.size() + selectedDerSchemas.
                        size()
                        > MAX_SELECTIONS) {

                    error(getString("tooManySelections"));
                    onError(target, form);
                } else {
                    final Map<String, List<String>> prefs = new HashMap<String, List<String>>();
                    prefs.put(Constants.PREF_USERS_DETAILS_VIEW, selectedDetails);
                    prefs.put(Constants.PREF_USERS_ATTRIBUTES_VIEW, selectedPlainSchemas);
                    prefs.put(Constants.PREF_USERS_DERIVED_ATTRIBUTES_VIEW, selectedDerSchemas);
                    prefs.put(Constants.PREF_USERS_VIRTUAL_ATTRIBUTES_VIEW, selectedVirSchemas);
                    prefMan.setList(getRequest(), getResponse(), prefs);

                    ((BasePage) pageRef.getPage()).setModalResult(true);
                    window.close(target);
                }
            }

            @Override
            protected void onError(final AjaxRequestTarget target, final Form<?> form) {
                feedbackPanel.refresh(target);
            }
        };
        form.add(submit);

        final AjaxButton cancel = new IndicatingAjaxButton(CANCEL, new ResourceModel(CANCEL)) {

            private static final long serialVersionUID = -958724007591692537L;

            @Override
            protected void onSubmit(final AjaxRequestTarget target, final Form<?> form) {
                window.close(target);
            }
        };
        // Cancel bypasses form validation / model update entirely.
        cancel.setDefaultFormProcessing(false);
        form.add(cancel);

        add(form);
    }
}
| |
/**
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.portlet.announcements.model;
import java.util.Calendar;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import java.util.TreeSet;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
import javax.xml.bind.annotation.XmlType;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jasig.portlet.announcements.xml.Namespaces;
/**
* <p>Topic class.</p>
*
* @author Erik A. Olsson (eolsson@uci.edu)
* <p>$LastChangedBy$ $LastChangedDate$
* @version $Id: $Id
*/
@XmlType(namespace = Namespaces.TOPIC_NAMESPACE)
@XmlRootElement(name = "topic")
public class Topic {

    /* Subscription methods: announcements for this topic are... */

    /** Constant <code>PUSHED_FORCED=1</code>: pushed to the audience members and they cannot unsubscribe. */
    public static final int PUSHED_FORCED = 1;

    /** Constant <code>PUSHED_INITIAL=2</code>: pushed initially, but users can unsubscribe. */
    public static final int PUSHED_INITIAL = 2;

    /** Constant <code>PULLED=3</code>: not pushed to anybody, but target audience members can subscribe (pull) if they want to. */
    public static final int PULLED = 3;

    /** Constant <code>EMERGENCY=4</code>: a topic that supersedes all other topics. */
    public static final int EMERGENCY = 4;

    private static final Log logger = LogFactory.getLog(Topic.class);

    // Deliberately not initialized in the constructor; presumably populated by the
    // persistence layer (TODO confirm) -- every reader below must null-check it.
    private Set<Announcement> announcements;

    private Set<TopicSubscription> subscriptions;

    private Set<String> admins;

    private Set<String> moderators;

    private Set<String> authors;

    private Set<String> audience;

    private String creator;

    private String title;

    private String description;

    private boolean allowRss;

    private int subscriptionMethod;

    private Long id;

    /**
     * Creates a topic with empty (sorted) role groups and an empty subscription set.
     */
    public Topic() {
        subscriptions = new HashSet<TopicSubscription>();
        admins = new TreeSet<String>();
        moderators = new TreeSet<String>();
        authors = new TreeSet<String>();
        audience = new TreeSet<String>();
    }

    /**
     * Returns the member group for the given role name.
     *
     * @param key one of the {@code UserRoles} role names
     * @return the matching group of member names
     * @throws RuntimeException if the key matches no known role
     */
    public Set<String> getGroup(String key) {
        if (UserRoles.ADMIN_ROLE_NAME.equals(key)) {
            return getAdmins();
        } else if (UserRoles.MODERATOR_ROLE_NAME.equals(key)) {
            return getModerators();
        } else if (UserRoles.AUTHOR_ROLE_NAME.equals(key)) {
            return getAuthors();
        } else if (UserRoles.AUDIENCE_ROLE_NAME.equals(key)) {
            return getAudience();
        }
        throw new RuntimeException("Role not found: " + key);
    }

    /**
     * Replaces the member group for the given role name.
     *
     * @param key one of the {@code UserRoles} role names
     * @param members the new group members
     * @throws RuntimeException if the key matches no known role
     */
    public void setGroup(String key, Set<String> members) {
        if (UserRoles.ADMIN_ROLE_NAME.equals(key)) {
            setAdmins(members);
        } else if (UserRoles.MODERATOR_ROLE_NAME.equals(key)) {
            setModerators(members);
        } else if (UserRoles.AUTHOR_ROLE_NAME.equals(key)) {
            setAuthors(members);
        } else if (UserRoles.AUDIENCE_ROLE_NAME.equals(key)) {
            setAudience(members);
        } else {
            throw new RuntimeException("Role not found: " + key);
        }
    }

    /**
     * Tells whether this topic has been assigned a database id.
     *
     * @return {@code true} if an id is present
     */
    public boolean hasId() {
        return (this.id != null);
    }

    /** @return the moderators */
    @XmlElementWrapper(name = "moderators")
    @XmlElement(name = "moderator")
    public Set<String> getModerators() {
        return moderators;
    }

    /** @return the creator */
    @XmlElement(name = "creator", defaultValue = "system")
    public String getCreator() {
        return creator;
    }

    /** @return the title */
    @XmlElement(name = "title", required = true)
    public String getTitle() {
        return title;
    }

    /** @return the description */
    @XmlElement(name = "description")
    public String getDescription() {
        return description;
    }

    /** @return the allowRss flag */
    @XmlElement(name = "allowRss")
    public boolean isAllowRss() {
        return allowRss;
    }

    /** @param moderators the moderators to set */
    public void setModerators(Set<String> moderators) {
        this.moderators = moderators;
    }

    /** @param creator the creator to set */
    public void setCreator(String creator) {
        this.creator = creator;
    }

    /** @param title the title to set */
    public void setTitle(String title) {
        this.title = title;
    }

    /** @param description the description to set */
    public void setDescription(String description) {
        this.description = description;
    }

    /** @param allowRss the allowRss flag to set */
    public void setAllowRss(boolean allowRss) {
        this.allowRss = allowRss;
    }

    /** @return the subscriptions */
    @XmlElementWrapper(name = "subscriptions")
    @XmlElement(name = "subscription")
    public Set<TopicSubscription> getSubscriptions() {
        return subscriptions;
    }

    /** @param subscriptions the subscriptions to set */
    public void setSubscriptions(Set<TopicSubscription> subscriptions) {
        this.subscriptions = subscriptions;
    }

    /**
     * Returns a list of all announcements in this topic, regardless of status.
     *
     * @return the announcements
     */
    @XmlElementWrapper(name = "announcements")
    @XmlElement(name = "announcement")
    public Set<Announcement> getAnnouncements() {
        return announcements;
    }

    /**
     * Null-safe debug logging of a filter result. The previous inline versions
     * dereferenced {@code this.announcements} outside the null guard (NPE when debug
     * logging was enabled) and passed the "%d of %d" arguments in reversed order.
     */
    private void logFiltered(Set<Announcement> filtered) {
        if (logger.isDebugEnabled()) {
            int total = (this.announcements != null) ? this.announcements.size() : 0;
            logger.debug(
                    String.format("Returning %d of %d announcements", filtered.size(), total));
        }
    }

    /**
     * Returns a list of all published announcements in this topic. For announcements to
     * be included in this list, they must also be within their specified display period.
     *
     * @return the announcements
     */
    @XmlTransient
    public Set<Announcement> getPublishedAnnouncements() {
        // Don't use a TreeSet here... causes lost announcements
        Set<Announcement> announcementsFiltered = new HashSet<Announcement>();
        Date now = new Date();

        if (this.announcements != null) {
            for (Announcement ann : this.announcements) {
                Date startDisplay = ann.getStartDisplay();
                Date endDisplay = ann.getEndDisplay();
                if (endDisplay == null) {
                    // Unspecified end date means the announcement does not expire; we
                    // will substitute a date in the future each time this item is
                    // evaluated.
                    long aYearFromNow = System.currentTimeMillis() + Announcement.MILLISECONDS_IN_A_YEAR;
                    endDisplay = new Date(aYearFromNow);
                }
                if (ann.getPublished() && startDisplay.before(now) && endDisplay.after(now)) {
                    announcementsFiltered.add(ann);
                }
            }
        }

        logFiltered(announcementsFiltered);
        return announcementsFiltered;
    }

    /**
     * Returns a list of all non-historic announcements in this topic: all non-expired
     * announcements as well as announcements that expired less than a day ago.
     *
     * @return the announcements
     */
    @XmlTransient
    public Set<Announcement> getNonHistoricAnnouncements() {
        // Don't use a TreeSet here... causes lost announcements
        Set<Announcement> announcementsFiltered = new HashSet<Announcement>();
        Calendar cal = Calendar.getInstance();
        cal.add(Calendar.DATE, -1); // subtract 1 day from today.
        Date date = cal.getTime();

        if (this.announcements != null) {
            for (Announcement ann : this.announcements) {
                // A null end date means "never expires", hence never historic.
                if ((ann.getEndDisplay() == null) || (date.before(ann.getEndDisplay()))) {
                    announcementsFiltered.add(ann);
                }
            }
        }

        logFiltered(announcementsFiltered);
        return announcementsFiltered;
    }

    /**
     * Returns a list of all historic (already expired) announcements in this topic.
     *
     * @return the announcements
     */
    @XmlTransient
    public Set<Announcement> getHistoricAnnouncements() {
        // Don't use a TreeSet here... causes lost announcements
        Set<Announcement> announcementsFiltered = new HashSet<Announcement>();
        Calendar cal = Calendar.getInstance();
        Date dateStart = cal.getTime();

        if (this.announcements != null) {
            for (Announcement ann : this.announcements) {
                // Guard against a null end date (previously an NPE): no end date means
                // the announcement never expires and is therefore never historic.
                if (ann.getEndDisplay() != null && ann.getEndDisplay().before(dateStart)) {
                    announcementsFiltered.add(ann);
                }
            }
        }

        logFiltered(announcementsFiltered);
        return announcementsFiltered;
    }

    /**
     * Get the current number of displaying announcements.
     *
     * @return a count of currently displayed announcements
     */
    @XmlTransient
    public int getDisplayingAnnouncementCount() {
        return getPublishedAnnouncements().size();
    }

    /**
     * Get the current number of approved &amp; scheduled announcements.
     *
     * @return a count of published announcements whose display period has not started yet
     */
    @XmlTransient
    public int getScheduledAnnouncementCount() {
        int count = 0;
        Date now = new Date();
        if (this.announcements != null) {
            for (Announcement ann : this.announcements) {
                if (ann.getPublished() && ann.getStartDisplay().after(now)) {
                    count++;
                }
            }
        }
        return count;
    }

    /**
     * Returns the unpublished announcements whose (null-safe) display period has not
     * yet elapsed.
     *
     * @return the pending announcements
     */
    @XmlTransient
    public Set<Announcement> getPendingAnnouncements() {
        // Don't use a TreeSet here... causes lost announcements
        Set<Announcement> announcementsFiltered = new HashSet<Announcement>();
        Date now = new Date();
        if (this.announcements != null) {
            for (Announcement ann : this.announcements) {
                if (!ann.getPublished() && ann.getNullSafeEndDisplay().after(now)) {
                    announcementsFiltered.add(ann);
                }
            }
        }
        return announcementsFiltered;
    }

    /**
     * Get the current number of pending (unpublished) announcements.
     *
     * @return a count of unpublished announcements
     */
    @XmlTransient
    public int getPendingAnnouncementCount() {
        int count = 0;
        if (this.announcements != null) {
            for (Announcement ann : this.announcements) {
                if (!ann.getPublished()) {
                    count++;
                }
            }
        }
        return count;
    }

    /** @return the id */
    @XmlTransient
    public Long getId() {
        return id;
    }

    /** @param announcements the announcements to set */
    public void setAnnouncements(Set<Announcement> announcements) {
        this.announcements = announcements;
    }

    /** @param id the id to set */
    public void setId(Long id) {
        this.id = id;
    }

    /** @return the authors */
    @XmlElementWrapper(name = "authors")
    @XmlElement(name = "author")
    public Set<String> getAuthors() {
        return authors;
    }

    /** @param authors the authors to set */
    public void setAuthors(Set<String> authors) {
        this.authors = authors;
    }

    /** @return the admins */
    @XmlElementWrapper(name = "admins")
    @XmlElement(name = "admin")
    public Set<String> getAdmins() {
        return admins;
    }

    /** @return the audience */
    @XmlElementWrapper(name = "audience")
    @XmlElement(name = "member")
    public Set<String> getAudience() {
        return audience;
    }

    /** @param admins the admins to set */
    public void setAdmins(Set<String> admins) {
        this.admins = admins;
    }

    /** @param audience the audience to set */
    public void setAudience(Set<String> audience) {
        this.audience = audience;
    }

    /** @return the subscriptionMethod */
    @XmlElement(name = "subscriptionMethod", required = true)
    public int getSubscriptionMethod() {
        return subscriptionMethod;
    }

    /** @param subscriptionMethod the subscriptionMethod to set */
    public void setSubscriptionMethod(int subscriptionMethod) {
        this.subscriptionMethod = subscriptionMethod;
    }

    /**
     * {@inheritDoc}
     *
     * <p>Two topics are equal when they carry the same non-null id; a transient topic
     * (null id) is only equal to itself. The previous implementation cast without an
     * {@code instanceof} check (ClassCastException for foreign types) and NPE'd on
     * null ids.</p>
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof Topic)) {
            return false;
        }
        Topic t = (Topic) obj;
        return this.id != null && this.id.equals(t.getId());
    }

    /**
     * {@inheritDoc}
     *
     * <p>Based on the id only, keeping the hash consistent with
     * {@link #equals(Object)} (the previous title-based hash broke the contract for
     * equal-by-id topics with differing titles).</p>
     */
    @Override
    public int hashCode() {
        return id != null ? id.hashCode() : 0;
    }

    /** {@inheritDoc} */
    @Override
    public String toString() {
        return "Topic [allowRss="
                + allowRss
                + ", creator="
                + creator
                + ", description="
                + description
                + ", id="
                + id
                + ", moderators="
                + moderators
                + ", subscriptionMethod="
                + subscriptionMethod
                + ", title="
                + title
                + "]";
    }
}
| |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.regionserver.compactions.StripeCompactionPolicy;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ConcatenatedLists;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
/**
* Stripe implementation of StoreFileManager.
* Not thread safe - relies on external locking (in HStore). Collections that this class
* returns are immutable or unique to the call, so they should be safe.
* Stripe store splits the key space of the region into non-overlapping stripes, as well as
* some recent files that have all the keys (level 0). Each stripe contains a set of files.
* When L0 is compacted, it's split into the files corresponding to existing stripe boundaries,
* that can thus be added to stripes.
* When scan or get happens, it only has to read the files from the corresponding stripes.
 * See StripeCompactionPolicy on how the stripes are determined; this class doesn't care.
*
* This class should work together with StripeCompactionPolicy and StripeCompactor.
* With regard to how they work, we make at least the following (reasonable) assumptions:
* - Compaction produces one file per new stripe (if any); that is easy to change.
* - Compaction has one contiguous set of stripes both in and out, except if L0 is involved.
*/
@InterfaceAudience.Private
public class StripeStoreFileManager
implements StoreFileManager, StripeCompactionPolicy.StripeInformationProvider {
    static final Log LOG = LogFactory.getLog(StripeStoreFileManager.class);

    /**
     * The file metadata fields that contain the stripe information.
     */
    public static final byte[] STRIPE_START_KEY = Bytes.toBytes("STRIPE_START_KEY");
    public static final byte[] STRIPE_END_KEY = Bytes.toBytes("STRIPE_END_KEY");

    // Comparator for the fileStarts/fileEnds maps; treats keys as row end keys.
    private final static Bytes.RowEndKeyComparator MAP_COMPARATOR = new Bytes.RowEndKeyComparator();

    /**
     * The key value used for range boundary, indicating that the boundary is open (i.e. +-inf).
     */
    public final static byte[] OPEN_KEY = HConstants.EMPTY_BYTE_ARRAY;
    final static byte[] INVALID_KEY = null;

    /**
     * The state class. Used solely to replace results atomically during
     * compactions and avoid complicated error handling.
     */
    private static class State {
        /**
         * The end rows of each stripe. The last stripe end is always open-ended, so it's not stored
         * here. It is invariant that the start row of the stripe is the end row of the previous one
         * (and is an open boundary for the first one).
         */
        public byte[][] stripeEndRows = new byte[0][];

        /**
         * Files by stripe. Each element of the list corresponds to stripeEndRow element with the
         * same index, except the last one. Inside each list, the files are in reverse order by
         * seqNum. Note that the length of this is one higher than that of stripeEndKeys.
         */
        public ArrayList<ImmutableList<StoreFile>> stripeFiles
          = new ArrayList<ImmutableList<StoreFile>>();
        /** Level 0. The files are in reverse order by seqNum. */
        public ImmutableList<StoreFile> level0Files = ImmutableList.<StoreFile>of();

        /** Cached list of all files in the structure, to return from some calls */
        public ImmutableList<StoreFile> allFilesCached = ImmutableList.<StoreFile>of();
    }
    // Replaced wholesale (never mutated in place) so readers see a consistent snapshot.
    private State state = null;

    /** Cached file metadata (or overrides as the case may be) */
    private HashMap<StoreFile, byte[]> fileStarts = new HashMap<StoreFile, byte[]>();
    private HashMap<StoreFile, byte[]> fileEnds = new HashMap<StoreFile, byte[]>();
    /** Normally invalid key is null, but in the map null is the result for "no key"; so use
     * the following constant value in these maps instead. Note that this is a constant and
     * we use it to compare by reference when we read from the map. */
    private static final byte[] INVALID_KEY_IN_MAP = new byte[0];

    private final KVComparator kvComparator;
    private StripeStoreConfig config;

    // Store-level blocking file count, read once from configuration at construction.
    private final int blockingFileCount;
  /**
   * @param kvComparator Comparator used for all row comparisons in this store.
   * @param conf Server configuration; only the blocking store file count is read here.
   * @param config Stripe-specific store configuration (e.g. max split imbalance).
   */
  public StripeStoreFileManager(
      KVComparator kvComparator, Configuration conf, StripeStoreConfig config) {
    this.kvComparator = kvComparator;
    this.config = config;
    this.blockingFileCount = conf.getInt(
        HStore.BLOCKING_STOREFILES_KEY, HStore.DEFAULT_BLOCKING_STOREFILE_COUNT);
  }
  /** Loads the initial set of store files, classifying them into stripes (or L0). */
  @Override
  public void loadFiles(List<StoreFile> storeFiles) {
    loadUnclassifiedStoreFiles(storeFiles);
  }
  /** @return All files in the store (L0 plus every stripe), from the cached snapshot. */
  @Override
  public Collection<StoreFile> getStorefiles() {
    return state.allFilesCached;
  }
  /**
   * Adds freshly flushed files. Uses the merge-copy machinery with a null "compacted"
   * set so the update is applied atomically via a new State.
   */
  @Override
  public void insertNewFiles(Collection<StoreFile> sfs) throws IOException {
    // true == this is a flush, so there are no compacted files to remove.
    CompactionOrFlushMergeCopy cmc = new CompactionOrFlushMergeCopy(true);
    cmc.mergeResults(null, sfs);
    debugDumpState("Added new files");
  }
@Override
public ImmutableCollection<StoreFile> clearFiles() {
ImmutableCollection<StoreFile> result = state.allFilesCached;
this.state = new State();
this.fileStarts.clear();
this.fileEnds.clear();
return result;
}
  /** @return Total number of files (L0 + all stripes), via the cached aggregate list. */
  @Override
  public int getStorefileCount() {
    return state.allFilesCached.size();
  }
  /** See {@link StoreFileManager#getCandidateFilesForRowKeyBefore(KeyValue)}
   * for details on this methods. */
  @Override
  public Iterator<StoreFile> getCandidateFilesForRowKeyBefore(final KeyValue targetKey) {
    KeyBeforeConcatenatedLists result = new KeyBeforeConcatenatedLists();
    // Order matters for this call: L0 first, then stripes from the target's stripe
    // downward, so the most likely candidates are visited first (see
    // updateCandidateFilesForRowKeyBefore, which relies on exactly this ordering).
    result.addSublist(state.level0Files);
    if (!state.stripeFiles.isEmpty()) {
      int lastStripeIndex = findStripeForRow(targetKey.getRow(), false);
      for (int stripeIndex = lastStripeIndex; stripeIndex >= 0; --stripeIndex) {
        result.addSublist(state.stripeFiles.get(stripeIndex));
      }
    }
    return result.iterator();
  }
  /** See {@link StoreFileManager#getCandidateFilesForRowKeyBefore(KeyValue)} and
   * {@link StoreFileManager#updateCandidateFilesForRowKeyBefore(Iterator, KeyValue, KeyValue)}
   * for details on this methods. */
  @Override
  public Iterator<StoreFile> updateCandidateFilesForRowKeyBefore(
      Iterator<StoreFile> candidateFiles, final KeyValue targetKey, final KeyValue candidate) {
    // The iterator is guaranteed to come from getCandidateFilesForRowKeyBefore above.
    KeyBeforeConcatenatedLists.Iterator original =
        (KeyBeforeConcatenatedLists.Iterator)candidateFiles;
    assert original != null;
    ArrayList<List<StoreFile>> components = original.getComponents();
    for (int firstIrrelevant = 0; firstIrrelevant < components.size(); ++firstIrrelevant) {
      StoreFile sf = components.get(firstIrrelevant).get(0);
      byte[] endKey = endOf(sf);
      // Entries are ordered as such: L0, then stripes in reverse order. We never remove
      // level 0; we remove the stripe, and all subsequent ones, as soon as we find the
      // first one that cannot possibly have better candidates.
      if (!isInvalid(endKey) && !isOpen(endKey)
          && (nonOpenRowCompare(endKey, targetKey.getRow()) <= 0)) {
        original.removeComponents(firstIrrelevant);
        break;
      }
    }
    return original;
  }
  @Override
  /**
   * Override of getSplitPoint that determines the split point as the boundary between two
   * stripes, unless it causes significant imbalance between split sides' sizes. In that
   * case, the split boundary will be chosen from the middle of one of the stripes to
   * minimize imbalance.
   * @return The split point, or null if no split is possible.
   */
  public byte[] getSplitPoint() throws IOException {
    if (this.getStorefileCount() == 0) return null;
    if (state.stripeFiles.size() <= 1) {
      // Zero or one stripe - no useful boundary; fall back to the largest-file midpoint.
      return getSplitPointFromAllFiles();
    }
    // Greedily grow a "left" and "right" group of stripes toward each other, always
    // extending the currently smaller side, until every stripe is assigned. The
    // boundary between leftIndex and rightIndex is then the candidate split point.
    int leftIndex = -1, rightIndex = state.stripeFiles.size();
    long leftSize = 0, rightSize = 0;
    long lastLeftSize = 0, lastRightSize = 0;
    while (rightIndex - 1 != leftIndex) {
      if (leftSize >= rightSize) {
        --rightIndex;
        lastRightSize = getStripeFilesSize(rightIndex);
        rightSize += lastRightSize;
      } else {
        ++leftIndex;
        lastLeftSize = getStripeFilesSize(leftIndex);
        leftSize += lastLeftSize;
      }
    }
    if (leftSize == 0 || rightSize == 0) {
      // One side is entirely empty - a boundary split would create an empty daughter.
      String errMsg = String.format("Cannot split on a boundary - left index %d size %d, "
          + "right index %d size %d", leftIndex, leftSize, rightIndex, rightSize);
      debugDumpState(errMsg);
      LOG.warn(errMsg);
      return getSplitPointFromAllFiles();
    }
    // Normalize the ratio to be >= 1 (larger side over smaller side).
    double ratio = (double)rightSize / leftSize;
    if (ratio < 1) {
      ratio = 1 / ratio;
    }
    if (config.getMaxSplitImbalance() > ratio) return state.stripeEndRows[leftIndex];
    // If the difference between the sides is too large, we could get the proportional key on
    // the a stripe to equalize the difference, but there's no proportional key method at the
    // moment, and it's not extremely important.
    // See if we can achieve better ratio if we split the bigger side in half.
    boolean isRightLarger = rightSize >= leftSize;
    double newRatio = isRightLarger
        ? getMidStripeSplitRatio(leftSize, rightSize, lastRightSize)
        : getMidStripeSplitRatio(rightSize, leftSize, lastLeftSize);
    if (newRatio < 1) {
      newRatio = 1 / newRatio;
    }
    // If halving the edge stripe doesn't improve balance, keep the boundary split.
    if (newRatio >= ratio) return state.stripeEndRows[leftIndex];
    LOG.debug("Splitting the stripe - ratio w/o split " + ratio + ", ratio with split "
        + newRatio + " configured ratio " + config.getMaxSplitImbalance());
    // Ok, we may get better ratio, get it.
    return StoreUtils.getLargestFile(state.stripeFiles.get(
        isRightLarger ? rightIndex : leftIndex)).getFileSplitPoint(this.kvComparator);
  }
  /**
   * Fallback split-point computation: ignores stripes and uses the midpoint of the
   * single largest file across L0 and all stripes.
   * @return The split point, or null if the store is empty.
   */
  private byte[] getSplitPointFromAllFiles() throws IOException {
    ConcatenatedLists<StoreFile> sfs = new ConcatenatedLists<StoreFile>();
    sfs.addSublist(state.level0Files);
    sfs.addAllSublists(state.stripeFiles);
    if (sfs.isEmpty()) return null;
    return StoreUtils.getLargestFile(sfs).getFileSplitPoint(this.kvComparator);
  }
private double getMidStripeSplitRatio(long smallerSize, long largerSize, long lastLargerSize) {
return (double)(largerSize - lastLargerSize / 2f) / (smallerSize + lastLargerSize / 2f);
}
  /**
   * Returns the files that can contain rows in [startRow, stopRow]: the overlapping
   * stripes plus all of L0 (L0 files can contain any row).
   * NOTE(review): the isGet flag is currently unused in this implementation - confirm
   * against the StoreFileManager interface contract before relying on it.
   */
  @Override
  public Collection<StoreFile> getFilesForScanOrGet(
      boolean isGet, byte[] startRow, byte[] stopRow) {
    if (state.stripeFiles.isEmpty()) {
      return state.level0Files; // There's just L0.
    }
    int firstStripe = findStripeForRow(startRow, true);
    int lastStripe = findStripeForRow(stopRow, false);
    assert firstStripe <= lastStripe;
    if (firstStripe == lastStripe && state.level0Files.isEmpty()) {
      return state.stripeFiles.get(firstStripe); // There's just one stripe we need.
    }
    if (firstStripe == 0 && lastStripe == (state.stripeFiles.size() - 1)) {
      return state.allFilesCached; // We need to read all files.
    }
    ConcatenatedLists<StoreFile> result = new ConcatenatedLists<StoreFile>();
    result.addAllSublists(state.stripeFiles.subList(firstStripe, lastStripe + 1));
    result.addSublist(state.level0Files);
    return result;
  }
  /**
   * Atomically replaces compacted files with the compaction output. Operates on lazy
   * copies and swaps in a whole new State so a failure mid-way leaves the old
   * structure intact.
   * @param compactedFiles Files consumed by the compaction.
   * @param results Files produced by the compaction.
   */
  @Override
  public void addCompactionResults(
      Collection<StoreFile> compactedFiles, Collection<StoreFile> results) throws IOException {
    // See class comment for the assumptions we make here.
    LOG.debug("Attempting to merge compaction results: " + compactedFiles.size()
        + " files replaced by " + results.size());
    // In order to be able to fail in the middle of the operation, we'll operate on lazy
    // copies and apply the result at the end.
    CompactionOrFlushMergeCopy cmc = new CompactionOrFlushMergeCopy(false);
    cmc.mergeResults(compactedFiles, results);
    debugDumpState("Merged compaction results");
  }
  /**
   * Computes compaction priority. Lower values are more urgent; values at or below
   * HStore.PRIORITY_USER are reserved, so normal results are clamped above it.
   */
  @Override
  public int getStoreCompactionPriority() {
    // If there's only L0, do what the default store does.
    // If we are in critical priority, do the same - we don't want to trump all stores all
    // the time due to how many files we have.
    int fc = getStorefileCount();
    if (state.stripeFiles.isEmpty() || (this.blockingFileCount <= fc)) {
      return this.blockingFileCount - fc;
    }
    // If we are in good shape, we don't want to be trumped by all other stores due to how
    // many files we have, so do an approximate mapping to normal priority range; L0 counts
    // for all stripes.
    int l0 = state.level0Files.size(), sc = state.stripeFiles.size();
    int priority = (int)Math.ceil(((double)(this.blockingFileCount - fc + l0) / sc) - l0);
    return (priority <= HStore.PRIORITY_USER) ? (HStore.PRIORITY_USER + 1) : priority;
  }
/**
* Gets the total size of all files in the stripe.
* @param stripeIndex Stripe index.
* @return Size.
*/
private long getStripeFilesSize(int stripeIndex) {
long result = 0;
for (StoreFile sf : state.stripeFiles.get(stripeIndex)) {
result += sf.getReader().length();
}
return result;
}
  /**
   * Loads initial store files that were picked up from some physical location pertaining to
   * this store (presumably). Unlike adding files after compaction, assumes empty initial
   * sets, and is forgiving with regard to stripe constraints - at worst, many/all files will
   * go to level 0.
   * @param storeFiles Store files to add.
   */
  private void loadUnclassifiedStoreFiles(List<StoreFile> storeFiles) {
    LOG.debug("Attempting to load " + storeFiles.size() + " store files.");
    TreeMap<byte[], ArrayList<StoreFile>> candidateStripes =
        new TreeMap<byte[], ArrayList<StoreFile>>(MAP_COMPARATOR);
    ArrayList<StoreFile> level0Files = new ArrayList<StoreFile>();
    // Separate the files into tentative stripes; then validate. Currently, we rely on metadata.
    // If needed, we could dynamically determine the stripes in future.
    for (StoreFile sf : storeFiles) {
      byte[] startRow = startOf(sf), endRow = endOf(sf);
      // Validate the range and put the files into place.
      if (isInvalid(startRow) || isInvalid(endRow)) {
        insertFileIntoStripe(level0Files, sf); // No metadata - goes to L0.
        ensureLevel0Metadata(sf);
      } else if (!isOpen(startRow) && !isOpen(endRow) &&
          nonOpenRowCompare(startRow, endRow) >= 0) {
        // Start >= end is a corrupt range; demote to L0 rather than fail the load.
        LOG.error("Unexpected metadata - start row [" + Bytes.toString(startRow) + "], end row ["
            + Bytes.toString(endRow) + "] in file [" + sf.getPath() + "], pushing to L0");
        insertFileIntoStripe(level0Files, sf); // Bad metadata - goes to L0 also.
        ensureLevel0Metadata(sf);
      } else {
        // Group candidate stripes by their end row.
        ArrayList<StoreFile> stripe = candidateStripes.get(endRow);
        if (stripe == null) {
          stripe = new ArrayList<StoreFile>();
          candidateStripes.put(endRow, stripe);
        }
        insertFileIntoStripe(stripe, sf);
      }
    }
    // Possible improvement - for variable-count stripes, if all the files are in L0, we can
    // instead create single, open-ended stripe with all files.
    boolean hasOverlaps = false;
    byte[] expectedStartRow = null; // first stripe can start wherever
    Iterator<Map.Entry<byte[], ArrayList<StoreFile>>> entryIter =
        candidateStripes.entrySet().iterator();
    while (entryIter.hasNext()) {
      Map.Entry<byte[], ArrayList<StoreFile>> entry = entryIter.next();
      ArrayList<StoreFile> files = entry.getValue();
      // Validate the file start rows, and remove the bad ones to level 0.
      for (int i = 0; i < files.size(); ++i) {
        StoreFile sf = files.get(i);
        byte[] startRow = startOf(sf);
        if (expectedStartRow == null) {
          expectedStartRow = startRow; // ensure that first stripe is still consistent
        } else if (!rowEquals(expectedStartRow, startRow)) {
          hasOverlaps = true;
          LOG.warn("Store file doesn't fit into the tentative stripes - expected to start at ["
              + Bytes.toString(expectedStartRow) + "], but starts at [" + Bytes.toString(startRow)
              + "], to L0 it goes");
          StoreFile badSf = files.remove(i);
          insertFileIntoStripe(level0Files, badSf);
          ensureLevel0Metadata(badSf);
          --i; // compensate for the removal; re-check the element now at index i
        }
      }
      // Check if any files from the candidate stripe are valid. If so, add a stripe.
      byte[] endRow = entry.getKey();
      if (!files.isEmpty()) {
        expectedStartRow = endRow; // Next stripe must start exactly at that key.
      } else {
        entryIter.remove();
      }
    }
    // In the end, there must be open ends on two sides. If not, and there were no errors i.e.
    // files are consistent, they might be coming from a split. We will treat the boundaries
    // as open keys anyway, and log the message.
    // If there were errors, we'll play it safe and dump everything into L0.
    if (!candidateStripes.isEmpty()) {
      StoreFile firstFile = candidateStripes.firstEntry().getValue().get(0);
      boolean isOpen = isOpen(startOf(firstFile)) && isOpen(candidateStripes.lastKey());
      if (!isOpen) {
        LOG.warn("The range of the loaded files does not cover full key space: from ["
            + Bytes.toString(startOf(firstFile)) + "], to ["
            + Bytes.toString(candidateStripes.lastKey()) + "]");
        if (!hasOverlaps) {
          // Consistent but not open-ended (likely post-split): force open edge boundaries.
          ensureEdgeStripeMetadata(candidateStripes.firstEntry().getValue(), true);
          ensureEdgeStripeMetadata(candidateStripes.lastEntry().getValue(), false);
        } else {
          LOG.warn("Inconsistent files, everything goes to L0.");
          for (ArrayList<StoreFile> files : candidateStripes.values()) {
            for (StoreFile sf : files) {
              insertFileIntoStripe(level0Files, sf);
              ensureLevel0Metadata(sf);
            }
          }
          candidateStripes.clear();
        }
      }
    }
    // Copy the results into the fields. Build a complete State, then publish it with a
    // single assignment (readers see either the old or the new structure, never a mix).
    State state = new State();
    state.level0Files = ImmutableList.copyOf(level0Files);
    state.stripeFiles = new ArrayList<ImmutableList<StoreFile>>(candidateStripes.size());
    state.stripeEndRows = new byte[Math.max(0, candidateStripes.size() - 1)][];
    ArrayList<StoreFile> newAllFiles = new ArrayList<StoreFile>(level0Files);
    int i = candidateStripes.size() - 1;
    for (Map.Entry<byte[], ArrayList<StoreFile>> entry : candidateStripes.entrySet()) {
      state.stripeFiles.add(ImmutableList.copyOf(entry.getValue()));
      newAllFiles.addAll(entry.getValue());
      if (i > 0) {
        // The last stripe's end is open and therefore not stored (i == 0 is skipped).
        state.stripeEndRows[state.stripeFiles.size() - 1] = entry.getKey();
      }
      --i;
    }
    state.allFilesCached = ImmutableList.copyOf(newAllFiles);
    this.state = state;
    debugDumpState("Files loaded");
  }
private void ensureEdgeStripeMetadata(ArrayList<StoreFile> stripe, boolean isFirst) {
HashMap<StoreFile, byte[]> targetMap = isFirst ? fileStarts : fileEnds;
for (StoreFile sf : stripe) {
targetMap.put(sf, OPEN_KEY);
}
}
  /**
   * Ensures an L0 file's cached boundaries read as invalid, regardless of any stripe
   * metadata the file itself carries. INVALID_KEY_IN_MAP stands in for null because
   * a null map value would be indistinguishable from "no entry".
   */
  private void ensureLevel0Metadata(StoreFile sf) {
    if (!isInvalid(startOf(sf))) this.fileStarts.put(sf, INVALID_KEY_IN_MAP);
    if (!isInvalid(endOf(sf))) this.fileEnds.put(sf, INVALID_KEY_IN_MAP);
  }
private void debugDumpState(String string) {
if (!LOG.isDebugEnabled()) return;
StringBuilder sb = new StringBuilder();
sb.append("\n" + string + "; current stripe state is as such:");
sb.append("\n level 0 with ").append(state.level0Files.size()).append(" files;");
for (int i = 0; i < state.stripeFiles.size(); ++i) {
String endRow = (i == state.stripeEndRows.length)
? "(end)" : "[" + Bytes.toString(state.stripeEndRows[i]) + "]";
sb.append("\n stripe ending in ").append(endRow).append(" with ")
.append(state.stripeFiles.get(i).size()).append(" files;");
}
sb.append("\n").append(getStorefileCount()).append(" files total.");
LOG.debug(sb.toString());
}
/**
* Checks whether the key indicates an open interval boundary (i.e. infinity).
*/
private static final boolean isOpen(byte[] key) {
return key != null && key.length == 0;
}
  /**
   * Checks whether the key is invalid (e.g. from an L0 file, or non-stripe-compacted files).
   * Note this is an intentional reference comparison: INVALID_KEY is null, so this is
   * effectively a null check (values read from the override maps are translated from
   * INVALID_KEY_IN_MAP back to null before reaching here).
   */
  private static final boolean isInvalid(byte[] key) {
    return key == INVALID_KEY;
  }
  /**
   * Compare two keys for equality. Delegates to the store's comparator so row
   * semantics match the rest of the region.
   */
  private final boolean rowEquals(byte[] k1, byte[] k2) {
    return kvComparator.matchingRows(k1, 0, k1.length, k2, 0, k2.length);
  }
  /**
   * Compare two keys. Keys must not be open (isOpen(row) == false), since an open
   * boundary has no meaningful ordering against a real row.
   */
  private final int nonOpenRowCompare(byte[] k1, byte[] k2) {
    assert !isOpen(k1) && !isOpen(k2);
    return kvComparator.compareRows(k1, 0, k1.length, k2, 0, k2.length);
  }
  /**
   * Finds the stripe index by end row.
   * @return The index of the stripe whose end row equals endRow; the last stripe's
   *         index for the open key; or a negative value (binarySearch convention)
   *         if no stripe ends at this exact row.
   */
  private final int findStripeIndexByEndRow(byte[] endRow) {
    assert !isInvalid(endRow);
    // The open key is the (unstored) end of the last stripe.
    if (isOpen(endRow)) return state.stripeEndRows.length;
    return Arrays.binarySearch(state.stripeEndRows, endRow, Bytes.BYTES_COMPARATOR);
  }
  /**
   * Finds the stripe index for the stripe containing a row provided externally for get/scan.
   * @param row The row to locate.
   * @param isStart Whether the row is a scan start (empty row means first stripe)
   *                or a scan stop (empty row means last stripe).
   */
  private final int findStripeForRow(byte[] row, boolean isStart) {
    if (isStart && row == HConstants.EMPTY_START_ROW) return 0;
    if (!isStart && row == HConstants.EMPTY_END_ROW) return state.stripeFiles.size() - 1;
    // If there's an exact match below, a stripe ends at "row". Stripe right boundary is
    // exclusive, so that means the row is in the next stripe; thus, we need to add one to index.
    // If there's no match, the return value of binarySearch is (-(insertion point) - 1), where
    // insertion point is the index of the next greater element, or list size if none. The
    // insertion point happens to be exactly what we need, so we need to add one to the result.
    // Math.abs folds both cases: match at k yields k+1; no match yields |-(ins)-1+1| = ins.
    return Math.abs(Arrays.binarySearch(state.stripeEndRows, row, Bytes.BYTES_COMPARATOR) + 1);
  }
@Override
public final byte[] getStartRow(int stripeIndex) {
return (stripeIndex == 0 ? OPEN_KEY : state.stripeEndRows[stripeIndex - 1]);
}
@Override
public final byte[] getEndRow(int stripeIndex) {
return (stripeIndex == state.stripeEndRows.length
? OPEN_KEY : state.stripeEndRows[stripeIndex]);
}
  /**
   * Returns the effective stripe start row of a file: the cached override if present
   * (translating the in-map invalid marker back to INVALID_KEY/null by reference),
   * otherwise the file's own metadata.
   */
  private byte[] startOf(StoreFile sf) {
    byte[] result = this.fileStarts.get(sf);
    return result == null ? sf.getMetadataValue(STRIPE_START_KEY)
        : (result == INVALID_KEY_IN_MAP ? INVALID_KEY : result);
  }
  /**
   * Returns the effective stripe end row of a file: the cached override if present
   * (translating the in-map invalid marker back to INVALID_KEY/null by reference),
   * otherwise the file's own metadata.
   */
  private byte[] endOf(StoreFile sf) {
    byte[] result = this.fileEnds.get(sf);
    return result == null ? sf.getMetadataValue(STRIPE_END_KEY)
        : (result == INVALID_KEY_IN_MAP ? INVALID_KEY : result);
  }
/**
* Inserts a file in the correct place (by seqnum) in a stripe copy.
* @param stripe Stripe copy to insert into.
* @param sf File to insert.
*/
private static void insertFileIntoStripe(ArrayList<StoreFile> stripe, StoreFile sf) {
// The only operation for which sorting of the files matters is KeyBefore. Therefore,
// we will store the file in reverse order by seqNum from the outset.
for (int insertBefore = 0; ; ++insertBefore) {
if (insertBefore == stripe.size()
|| (StoreFile.Comparators.SEQ_ID.compare(sf, stripe.get(insertBefore)) >= 0)) {
stripe.add(insertBefore, sf);
break;
}
}
}
  /**
   * An extension of ConcatenatedLists that has several peculiar properties.
   * First, one can cut the tail of the logical list by removing last several sub-lists.
   * Second, items can be removed thru iterator.
   * Third, if the sub-lists are immutable, they are replaced with mutable copies when needed.
   * On average KeyBefore operation will contain half the stripes as potential candidates,
   * but will quickly cut down on them as it finds something in the more likely ones; thus,
   * the above allow us to avoid unnecessary copying of a bunch of lists.
   */
  private static class KeyBeforeConcatenatedLists extends ConcatenatedLists<StoreFile> {
    @Override
    public java.util.Iterator<StoreFile> iterator() {
      return new Iterator();
    }
    public class Iterator extends ConcatenatedLists<StoreFile>.Iterator {
      // Exposes the underlying sub-lists so callers can inspect/trim candidates.
      public ArrayList<List<StoreFile>> getComponents() {
        return components;
      }
      /**
       * Drops sub-list startIndex and everything after it, keeping the cached total
       * size consistent.
       */
      public void removeComponents(int startIndex) {
        List<List<StoreFile>> subList = components.subList(startIndex, components.size());
        for (List<StoreFile> entry : subList) {
          size -= entry.size();
        }
        assert size >= 0;
        subList.clear(); // subList is a view, so this truncates components in place.
      }
      @Override
      public void remove() {
        if (!this.nextWasCalled) {
          throw new IllegalStateException("No element to remove");
        }
        this.nextWasCalled = false;
        List<StoreFile> src = components.get(currentComponent);
        if (src instanceof ImmutableList<?>) {
          // Lazily replace the immutable sub-list with a mutable copy on first removal.
          src = new ArrayList<StoreFile>(src);
          components.set(currentComponent, src);
        }
        src.remove(indexWithinComponent);
        --size;
        --indexWithinComponent; // step back so the next next() does not skip an element
        if (src.isEmpty()) {
          components.remove(currentComponent); // indexWithinComponent is already -1 here.
        }
      }
    }
  }
  /**
   * Non-static helper class for merging compaction or flush results.
   * Since we want to merge them atomically (more or less), it operates on lazy copies,
   * then creates a new state object and puts it in place.
   */
  private class CompactionOrFlushMergeCopy {
    // Lazy mutable copies of the outer state; null until first modification.
    private ArrayList<List<StoreFile>> stripeFiles = null;
    private ArrayList<StoreFile> level0Files = null;
    private ArrayList<byte[]> stripeEndRows = null;
    // Inputs of the merge, captured by mergeResults().
    private Collection<StoreFile> compactedFiles = null;
    private Collection<StoreFile> results = null;
    // Result files that ended up in L0 and need their metadata overridden.
    private List<StoreFile> l0Results = new ArrayList<StoreFile>();
    // True for a flush (nothing to remove), false for a compaction.
    private final boolean isFlush;
    public CompactionOrFlushMergeCopy(boolean isFlush) {
      // Create a lazy mutable copy (other fields are so lazy they start out as nulls).
      this.stripeFiles = new ArrayList<List<StoreFile>>(
          StripeStoreFileManager.this.state.stripeFiles);
      this.isFlush = isFlush;
    }
    /**
     * Performs the merge: removes compacted files (compactions only), places result
     * files into existing stripes, new candidate stripes, or L0, then publishes a
     * new State and updates the metadata override maps.
     * @param compactedFiles Files consumed by the compaction; null/ignored for a flush.
     * @param results Files produced by the compaction or flush.
     */
    public void mergeResults(Collection<StoreFile> compactedFiles, Collection<StoreFile> results)
        throws IOException {
      assert this.compactedFiles == null && this.results == null; // single-use object
      this.compactedFiles = compactedFiles;
      this.results = results;
      // Do logical processing.
      if (!isFlush) removeCompactedFiles();
      TreeMap<byte[], StoreFile> newStripes = processResults();
      if (newStripes != null) {
        processNewCandidateStripes(newStripes);
      }
      // Create new state and update parent.
      State state = createNewState();
      StripeStoreFileManager.this.state = state;
      updateMetadataMaps();
    }
    /**
     * Builds the replacement State from the lazy copies, falling back to the old
     * state's collections for anything that was never modified.
     */
    private State createNewState() {
      State oldState = StripeStoreFileManager.this.state;
      // Stripe count should be the same unless the end rows changed.
      assert oldState.stripeFiles.size() == this.stripeFiles.size() || this.stripeEndRows != null;
      State newState = new State();
      newState.level0Files = (this.level0Files == null) ? oldState.level0Files
          : ImmutableList.copyOf(this.level0Files);
      newState.stripeEndRows = (this.stripeEndRows == null) ? oldState.stripeEndRows
          : this.stripeEndRows.toArray(new byte[this.stripeEndRows.size()][]);
      newState.stripeFiles = new ArrayList<ImmutableList<StoreFile>>(this.stripeFiles.size());
      for (List<StoreFile> newStripe : this.stripeFiles) {
        // Untouched stripes are still ImmutableList and can be reused without copying.
        newState.stripeFiles.add(newStripe instanceof ImmutableList<?>
            ? (ImmutableList<StoreFile>)newStripe : ImmutableList.copyOf(newStripe));
      }
      // Rebuild the aggregate cache: old files minus compacted plus results.
      List<StoreFile> newAllFiles = new ArrayList<StoreFile>(oldState.allFilesCached);
      if (!isFlush) newAllFiles.removeAll(compactedFiles);
      newAllFiles.addAll(results);
      newState.allFilesCached = ImmutableList.copyOf(newAllFiles);
      return newState;
    }
private void updateMetadataMaps() {
StripeStoreFileManager parent = StripeStoreFileManager.this;
if (!isFlush) {
for (StoreFile sf : this.compactedFiles) {
parent.fileStarts.remove(sf);
parent.fileEnds.remove(sf);
}
}
if (this.l0Results != null) {
for (StoreFile sf : this.l0Results) {
parent.ensureLevel0Metadata(sf);
}
}
}
    /**
     * @param index Index of the stripe we need.
     * @return A lazy stripe copy from current stripes. The first request replaces the
     *         shared ImmutableList with a private mutable copy; later requests return
     *         that same copy.
     */
    private final ArrayList<StoreFile> getStripeCopy(int index) {
      List<StoreFile> stripeCopy = this.stripeFiles.get(index);
      ArrayList<StoreFile> result = null;
      if (stripeCopy instanceof ImmutableList<?>) {
        result = new ArrayList<StoreFile>(stripeCopy);
        this.stripeFiles.set(index, result);
      } else {
        // Already copied earlier in this merge; reuse it.
        result = (ArrayList<StoreFile>)stripeCopy;
      }
      return result;
    }
/**
* @return A lazy L0 copy from current state.
*/
private final ArrayList<StoreFile> getLevel0Copy() {
if (this.level0Files == null) {
this.level0Files = new ArrayList<StoreFile>(StripeStoreFileManager.this.state.level0Files);
}
return this.level0Files;
}
    /**
     * Process new files, and add them either to the structure of existing stripes,
     * or to the list of new candidate stripes.
     * @return New candidate stripes, keyed by end row; null if every result file fit
     *         into an existing stripe or L0.
     */
    private TreeMap<byte[], StoreFile> processResults() throws IOException {
      TreeMap<byte[], StoreFile> newStripes = null;
      for (StoreFile sf : this.results) {
        byte[] startRow = startOf(sf), endRow = endOf(sf);
        if (isInvalid(endRow) || isInvalid(startRow)) {
          // No stripe metadata - expected for flushes, suspicious for compactions.
          if (!isFlush) {
            LOG.warn("The newly compacted file doesn't have stripes set: " + sf.getPath());
          }
          insertFileIntoStripe(getLevel0Copy(), sf);
          this.l0Results.add(sf);
          continue;
        }
        if (!this.stripeFiles.isEmpty()) {
          int stripeIndex = findStripeIndexByEndRow(endRow);
          if ((stripeIndex >= 0) && rowEquals(getStartRow(stripeIndex), startRow)) {
            // Simple/common case - add file to an existing stripe.
            insertFileIntoStripe(getStripeCopy(stripeIndex), sf);
            continue;
          }
        }
        // Make a new candidate stripe.
        if (newStripes == null) {
          newStripes = new TreeMap<byte[], StoreFile>(MAP_COMPARATOR);
        }
        StoreFile oldSf = newStripes.put(endRow, sf);
        if (oldSf != null) {
          // Each new stripe may have exactly one file; two with the same end row is a bug.
          throw new IOException("Compactor has produced multiple files for the stripe ending in ["
              + Bytes.toString(endRow) + "], found " + sf.getPath() + " and " + oldSf.getPath());
        }
      }
      return newStripes;
    }
    /**
     * Removes the compacted files (this.compactedFiles) from the lazy copies of the
     * stripe/L0 lists, locating each file by its end row.
     * @throws IOException If a file claims a stripe that does not exist, or is not
     *         found where its metadata says it should be.
     */
    private void removeCompactedFiles() throws IOException {
      for (StoreFile oldFile : this.compactedFiles) {
        byte[] oldEndRow = endOf(oldFile);
        List<StoreFile> source = null;
        if (isInvalid(oldEndRow)) {
          source = getLevel0Copy(); // no stripe metadata means the file lives in L0
        } else {
          int stripeIndex = findStripeIndexByEndRow(oldEndRow);
          if (stripeIndex < 0) {
            throw new IOException("An allegedly compacted file [" + oldFile + "] does not belong"
                + " to a known stripe (end row - [" + Bytes.toString(oldEndRow) + "])");
          }
          source = getStripeCopy(stripeIndex);
        }
        if (!source.remove(oldFile)) {
          throw new IOException("An allegedly compacted file [" + oldFile + "] was not found");
        }
      }
    }
    /**
     * See {@link #addCompactionResults(Collection, Collection)} - updates the stripe list with
     * new candidate stripes/removes old stripes; produces new set of stripe end rows.
     * @param newStripes New stripes - files by end row.
     */
    private void processNewCandidateStripes(
        TreeMap<byte[], StoreFile> newStripes) throws IOException {
      // Validate that the removed and added aggregate ranges still make for a full key space.
      boolean hasStripes = !this.stripeFiles.isEmpty();
      this.stripeEndRows = new ArrayList<byte[]>(
          Arrays.asList(StripeStoreFileManager.this.state.stripeEndRows));
      int removeFrom = 0;
      byte[] firstStartRow = startOf(newStripes.firstEntry().getValue());
      byte[] lastEndRow = newStripes.lastKey();
      if (!hasStripes && (!isOpen(firstStartRow) || !isOpen(lastEndRow))) {
        throw new IOException("Newly created stripes do not cover the entire key space.");
      }
      boolean canAddNewStripes = true;
      Collection<StoreFile> filesForL0 = null;
      if (hasStripes) {
        // Determine which stripes will need to be removed because they conflict with new stripes.
        // The new boundaries should match old stripe boundaries, so we should get exact matches.
        if (isOpen(firstStartRow)) {
          removeFrom = 0;
        } else {
          removeFrom = findStripeIndexByEndRow(firstStartRow);
          if (removeFrom < 0) throw new IOException("Compaction is trying to add a bad range.");
          // The matched stripe ends at firstStartRow; the first stripe to replace is the next one.
          ++removeFrom;
        }
        int removeTo = findStripeIndexByEndRow(lastEndRow);
        if (removeTo < 0) throw new IOException("Compaction is trying to add a bad range.");
        // See if there are files in the stripes we are trying to replace.
        ArrayList<StoreFile> conflictingFiles = new ArrayList<StoreFile>();
        for (int removeIndex = removeTo; removeIndex >= removeFrom; --removeIndex) {
          conflictingFiles.addAll(this.stripeFiles.get(removeIndex));
        }
        if (!conflictingFiles.isEmpty()) {
          // This can be caused by two things - concurrent flush into stripes, or a bug.
          // Unfortunately, we cannot tell them apart without looking at timing or something
          // like that. We will assume we are dealing with a flush and dump it into L0.
          if (isFlush) {
            long newSize = StripeCompactionPolicy.getTotalFileSize(newStripes.values());
            LOG.warn("Stripes were created by a flush, but results of size " + newSize
                + " cannot be added because the stripes have changed");
            canAddNewStripes = false;
            filesForL0 = newStripes.values();
          } else {
            long oldSize = StripeCompactionPolicy.getTotalFileSize(conflictingFiles);
            LOG.info(conflictingFiles.size() + " conflicting files (likely created by a flush) "
                + " of size " + oldSize + " are moved to L0 due to concurrent stripe change");
            filesForL0 = conflictingFiles;
          }
          if (filesForL0 != null) {
            for (StoreFile sf : filesForL0) {
              insertFileIntoStripe(getLevel0Copy(), sf);
            }
            l0Results.addAll(filesForL0);
          }
        }
        if (canAddNewStripes) {
          // Remove old empty stripes.
          int originalCount = this.stripeFiles.size();
          for (int removeIndex = removeTo; removeIndex >= removeFrom; --removeIndex) {
            if (removeIndex != originalCount - 1) {
              // The last stripe has no stored end row (it is open), so nothing to remove.
              this.stripeEndRows.remove(removeIndex);
            }
            this.stripeFiles.remove(removeIndex);
          }
        }
      }
      if (!canAddNewStripes) return; // Files were already put into L0.
      // Now, insert new stripes. The total ranges match, so we can insert where we removed.
      byte[] previousEndRow = null;
      int insertAt = removeFrom;
      for (Map.Entry<byte[], StoreFile> newStripe : newStripes.entrySet()) {
        if (previousEndRow != null) {
          // Validate that the ranges are contiguous.
          assert !isOpen(previousEndRow);
          byte[] startRow = startOf(newStripe.getValue());
          if (!rowEquals(previousEndRow, startRow)) {
            throw new IOException("The new stripes produced by "
                + (isFlush ? "flush" : "compaction") + " are not contiguous");
          }
        }
        // Add the new stripe.
        ArrayList<StoreFile> tmp = new ArrayList<StoreFile>();
        tmp.add(newStripe.getValue());
        stripeFiles.add(insertAt, tmp);
        previousEndRow = newStripe.getKey();
        if (!isOpen(previousEndRow)) {
          stripeEndRows.add(insertAt, previousEndRow);
        }
        ++insertAt;
      }
    }
}
  /** @return The current level 0 files, in reverse seqNum order. */
  @Override
  public List<StoreFile> getLevel0Files() {
    return this.state.level0Files;
  }
@Override
public List<byte[]> getStripeBoundaries() {
if (this.state.stripeFiles.isEmpty()) return new ArrayList<byte[]>();
ArrayList<byte[]> result = new ArrayList<byte[]>(this.state.stripeEndRows.length + 2);
result.add(OPEN_KEY);
for (int i = 0; i < this.state.stripeEndRows.length; ++i) {
result.add(this.state.stripeEndRows[i]);
}
result.add(OPEN_KEY);
return result;
}
  /** @return The per-stripe file lists from the current snapshot. */
  @Override
  public ArrayList<ImmutableList<StoreFile>> getStripes() {
    return this.state.stripeFiles;
  }
  /** @return The number of stripes (not counting L0). */
  @Override
  public int getStripeCount() {
    return this.state.stripeFiles.size();
  }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.idp.saml.sp;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.cache.Cache;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.iterable.Iterables;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentLocation;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.Environment;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.watcher.FileChangesListener;
import org.elasticsearch.watcher.FileWatcher;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.core.XPackPlugin;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
public class WildcardServiceProviderResolver {
    // Path of the wildcard service providers JSON file, relative to the config dir;
    // defaults to "wildcard_services.json" when not explicitly configured.
    public static final Setting<String> FILE_PATH_SETTING = Setting.simpleString("xpack.idp.sp.wildcard.path",
        "wildcard_services.json", Setting.Property.NodeScope);
    /**
     * Immutable snapshot of the loaded wildcard services plus a cache of resolved
     * service providers; a new State replaces the old one atomically on file reload.
     */
    private class State {
        final Map<String, WildcardServiceProvider> services;
        final Cache<Tuple<String, String>, SamlServiceProvider> cache;
        private State(Map<String, WildcardServiceProvider> services) {
            this.services = services;
            // A fresh cache per state, so stale resolutions die with the old file contents.
            this.cache = ServiceProviderCacheSettings.buildCache(settings);
        }
    }
    private static final Logger logger = LogManager.getLogger();
    private final Settings settings;
    private final ScriptService scriptService;
    private final SamlServiceProviderFactory serviceProviderFactory;
    // Holds the current State; swapped atomically when the services file is (re)loaded.
    private final AtomicReference<State> stateRef;
WildcardServiceProviderResolver(Settings settings, ScriptService scriptService, SamlServiceProviderFactory serviceProviderFactory) {
this.settings = settings;
this.scriptService = scriptService;
this.serviceProviderFactory = serviceProviderFactory;
this.stateRef = new AtomicReference<>(new State(Map.of()));
}
/**
* This is implemented as a factory method to facilitate testing - the core resolver just works on InputStreams, this method
* handles all the Path/ResourceWatcher logic
*/
public static WildcardServiceProviderResolver create(Environment environment,
ResourceWatcherService resourceWatcherService,
ScriptService scriptService,
SamlServiceProviderFactory spFactory) {
final Settings settings = environment.settings();
final Path path = XPackPlugin.resolveConfigFile(environment, FILE_PATH_SETTING.get(environment.settings()));
logger.info("Loading wildcard services from file [{}]", path.toAbsolutePath());
final WildcardServiceProviderResolver resolver = new WildcardServiceProviderResolver(settings, scriptService, spFactory);
if (Files.exists(path)) {
try {
resolver.reload(path);
} catch (IOException e) {
throw new ElasticsearchException("File [{}] (from setting [{}]) cannot be loaded",
e, path.toAbsolutePath(), FILE_PATH_SETTING.getKey());
}
} else if (FILE_PATH_SETTING.exists(environment.settings())) {
// A file was explicitly configured, but doesn't exist. That's a mistake...
throw new ElasticsearchException("File [{}] (from setting [{}]) does not exist",
path.toAbsolutePath(), FILE_PATH_SETTING.getKey());
}
final FileWatcher fileWatcher = new FileWatcher(path);
fileWatcher.addListener(new FileChangesListener() {
@Override
public void onFileCreated(Path file) {
onFileChanged(file);
}
@Override
public void onFileDeleted(Path file) {
onFileChanged(file);
}
@Override
public void onFileChanged(Path file) {
try {
resolver.reload(file);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
});
try {
resourceWatcherService.add(fileWatcher);
} catch (IOException e) {
throw new ElasticsearchException("Failed to watch file [{}] (from setting [{}])",
e, path.toAbsolutePath(), FILE_PATH_SETTING.getKey());
}
return resolver;
}
public SamlServiceProvider resolve(String entityId, String acs) {
final State currentState = stateRef.get();
Tuple<String, String> cacheKey = new Tuple<>(entityId, acs);
final SamlServiceProvider cached = currentState.cache.get(cacheKey);
if (cached != null) {
logger.trace("Service for [{}] [{}] is cached [{}]", entityId, acs, cached);
return cached;
}
final Map<String, SamlServiceProvider> matches = new HashMap<>();
currentState.services.forEach((name, wildcard) -> {
final SamlServiceProviderDocument doc = wildcard.apply(scriptService, entityId, acs);
if (doc != null) {
final SamlServiceProvider sp = serviceProviderFactory.buildServiceProvider(doc);
matches.put(name, sp);
}
});
switch (matches.size()) {
case 0:
logger.trace("No wildcard services found for [{}] [{}]", entityId, acs);
return null;
case 1:
final SamlServiceProvider serviceProvider = Iterables.get(matches.values(), 0);
logger.trace("Found exactly 1 wildcard service for [{}] [{}] - [{}]", entityId, acs, serviceProvider);
currentState.cache.put(cacheKey, serviceProvider);
return serviceProvider;
default:
final String names = Strings.collectionToCommaDelimitedString(matches.keySet());
logger.warn("Found multiple matching wildcard services for [{}] [{}] - [{}]", entityId, acs, names);
throw new IllegalStateException(
"Found multiple wildcard service providers for entity ID [" + entityId + "] and ACS [" + acs
+ "] - wildcard service names [" + names + "]");
}
}
// For testing
Map<String, WildcardServiceProvider> services() {
return stateRef.get().services;
}
// Accessible for testing
void reload(XContentParser parser) throws IOException {
final Map<String, WildcardServiceProvider> newServices = Map.copyOf(parse(parser));
final State oldState = this.stateRef.get();
if (newServices.equals(oldState.services) == false) {
// Services have changed
if (this.stateRef.compareAndSet(oldState, new State(newServices))) {
logger.info("Reloaded cached wildcard service providers, new providers [{}]",
Strings.collectionToCommaDelimitedString(newServices.keySet()));
} else {
// some other thread reloaded it
}
}
}
private void reload(Path file) throws IOException {
try (InputStream in = Files.newInputStream(file);
XContentParser parser = buildServicesParser(in)) {
reload(parser);
}
}
private static XContentParser buildServicesParser(InputStream in) throws IOException {
return XContentType.JSON.xContent().createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, in);
}
private static Map<String, WildcardServiceProvider> parse(XContentParser parser) throws IOException {
final XContentParser.Token token = parser.currentToken() == null ? parser.nextToken() : parser.currentToken();
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
XContentParserUtils.ensureFieldName(parser, parser.nextToken(), Fields.SERVICES.getPreferredName());
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
final Map<String, WildcardServiceProvider> services = new HashMap<>();
while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser);
String name = parser.currentName();
final XContentLocation location = parser.getTokenLocation();
try {
services.put(name, WildcardServiceProvider.parse(parser));
} catch (Exception e) {
throw new ParsingException(location, "failed to parse wildcard service [{}]", e, name);
}
}
XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.currentToken(), parser);
XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser);
return services;
}
public static Collection<? extends Setting<?>> getSettings() {
return List.of(FILE_PATH_SETTING);
}
public interface Fields {
ParseField SERVICES = new ParseField("services");
}
}
| |
package net.community.chest.ui.helpers.text;
import java.awt.Color;
import java.awt.event.KeyListener;
import java.util.Collection;
import javax.swing.BorderFactory;
import javax.swing.InputVerifier;
import javax.swing.border.Border;
import javax.swing.event.ChangeListener;
import javax.swing.text.Document;
import org.w3c.dom.Element;
import net.community.chest.lang.StringUtil;
import net.community.chest.swing.event.ChangeListenerSet;
import net.community.chest.ui.helpers.input.InputFieldValidator;
import net.community.chest.ui.helpers.input.InputFieldValidatorKeyListener;
import net.community.chest.ui.helpers.input.TextInputVerifier;
import net.community.chest.ui.helpers.input.ValidatorUtils;
/**
 * A {@link HelperPasswordField} that validates its content through an {@link InputVerifier}
 * and gives visual feedback by swapping its border: an "OK" border (captured from the
 * component's own border at layout time) when the data is valid, and an auto-created
 * "error" border (a line border in the error color) when it is not.
 * Interested parties can register {@link ChangeListener}s to be told when the data changes.
 */
public class InputPasswordField extends HelperPasswordField implements InputFieldValidator {
    /**
     *
     */
    private static final long serialVersionUID = -3547798936268251396L;

    public InputPasswordField () {
        this((String) null);
    }

    public InputPasswordField (String text) {
        this(text, 0);
    }

    public InputPasswordField (int columns) {
        this(null, columns);
    }

    public InputPasswordField (String text, int columns) {
        this(null, text, columns);
    }

    public InputPasswordField (Document doc, String txt, int columns) {
        this(doc, txt, columns, true);
    }

    public InputPasswordField (String text, int columns, boolean autoLayout) {
        this((Document) null, text, columns, autoLayout);
    }

    public InputPasswordField (Document doc, String txt, int columns, boolean autoLayout) {
        this(null, doc, txt, columns, autoLayout);
    }

    public InputPasswordField (Element elem, Document doc, String txt, int columns) {
        this(elem, doc, txt, columns, true);
    }

    public InputPasswordField (Element elem) {
        this(elem, true);
    }

    public InputPasswordField (Element elem, boolean autoLayout) {
        this(elem, null, null, 0, autoLayout);
    }

    // All other constructors funnel into this one.
    public InputPasswordField (Element elem, Document doc, String txt, int columns, boolean autoLayout) {
        super(elem, doc, txt, columns, autoLayout);
    }

    /**
     * Builds the border used to signal a validation state.
     * @param c border color - a {@code null} color yields a {@code null} border
     */
    protected Border createStateBorder (Color c) {
        if (c == null) {
            return null;
        }
        return BorderFactory.createLineBorder(c, DEFAULT_LINE_THICKNESS);
    }

    private Color _errColor;
    /*
     * @see net.community.chest.ui.helpers.input.InputFieldValidator#getErrFieldColor()
     */
    @Override
    public Color getErrFieldColor () {
        // Lazily fall back to the default error color.
        if (_errColor == null) {
            _errColor = DEFAULT_ERR_COLOR;
        }
        return _errColor;
    }

    private Border _errBorder /* =null */;

    public Border getErrBorder () {
        // Lazily build a line border in the current error color.
        if (_errBorder == null) {
            _errBorder = createStateBorder(getErrFieldColor());
        }
        return _errBorder;
    }

    public void setErrBorder (Border b) {
        _errBorder = b;
    }

    /*
     * @see net.community.chest.ui.helpers.input.InputFieldValidator#setErrFieldColor(java.awt.Color)
     */
    @Override
    public void setErrFieldColor (Color errColor) {
        // Ignore null and no-op changes.
        if ((errColor == null) || errColor.equals(_errColor)) {
            return;
        }
        _errColor = errColor;
        setErrBorder(null);    // force re-creation on next update
    }

    private Color _okColor;
    /*
     * @see net.community.chest.ui.helpers.input.InputFieldValidator#getOkFieldColor()
     */
    @Override
    public Color getOkFieldColor () {
        // Lazily fall back to the default OK color.
        if (_okColor == null) {
            _okColor = DEFAULT_OK_COLOR;
        }
        return _okColor;
    }

    private Border _okBorder /* =null */;

    public Border getOkBorder () {
        // NOTE: OK border is empty by default whereas ERR border is auto-created;
        // layoutComponent() captures the component's own border as the OK border.
        return _okBorder;
    }

    public void setOkBorder (Border b) {
        _okBorder = b;
    }

    /*
     * @see net.community.chest.ui.helpers.input.InputFieldValidator#setOkFieldColor(java.awt.Color)
     */
    @Override
    public void setOkFieldColor (Color okColor) {
        // Ignore null and no-op changes.
        if ((okColor == null) || okColor.equals(_okColor)) {
            return;
        }
        _okColor = okColor;
        setOkBorder(null);    // force re-creation on next update
    }

    /* Invokes the {@link InputVerifier} (if any - otherwise returns true)
     * @see net.community.chest.ui.helpers.input.InputFieldValidator#isValidData()
     */
    @Override
    public boolean isValidData () {
        final InputVerifier verifier = getInputVerifier();
        return (verifier == null) || verifier.verify(this);
    }

    // Verifier installed by layoutComponent() when none is set.
    protected InputVerifier createDefaultVerifier () {
        return TextInputVerifier.TEXT;
    }

    // Key listener that re-validates on every keystroke.
    protected KeyListener createDefaultKeyListener () {
        return new InputFieldValidatorKeyListener<InputPasswordField>(this);
    }

    /*
     * @see net.community.chest.ui.helpers.text.HelperPasswordField#layoutComponent()
     */
    @Override
    public void layoutComponent () throws RuntimeException {
        super.layoutComponent();

        if (getInputVerifier() == null) {
            final InputVerifier verifier = createDefaultVerifier();
            if (verifier != null) {
                setInputVerifier(verifier);
            }
        }

        // Remember the component's current border as the "valid data" border.
        if (getOkBorder() == null) {
            setOkBorder(getBorder());
        }

        // Initial validation pass (no event) so the border reflects the starting state.
        signalDataChanged(false);

        final KeyListener listener = createDefaultKeyListener();
        if (listener != null) {
            addKeyListener(listener);
        }
    }

    /*
     * @see javax.swing.JComponent#setBorder(javax.swing.border.Border)
     */
    @Override
    public void setBorder (Border b) {
        super.setBorder(b);
        // force re-creation on next update
        setErrBorder(null);
        setOkBorder(null);
    }

    /** Applies the OK/ERR border matching {@code validData} and returns it. */
    protected Border updateBorderColor (final boolean validData) {
        final Border b = ValidatorUtils.resolveValidatorBorder(this, validData, getOkBorder(), getErrBorder());
        super.setBorder(b); // do not disturb the current OK/ERR borders
        return b;
    }

    private Collection<ChangeListener> _cl;

    /** Fires a change event to all registered listeners; returns the number notified. */
    protected int fireChangeEvent () {
        return ChangeListenerSet.fireChangeEventForSource(this, _cl, true);
    }

    /*
     * @see net.community.chest.ui.helpers.input.InputFieldValidator#addDataChangeListener(javax.swing.event.ChangeListener)
     */
    @Override
    public boolean addDataChangeListener (ChangeListener l) {
        if (l == null) {
            return false;
        }
        synchronized(this) {
            // Lazily create the listener set under the instance lock.
            if (_cl == null) {
                _cl = new ChangeListenerSet();
            }
        }
        synchronized(_cl) {
            return _cl.add(l);
        }
    }

    /*
     * @see net.community.chest.ui.helpers.input.InputFieldValidator#removeDataChangeListener(javax.swing.event.ChangeListener)
     */
    @Override
    public boolean removeDataChangeListener (ChangeListener l) {
        if (l == null) {
            return false;
        }
        synchronized(this) {
            // Nothing registered - nothing to remove.
            if ((_cl == null) || (_cl.size() <= 0)) {
                return false;
            }
        }
        synchronized(_cl) {
            return _cl.remove(l);
        }
    }

    /*
     * @see net.community.chest.ui.helpers.input.InputFieldValidator#signalDataChanged(boolean)
     */
    @Override
    public int signalDataChanged (boolean fireEvent) {
        // Always refresh the border; optionally notify listeners.
        updateBorderColor(isValidData());
        return fireEvent ? fireChangeEvent() : 0;
    }

    /*
     * @see net.community.chest.ui.helpers.input.InputFieldValidator#signalDataChanged()
     */
    public void signalDataChanged () {
        signalDataChanged(true);
    }

    /** Sets the text and, when requested, signals a data change to listeners. */
    public void setText (String t, boolean fireEvent) {
        super.setText(t);
        if (fireEvent) {
            signalDataChanged();
        }
    }

    /*
     * @see javax.swing.text.JTextComponent#setText(java.lang.String)
     */
    @SuppressWarnings("deprecation")
    @Override
    public void setText (String t) {
        // Only signal listeners when the text actually differs from the current value.
        final boolean changed = StringUtil.compareDataStrings(t, getText(), true) != 0;
        setText(t, changed);
    }
}
| |
package cornell.trickleapp;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import cornell.trickleapp.R;
import cornell.trickleapp.R.id;
import cornell.trickleapp.R.layout;
import android.app.PendingIntent;
import android.appwidget.AppWidgetManager;
import android.appwidget.AppWidgetProvider;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Color;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.view.View;
import android.widget.RelativeLayout;
import android.widget.RemoteViews;
/**
 * Home-screen widget that lets the user log drinks with one tap. Each tap increments the
 * day's drink count, persists derived values (estimated BAC, a severity color, calorie
 * equivalents) via {@link DatabaseHandler}, and recolors the widget accordingly. Tapping
 * the home/warning areas opens the main activity; if the initial survey has not been
 * completed yet, the survey activity is launched instead.
 *
 * NOTE(review): AppWidgetProvider instances are short-lived broadcast receivers, yet this
 * class keeps state in instance and static fields (drink_count, bac, neededIds, ...) -
 * presumably it relies on the DB as the source of truth between broadcasts; verify.
 */
public class DrinkCounterAppWidget extends AppWidgetProvider{

    // Running count of drinks for the current session; rebuilt from the DB in calculateHours().
    private int drink_count = 0;
    // Translucent green used while BAC is low (ARGB).
    int start_color = 0x884D944D;
    int offset = 10;
    private DatabaseHandler db;
    // Hours spent drinking, derived in calculateHours().
    private double hours;
    // Current widget background color, derived from BAC in calculateColor().
    private int color;
    // Estimated blood-alcohol content (static: shared across provider instances).
    static private double bac;
    // TODO:Temporary, move to a class that makes more sense
    private final Double CALORIES_PER_DRINK = 120.0;
    private final Double CALORIES_PER_CHICKEN = 264.0;
    private final Double CALORIES_PER_PIZZA = 285.0;
    private final Double CALORIES_HOT_DOG = 250.0;
    DrinkCounter counter=new DrinkCounter();
    // Broadcast action used for the "add a drink" tap.
    public final String counterAction = "Add Drink";
    Intent intent;
    // Whether the user completed the initial survey (read from shared preferences).
    static boolean checkSurveyed;
    RelativeLayout widget;
    // Widget ids captured in onUpdate so onReceive can push RemoteViews updates.
    static int[] neededIds;

    /**
     * Wires up the tap targets for every widget instance: the counter button broadcasts
     * {@link #counterAction} back to this provider; the home/warning areas open MainActivity.
     */
    @Override
    public void onUpdate(Context context, AppWidgetManager appWidgetManager,
            int[] appWidgetIds) {
        neededIds=appWidgetIds;
        final int N = appWidgetIds.length;
        super.onUpdate(context, appWidgetManager, appWidgetIds);
        // Perform this loop procedure for each App Widget that belongs to this provider
        for (int i=0; i<N; i++) {
            int appWidgetId = appWidgetIds[i];
            SharedPreferences getPrefs=PreferenceManager.getDefaultSharedPreferences(context);
            checkSurveyed=getPrefs.getBoolean("initialSurvey", false);
            // Broadcast back to this provider when the counter button is tapped.
            intent = new Intent(context, DrinkCounterAppWidget.class);
            intent.setAction(counterAction);
            // NOTE(review): no mutability flag on the PendingIntent - required on API 31+.
            PendingIntent pendingIntent = PendingIntent.getBroadcast(context, 0, intent, 0);
            // Get the layout for the App Widget and attach an on-click listener
            // to the button
            RemoteViews views = new RemoteViews(context.getPackageName(), R.layout.counterwidget_layout);
            views.setOnClickPendingIntent(R.id.bTestClick, pendingIntent);
            intent = new Intent(context, MainActivity.class);
            intent.setAction("Home");
            PendingIntent homeIntent = PendingIntent.getActivity(context, 0, intent, 0);
            views.setOnClickPendingIntent(R.id.bWidget, homeIntent);
            views.setOnClickPendingIntent(R.id.ibWidgetWarning, homeIntent);
            // Tell the AppWidgetManager to perform an update on the current app widget
            appWidgetManager.updateAppWidget(appWidgetId, views);
        }
    }

    /**
     * Handles the widget taps. For {@link #counterAction}: if the initial survey is not done,
     * launch it; otherwise record the drink and repaint the widget (warning layout when
     * BAC > 0.15, normal layout otherwise). For "Home": open the main activity.
     */
    @Override
    public void onReceive(Context context, Intent intent) {
        super.onReceive(context, intent);
        String action=intent.getAction();
        if (action.equals(counterAction)) {
            SharedPreferences getPrefs = PreferenceManager
                    .getDefaultSharedPreferences(context);
            checkSurveyed = getPrefs.getBoolean("initialSurvey", false);
            if (checkSurveyed == false) {
                // Survey not completed - force the user through it before counting drinks.
                Intent i = new Intent();
                i.setClassName("cornell.trickleapp", "cornell.trickleapp.InitialSurvey");
                i.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                context.startActivity(i);
            } else {
                db = new DatabaseHandler(context);
                hadDrink();    // persists the drink, recomputes bac and color
                AppWidgetManager appWidgetManager = AppWidgetManager.getInstance(context);
                RemoteViews views = new RemoteViews(context.getPackageName(), R.layout.counterwidget_layout);
                //calculateColor();
                if (bac>0.15){
                    // High BAC: show the textual warning, hide the home image.
                    views.setInt(R.id.ibWidgetWarning, "setBackgroundColor", color);
                    //views.setInt(R.id.ibWidgetWarning, "setColorFilter", color);
                    views.setInt(R.id.tvWidget, "setVisibility", View.VISIBLE);
                    views.setInt(R.id.tvWidgetHomeImage, "setVisibility", View.INVISIBLE);
                }
                else{
                    // Normal state: colored background, home image visible.
                    views.setInt(R.id.ibWidgetWarning, "setBackgroundColor", color);
                    views.setInt(R.id.tvWidget, "setVisibility", View.INVISIBLE);
                    views.setInt(R.id.tvWidgetHomeImage, "setVisibility", View.VISIBLE);
                }
                appWidgetManager.updateAppWidget(neededIds, views);
            }
        }
        else if (action.equals("Home")){
            Intent i = new Intent();
            i.setClassName("cornell.trickleapp", "cornell.trickleapp.MainActivity");
            i.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
            context.startActivity(i);
        }
    }

    /**
     * Records one drink: bumps the count, persists first-drink-of-the-day markers, then
     * recomputes and stores BAC, the severity color, and food-calorie equivalents
     * (chicken / pizza / hot-dog counts matching the drinks' calories).
     */
    public void hadDrink() {
        drink_count++;
        if (drink_count == 1){
            // First drink of this session - flag tomorrow's records and today's "drank" marker.
            db.addValueTomorrow("drank_last_night", "True");
            db.addValueTomorrow("tracked", "True");
            db.updateOrAdd("drank", "True");
        }
        db.addDelayValue("drink_count", drink_count);
        calculateBac();
        db.addDelayValue("bac", String.valueOf(bac));
        calculateColor();
        db.addDelayValue("bac_color", String.valueOf(color));
        //calculate number of chickens that equate the number of calories
        Double drink_cals = drink_count * CALORIES_PER_DRINK;
        int number_chickens = (int) Math.ceil(drink_cals / CALORIES_PER_CHICKEN);
        db.updateOrAdd("number_chickens", number_chickens);
        //calculate the number of slices of pizza that equate to the
        //number of drinks consumed that day.
        int number_pizza = (int) Math.ceil(drink_cals / CALORIES_PER_PIZZA);
        db.updateOrAdd("number_pizza", number_pizza);
        int number_hot_dogs = (int)Math.ceil(drink_cals/ CALORIES_HOT_DOG);
        db.updateOrAdd("hot_dogs", number_hot_dogs);
        /*
        TextView check = new TextView(this);
        check.setText(String.valueOf(bac));
        check.setTextColor(Color.parseColor("#FFFFFF"));
        ((FrameLayout)parent_view).addView(check);
        */
    }

    /**
     * Estimates BAC via a Widmark-style formula using the user's stored gender and weight
     * (defaults: Female, 120 lbs) and the elapsed drinking time from calculateHours().
     * Sets {@link #bac} to 0 when no drinks are recorded for today.
     * NOTE(review): the formula reads the drink_count FIELD, which is only refreshed inside
     * calculateHours() - presumably intentional, but the coupling is fragile; verify.
     */
    private void calculateBac() {
        Date date = new Date();
        ArrayList<DatabaseStore> drink_count_vals = (ArrayList<DatabaseStore>) db
                .getVarValuesDelay("drink_count", date);
        if (drink_count_vals != null) {
            calculateHours();
            // get the users gender
            ArrayList<DatabaseStore> stored_gender = (ArrayList<DatabaseStore>) db
                    .getAllVarValue("gender");
            // If user did not set gender use "Female" as default
            String gender = "Female";
            if (stored_gender != null) {
                gender = stored_gender.get(0).value;
            }
            // fetch the users weight
            ArrayList<DatabaseStore> stored_weight = (ArrayList<DatabaseStore>) db
                    .getAllVarValue("weight");
            Integer weight_lbs = 120;
            if (stored_weight != null) {
                weight_lbs = Integer.parseInt(stored_weight.get(0).value);
            }
            // Gender-specific Widmark constants (metabolism rate, body-water ratio).
            double metabolism_constant = 0;
            double gender_constant = 0;
            double weight_kilograms = weight_lbs * 0.453592;
            if (gender.equals("Male")) {
                metabolism_constant = 0.015;
                gender_constant = 0.58;
            } else {
                metabolism_constant = 0.017;
                gender_constant = 0.49;
            }
            bac = ((0.806 * drink_count * 1.2) / (gender_constant * weight_kilograms))
                    - (metabolism_constant * hours);
        } else {
            bac = 0;
        }
    }

    /**
     * Computes {@link #hours} (time since the first recorded drink, in hours) and refreshes
     * {@link #drink_count} from today's records. Also resets {@link #color} to the start color.
     * NOTE(review): "now" is shifted back 6 hours before the elapsed-time math - presumably
     * so a drinking session spanning midnight counts as one "day"; confirm intent, since it
     * also makes 'hours' 6 h smaller than wall-clock elapsed time.
     */
    private void calculateHours() {
        Date date = new Date();
        ArrayList<DatabaseStore> drink_count_vals = (ArrayList<DatabaseStore>) db
                .getVarValuesDelay("drink_count", date);
        GregorianCalendar gc = new GregorianCalendar();
        gc.setTime(date);
        gc.add(Calendar.HOUR_OF_DAY, -6);
        date = gc.getTime();
        DatabaseStore current = new DatabaseStore("","",date, "Integer");
        color = start_color;
        if (drink_count_vals != null) {
            drink_count = drink_count_vals.size();
            drink_count_vals = DatabaseStore.sortByTime(drink_count_vals);
            // calculate the hours drinking
            if (drink_count_vals.size() > 0) {
                DatabaseStore start = drink_count_vals.get(0);
                Integer start_time = start.hour * 60 + start.minute;
                Integer last_time = current.hour * 60 + current.minute;
                hours = (last_time - start_time) / 60.0;
            }
        }
    }

    /**
     * Maps the current BAC to a widget color: green (&lt;0.06), orange (&lt;0.15),
     * red (&lt;0.24), black otherwise. All values are translucent ARGB.
     */
    public void calculateColor() {
        if (bac < 0.06) {
            color = start_color;
        } else if (bac < 0.15) {
            color = 0X88E68A2E;
        } else if (bac < 0.24) {
            color = 0X88A30000;
        } else {
            color = 0XCC000000;
        }
    }
}
| |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.framework.main;
import java.awt.Dimension;
import java.io.IOException;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.List;
import javax.swing.BorderFactory;
import javax.swing.border.Border;
import docking.wizard.*;
import ghidra.app.util.GenericHelpTopics;
import ghidra.framework.client.*;
import ghidra.framework.model.ProjectManager;
import ghidra.framework.model.ServerInfo;
import ghidra.framework.plugintool.PluginTool;
import ghidra.framework.remote.User;
import ghidra.util.HelpLocation;
import ghidra.util.exception.DuplicateNameException;
import ghidra.util.exception.UserAccessException;
/**
* Manage the panels for the wizard that shows server info and repository panels.
* The panel order is
* (1) Server Info
* (2) Repository panel
* (3) Project access panel (if user is creating a new repository)
* This panel manager is used when the project is being converted to a shared project and
* when a shared project's information is to change.
*/
class SetupProjectPanelManager implements PanelManager {

    private WizardManager wizardMgr;
    // User names known to the server; fetched after a successful connection.
    private String[] knownUsers;
    private ServerInfoPanel serverPanel;
    private RepositoryPanel repositoryPanel;
    // Only shown when the user chooses to CREATE a repository (to set its access list).
    private ProjectAccessPanel projectAccessPanel;
    private WizardPanel currentWizardPanel;
    private boolean includeAnonymousAccessControl = false;
    private ProjectManager projectMgr;
    // Adapter for the server we are currently connected to (null until connected).
    private RepositoryServerAdapter server;
    // Repository chosen or created by the wizard; see getProjectRepository().
    private RepositoryAdapter repository;
    // Info for the server 'server' is connected to.
    private ServerInfo serverInfo;
    // Server info the wizard was seeded with (may be null).
    private ServerInfo currentServerInfo;
    // One-shot status message; cleared by getStatusMessage().
    private String statusMessage;
    private PluginTool tool;

    final static Border EMPTY_BORDER = BorderFactory.createEmptyBorder(80, 120, 0, 120);

    /**
     * @param tool       active plugin tool (supplies the ProjectManager)
     * @param serverInfo server to pre-populate the server panel with; may be null,
     *                   in which case the most recently used server is suggested
     */
    SetupProjectPanelManager(PluginTool tool, ServerInfo serverInfo) {
        serverPanel = new ServerInfoPanel(this);
        serverPanel.setHelpLocation(
            new HelpLocation(GenericHelpTopics.FRONT_END, "SetupServerInfo"));
        projectMgr = tool.getProjectManager();
        currentServerInfo = serverInfo;
        this.tool = tool;
    }

    // The wizard can finish once a valid repository is chosen; when creating a new
    // repository the access panel (if shown) must also be valid.
    @Override
    public boolean canFinish() {
        if (repositoryPanel == null) {
            return false;
        }
        if (repositoryPanel.isValidInformation()) {
            if (repositoryPanel.createRepository()) {
                return projectAccessPanel == null || projectAccessPanel.isValidInformation();
            }
            return true;
        }
        return false;
    }

    // Panel order: server info -> repository -> (access panel, only when creating).
    @Override
    public boolean hasNextPanel() {
        if (currentWizardPanel == serverPanel) {
            return true;
        }
        if (currentWizardPanel == repositoryPanel && repositoryPanel.createRepository()) {
            return true;
        }
        return false;
    }

    @Override
    public boolean hasPreviousPanel() {
        return currentWizardPanel != serverPanel;
    }

    @Override
    public WizardPanel getInitialPanel() {
        currentWizardPanel = serverPanel;
        return currentWizardPanel;
    }

    /**
     * Advances the wizard. Side effects: connects to the server when leaving the server
     * panel (populating knownUsers / repository names), and builds the access panel when
     * the user opted to create a new repository. On connection errors the current panel is
     * kept and statusMessage is set for the wizard to display.
     */
    @Override
    public WizardPanel getNextPanel() {
        if (currentWizardPanel == null) {
            // First call: show the server panel, seeded with the given or most recent server.
            currentWizardPanel = serverPanel;
            if (currentServerInfo != null) {
                serverPanel.setServerInfo(currentServerInfo);
            }
            else {
                serverPanel.setServerInfo(projectMgr.getMostRecentServerInfo());
            }
        }
        else if (currentWizardPanel == serverPanel) {
            String serverName = serverPanel.getServerName();
            int portNumber = serverPanel.getPortNumber();
            if (!isServerInfoValid(serverName, portNumber)) {
                // Could not connect - stay on the server panel (statusMessage already set).
                return serverPanel;
            }
            try {
                knownUsers = server.getAllUsers();
                String[] repositoryNames = server.getRepositoryNames();
                includeAnonymousAccessControl = server.anonymousAccessAllowed();
                if (repositoryPanel == null) {
                    repositoryPanel =
                        new RepositoryPanel(this, serverName, repositoryNames, server.isReadOnly());
                    repositoryPanel.setHelpLocation(
                        new HelpLocation(GenericHelpTopics.FRONT_END, "ChangeRepository"));
                }
                currentWizardPanel = repositoryPanel;
            }
            catch (RemoteException e) {
                statusMessage = "Error accessing remote server on " + serverName;
            }
            catch (NotConnectedException e) {
                statusMessage = e.getMessage();
                if (statusMessage == null) {
                    statusMessage = "Not connected to server " + serverName + ": " + e;
                }
            }
            catch (IOException e) {
                statusMessage = "IOException: could not access remote server on " + serverName;
            }
        }
        else if (currentWizardPanel == repositoryPanel) {
            String repositoryName = repositoryPanel.getRepositoryName();
            if (!repositoryPanel.createRepository()) {
                // Existing repository selected: resolve it now; null return ends the sequence.
                currentWizardPanel = null;
                repository = server.getRepository(repositoryName);
                return currentWizardPanel;
            }
            // Creating a new repository: next is the access-list panel.
            checkNewRepositoryAccessPanel();
            currentWizardPanel = projectAccessPanel;
        }
        else if (currentWizardPanel == projectAccessPanel) {
            currentWizardPanel = null;
        }
        return currentWizardPanel;
    }

    @Override
    public WizardPanel getPreviousPanel() {
        if (currentWizardPanel == projectAccessPanel) {
            currentWizardPanel = repositoryPanel;
        }
        else if (currentWizardPanel == repositoryPanel) {
            currentWizardPanel = serverPanel;
        }
        else {
            currentWizardPanel = null;
        }
        return currentWizardPanel;
    }

    // One-shot read: returns the pending message and clears it.
    @Override
    public String getStatusMessage() {
        String msg = statusMessage;
        statusMessage = null;
        return msg;
    }

    /**
     * Completes the wizard: resolves the selected repository, or creates the new one and
     * applies its user/anonymous-access list. On failure statusMessage is set and the
     * wizard stays open (early return); on success the wizard is closed.
     */
    @Override
    public void finish() {
        if (server != null) {
            boolean createNewRepository = repositoryPanel.createRepository();
            if (!createNewRepository) {
                if (repository == null) {
                    repository = server.getRepository(repositoryPanel.getRepositoryName());
                }
            }
            else {
                try {
                    String repositoryName = repositoryPanel.getRepositoryName();
                    boolean allowAnonymousAccess;
                    User[] accessList;
                    if (projectAccessPanel != null &&
                        projectAccessPanel.getRepositoryName().equals(repositoryName)) {
                        accessList = projectAccessPanel.getProjectUsers();
                        allowAnonymousAccess = projectAccessPanel.allowAnonymousAccess();
                    }
                    else {
                        // Access panel never shown (or for another repo): creator is sole admin.
                        accessList = new User[] { new User(server.getUser(), User.ADMIN) };
                        allowAnonymousAccess = false;
                    }
                    repository = server.createRepository(repositoryName);
                    repository.setUserList(accessList, allowAnonymousAccess);
                }
                catch (DuplicateNameException e) {
                    statusMessage = "Repository " + repositoryPanel.getRepositoryName() + " exists";
                }
                catch (UserAccessException exc) {
                    statusMessage = "Could not update the user list: " + exc.getMessage();
                    return;
                }
                catch (NotConnectedException e) {
                    statusMessage = e.getMessage();
                    if (statusMessage == null) {
                        statusMessage =
                            "Not connected to server " + serverInfo.getServerName() + ": " + e;
                    }
                    return;
                }
                catch (IOException exc) {
                    String msg = exc.getMessage();
                    if (msg == null) {
                        msg = exc.toString();
                    }
                    statusMessage = "Error occurred while updating the user list: " + msg;
                    return;
                }
            }
        }
        wizardMgr.close();
    }

    // Discards all wizard state and disconnects from any resolved repository.
    @Override
    public void cancel() {
        currentWizardPanel = null;
        repositoryPanel = null;
        projectAccessPanel = null;
        server = null;
        if (repository != null) {
            repository.disconnect();
            repository = null;
        }
    }

    @Override
    public void initialize() {
        currentWizardPanel = null;
        if (repositoryPanel != null) {
            repositoryPanel.initialize();
        }
        if (projectAccessPanel != null) {
            projectAccessPanel.initialize();
        }
    }

    @Override
    public Dimension getPanelSize() {
        return getMyPanelSize();
    }

    @Override
    public void setWizardManager(WizardManager wm) {
        wizardMgr = wm;
    }

    @Override
    public WizardManager getWizardManager() {
        return wizardMgr;
    }

    /**
     * Get the repository adapter associated with the new project.
     * After displaying this panel, this method should be invoked to obtain the
     * repository which will be opened for shared projects. If the repository is
     * not used to create a new project, its disconnect method should be invoked.
     * @return null if project is not shared
     */
    RepositoryAdapter getProjectRepository() {
        return repository;
    }

    String getProjectRepositoryName() {
        return repositoryPanel.getRepositoryName();
    }

    // Builds (or reuses) the access panel for the repository about to be created,
    // seeding its user list with the connected user as admin.
    private void checkNewRepositoryAccessPanel() {
        String repositoryName = repositoryPanel.getRepositoryName();
        if (projectAccessPanel != null &&
            projectAccessPanel.getRepositoryName().equals(repositoryName)) {
            // Already built for this repository - keep any edits the user made.
            return;
        }
        List<User> userList = new ArrayList<>();
        userList.add(new User(server.getUser(), User.ADMIN));
        projectAccessPanel = new ProjectAccessPanel(knownUsers, server.getUser(), userList,
            repositoryName, includeAnonymousAccessControl, false, tool);
        projectAccessPanel.setHelpLocation(
            new HelpLocation(GenericHelpTopics.FRONT_END, "SetupUsers"));
    }

    /**
     * Return true if a connection could be established using the given
     * server name and port number.
     * Reuses the existing connection when it already targets the same server/port;
     * otherwise resets server/repository state and connects anew, setting
     * statusMessage on failure.
     */
    private boolean isServerInfoValid(String serverName, int portNumber) {
        if (server != null && serverInfo != null && serverInfo.getServerName().equals(serverName) &&
            serverInfo.getPortNumber() == portNumber && server.isConnected()) {
            return true;
        }
        server = null;
        serverInfo = null;
        repositoryPanel = null;
        server = projectMgr.getRepositoryServerAdapter(serverName, portNumber, true);
        if (server.isConnected()) {
            serverInfo = projectMgr.getMostRecentServerInfo();
            return true;
        }
        statusMessage = "Could not connect to server " + serverName + ", port " + portNumber;
        return false;
    }

    // Sizes the wizard to fit the largest panels, using representative throw-away
    // instances with sample data.
    private Dimension getMyPanelSize() {
        ProjectAccessPanel panel1 = new ProjectAccessPanel(new String[] { "nobody" }, "user",
            new ArrayList<User>(), "MyRepository", true, false, tool);
        RepositoryPanel panel2 = new RepositoryPanel(this, "ServerOne",
            new String[] { "MyRepository", "NewStuff", "Repository_A", "Repository_B" }, false);
        Dimension d1 = panel1.getPreferredSize();
        Dimension d2 = panel2.getPreferredSize();
        return new Dimension(Math.max(d1.width, d2.width), Math.max(d1.height, d2.height));
    }
}
| |
/*******************************************************************************
* The MIT License (MIT)
*
* Copyright (c) 2015 Neustar Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*******************************************************************************/
package com.neulevel.epp.xri.response;
import java.util.*;
import org.w3c.dom.*;
import com.neulevel.epp.core.*;
import com.neulevel.epp.core.response.*;
/**
 * This <code>EppResponseDataCreateXriNumber</code> class implements EPP
 * Response Data entity for EPP Command Create of EPP XRI I-Number objects.
 *
 * @author Ning Zhang ning.zhang@neustar.com
 * @version $Revision: 1.2 $ $Date: 2006/03/01 01:35:38 $
 */
public class EppResponseDataCreateXriNumber extends EppResponseDataCreate
{
    private String   inumber;
    private String   refId;
    private Calendar exDate;

    /**
     * Creates an <code>EppResponseDataCreateXriNumber</code> object,
     * with the current date as the creation date.
     *
     * @param inumber the i-number of the EPP XRI i-number object created
     */
    public EppResponseDataCreateXriNumber( String inumber )
    {
        this(inumber, null, null, Calendar.getInstance());
    }

    /**
     * Creates an <code>EppResponseDataCreateXriNumber</code> object,
     * given the i-number of the XRI i-number object, and an expiration date,
     * with the current date as the creation date.
     *
     * @param inumber the i-number of the EPP XRI i-number object created
     * @param exDate  the expiration date of the XRI i-number object created
     */
    public EppResponseDataCreateXriNumber( String inumber, Calendar exDate )
    {
        this(inumber, null, exDate, Calendar.getInstance());
    }

    /**
     * Creates an <code>EppResponseDataCreateXriNumber</code> object,
     * given the i-number of the XRI i-number object, a reference id,
     * and an expiration date, with the current date as the creation date.
     *
     * @param inumber the i-number of the EPP XRI i-number object created
     * @param refId   the reference id of the XRI i-number object created
     * @param exDate  the expiration date of the XRI i-number object created
     */
    public EppResponseDataCreateXriNumber( String inumber, String refId, Calendar exDate )
    {
        this(inumber, refId, exDate, Calendar.getInstance());
    }

    /**
     * Creates an <code>EppResponseDataCreateXriNumber</code> object,
     * given the i-number of the XRI i-number object, a reference id,
     * an expiration date and an explicit creation date.
     *
     * @param inumber the i-number of the EPP XRI i-number object created
     * @param refId   the reference id of the XRI i-number object created
     * @param exDate  the expiration date of the XRI i-number object created
     * @param crDate  the creation date of the XRI i-number object created
     */
    public EppResponseDataCreateXriNumber( String inumber, String refId, Calendar exDate, Calendar crDate )
    {
        this.inumber = inumber;
        this.refId   = refId;
        this.exDate  = exDate;
        this.crDate  = crDate;  // inherited from EppResponseDataCreate
    }

    /**
     * Sets the i-number
     *
     * @param inumber the i-number of the EPP XRI i-number object
     */
    public void setINumber( String inumber )
    {
        this.inumber = inumber;
    }

    /**
     * Gets the i-number
     *
     * @return the i-number of the EPP XRI i-number object
     */
    public String getINumber()
    {
        return this.inumber;
    }

    /**
     * Gets expiration date of the XRI i-number object created
     *
     * @return the expiration date, or null if not set
     */
    public Calendar getDateExpired()
    {
        return this.exDate;
    }

    /**
     * Sets expiration date of the XRI i-number object created
     *
     * @param exDate the expiration date
     */
    public void setDateExpired( Calendar exDate )
    {
        this.exDate = exDate;
    }

    /**
     * Gets the reference identifier used in generating the i-number, if any
     *
     * @return the reference id, or null if not set
     */
    public String getReferenceId()
    {
        return this.refId;
    }

    /**
     * Sets the reference identifier used in generating the i-number, if any
     *
     * @param refId the reference id
     */
    public void setReferenceId( String refId )
    {
        this.refId = refId;
    }

    /**
     * Converts an XML element into an
     * <code>EppResponseDataCreateXriNumber</code> object. The caller of this
     * method must make sure that the root node is the resData element of
     * EPP responseType for creating an EPP XriNumber object.
     *
     * @param root root node for an
     *             <code>EppResponseDataCreateXriNumber</code> object
     *             in XML format
     *
     * @return an <code>EppResponseDataCreateXriNumber</code> object, or null
     *         if the node is invalid
     */
    public static EppEntity fromXML( Node root )
    {
        String   i_number    = null;
        String   ref_id      = null;
        Calendar create_date = null;
        Calendar expire_date = null;

        NodeList list = root.getChildNodes();
        for( int i = 0; i < list.getLength(); i++ )
        {
            Node node = list.item(i);
            String name = node.getLocalName();
            if( name == null )
            {
                // skip text/whitespace nodes that carry no element name
                continue;
            }
            if( name.equals("inumber") )
            {
                String id = EppUtil.getText(node);
                if( (id != null) && (id.length() > 0) )
                {
                    i_number = id;
                }
            }
            else if( name.equals("refId") )
            {
                String id = EppUtil.getText(node);
                if( (id != null) && (id.length() > 0) )
                {
                    ref_id = id;
                }
            }
            else if( name.equals("crDate") )
            {
                Calendar d = EppUtil.getDate(node);
                if( d != null )
                {
                    create_date = d;
                }
            }
            else if( name.equals("exDate") )
            {
                Calendar d = EppUtil.getDate(node);
                if( d != null )
                {
                    expire_date = d;
                }
            }
        }
        return new EppResponseDataCreateXriNumber(i_number, ref_id, expire_date, create_date);
    }

    /**
     * Converts an <code>EppResponseDataCreateXriNumber</code> object into
     * an XML element.
     *
     * @param doc the XML <code>Document</code> object
     * @param tag the tag/element name for the
     *            <code>EppResponseDataCreateXriNumber</code> object
     *
     * @return an <code>Element</code> object
     */
    public Element toXML( Document doc, String tag )
    {
        Element elm;
        Element body = doc.createElement(tag);
        // creData is the namespaced payload element nested under the caller-supplied tag
        Element data = EppUtil.createElementNS(doc, "xriINU", "creData");
        body.appendChild(data);

        if( inumber != null )
        {
            elm = doc.createElement("inumber");
            elm.appendChild(doc.createTextNode(inumber));
            data.appendChild(elm);
        }
        if( refId != null )
        {
            elm = doc.createElement("refId");
            elm.appendChild(doc.createTextNode(refId));
            data.appendChild(elm);
        }
        if( crDate != null )
        {
            elm = doc.createElement("crDate");
            elm.appendChild(EppUtil.createTextNode(doc, crDate));
            data.appendChild(elm);
        }
        if( exDate != null )
        {
            elm = doc.createElement("exDate");
            elm.appendChild(EppUtil.createTextNode(doc, exDate));
            data.appendChild(elm);
        }
        return body;
    }
}
| |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.fileEditor.impl;
import com.intellij.ide.ui.UISettings;
import com.intellij.openapi.command.CommandAdapter;
import com.intellij.openapi.command.CommandEvent;
import com.intellij.openapi.command.CommandListener;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.command.impl.CommandMerger;
import com.intellij.openapi.components.*;
import com.intellij.openapi.components.StoragePathMacros;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.event.*;
import com.intellij.openapi.fileEditor.*;
import com.intellij.openapi.fileEditor.ex.FileEditorManagerEx;
import com.intellij.openapi.fileEditor.ex.IdeDocumentHistory;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.vfs.*;
import com.intellij.openapi.wm.ToolWindowManager;
import com.intellij.util.xmlb.annotations.Transient;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.lang.ref.WeakReference;
import java.util.*;
@State(
  name = "IdeDocumentHistory",
  storages = {@Storage( file = StoragePathMacros.WORKSPACE_FILE)}
)
public class IdeDocumentHistoryImpl extends IdeDocumentHistory implements ProjectComponent, PersistentStateComponent<IdeDocumentHistoryImpl.RecentlyChangedFilesState> {
  private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.fileEditor.impl.IdeDocumentHistoryImpl");

  // Capacity limits for the back-navigation and change-place stacks.
  private static final int BACK_QUEUE_LIMIT = 25;
  private static final int CHANGE_QUEUE_LIMIT = 25;

  private final Project myProject;

  private final EditorFactory myEditorFactory;
  private FileDocumentManager myFileDocumentManager; // lazily initialized via getFileDocumentManager()
  private FileEditorManagerEx myEditorManager;       // re-fetched in projectOpened()
  private final VirtualFileManager myVfManager;
  private final CommandProcessor myCmdProcessor;
  private final ToolWindowManager myToolWindowManager;

  private final LinkedList<PlaceInfo> myBackPlaces = new LinkedList<PlaceInfo>(); // LinkedList of PlaceInfo's
  private final LinkedList<PlaceInfo> myForwardPlaces = new LinkedList<PlaceInfo>(); // LinkedList of PlaceInfo's
  private boolean myBackInProgress = false;
  private boolean myForwardInProgress = false;
  private Object myLastGroupId = null;

  // change's navigation
  private final LinkedList<PlaceInfo> myChangePlaces = new LinkedList<PlaceInfo>(); // LinkedList of PlaceInfo's
  // myCurrentIndex counts positions relative to myStartIndex, which advances when
  // the change queue overflows; see pushCurrentChangePlace().
  private int myStartIndex = 0;
  private int myCurrentIndex = 0;

  private PlaceInfo myCurrentChangePlace = null;
  private PlaceInfo myCommandStartPlace = null;
  private boolean myCurrentCommandIsNavigation = false;
  private boolean myCurrentCommandHasChanges = false;
  private final Set<VirtualFile> myChangedFilesInCurrentCommand = new THashSet<VirtualFile>();
  private boolean myCurrentCommandHasMoves = false;

  // Tracks command boundaries so navigation places are recorded per command.
  private final CommandListener myCommandListener = new CommandAdapter() {
    @Override
    public void commandStarted(CommandEvent event) {
      onCommandStarted();
    }

    @Override
    public void commandFinished(CommandEvent event) {
      onCommandFinished(event.getCommandGroupId());
    }
  };

  private RecentlyChangedFilesState myRecentlyChangedFiles = new RecentlyChangedFilesState();

  public IdeDocumentHistoryImpl(@NotNull Project project,
                                @NotNull EditorFactory editorFactory,
                                @NotNull FileEditorManager editorManager,
                                @NotNull VirtualFileManager vfManager,
                                @NotNull CommandProcessor cmdProcessor,
                                @NotNull ToolWindowManager toolWindowManager) {
    myProject = project;
    myEditorFactory = editorFactory;
    myEditorManager = (FileEditorManagerEx)editorManager;
    myVfManager = vfManager;
    myCmdProcessor = cmdProcessor;
    myToolWindowManager = toolWindowManager;
  }

  /** Registers listeners for document, caret, selection, file-deletion and command events. */
  @Override
  public final void projectOpened() {
    myEditorManager = (FileEditorManagerEx)FileEditorManager.getInstance(myProject);
    EditorEventMulticaster eventMulticaster = myEditorFactory.getEventMulticaster();

    DocumentListener documentListener = new DocumentAdapter() {
      @Override
      public void documentChanged(DocumentEvent e) {
        onDocumentChanged(e);
      }
    };
    eventMulticaster.addDocumentListener(documentListener, myProject);

    CaretListener caretListener = new CaretListener() {
      @Override
      public void caretPositionChanged(CaretEvent e) {
        onCaretPositionChanged(e);
      }
    };
    eventMulticaster.addCaretListener(caretListener, myProject);

    myProject.getMessageBus().connect().subscribe(FileEditorManagerListener.FILE_EDITOR_MANAGER, new FileEditorManagerAdapter() {
      @Override
      public void selectionChanged(@NotNull FileEditorManagerEvent e) {
        onSelectionChanged();
      }
    });

    VirtualFileListener fileListener = new VirtualFileAdapter() {
      @Override
      public void fileDeleted(VirtualFileEvent event) {
        onFileDeleted();
      }
    };
    myVfManager.addVirtualFileListener(fileListener, myProject);
    myCmdProcessor.addCommandListener(myCommandListener, myProject);
  }

  /** Persisted state: ordered list of recently changed file paths (most recent last). */
  public static class RecentlyChangedFilesState {
    @Transient private List<String> CHANGED_PATHS = new ArrayList<String>();

    public List<String> getChangedFiles() {
      return CHANGED_PATHS;
    }

    public void setChangedFiles(List<String> changed) {
      CHANGED_PATHS = changed;
    }

    /** Moves {@code file} to the most-recent position, trimming to the UI limit. */
    public void register(VirtualFile file) {
      final String path = file.getPath();
      CHANGED_PATHS.remove(path);
      CHANGED_PATHS.add(path);
      trimToSize();
    }

    private void trimToSize() {
      final int limit = UISettings.getInstance().RECENT_FILES_LIMIT + 1;
      // Evict the oldest entries (list head) until we are within the limit.
      while (CHANGED_PATHS.size() > limit) {
        CHANGED_PATHS.remove(0);
      }
    }
  }

  @Override
  public RecentlyChangedFilesState getState() {
    return myRecentlyChangedFiles;
  }

  @Override
  public void loadState(RecentlyChangedFilesState state) {
    myRecentlyChangedFiles = state;
  }

  public final void onFileDeleted() {
    removeInvalidFilesFromStacks();
  }

  public final void onSelectionChanged() {
    myCurrentCommandIsNavigation = true;
    myCurrentCommandHasMoves = true;
  }

  private void onCaretPositionChanged(CaretEvent e) {
    // Same-line caret moves are not considered navigation.
    if (e.getOldPosition().line == e.getNewPosition().line) return;
    Document document = e.getEditor().getDocument();
    if (getFileDocumentManager().getFile(document) != null) {
      myCurrentCommandHasMoves = true;
    }
  }

  private void onDocumentChanged(DocumentEvent e) {
    Document document = e.getDocument();
    final VirtualFile file = getFileDocumentManager().getFile(document);
    if (file != null) {
      myCurrentCommandHasChanges = true;
      myChangedFilesInCurrentCommand.add(file);
    }
  }

  /** Resets per-command tracking state and remembers where the command started. */
  public final void onCommandStarted() {
    myCommandStartPlace = getCurrentPlaceInfo();
    myCurrentCommandIsNavigation = false;
    myCurrentCommandHasChanges = false;
    myCurrentCommandHasMoves = false;
    myChangedFilesInCurrentCommand.clear();
  }

  private PlaceInfo getCurrentPlaceInfo() {
    final Pair<FileEditor, FileEditorProvider> selectedEditorWithProvider = getSelectedEditor();
    if (selectedEditorWithProvider != null) {
      return createPlaceInfo(selectedEditorWithProvider.getFirst(), selectedEditorWithProvider.getSecond());
    }
    return null;
  }

  /** Records back/change places for the finished command, depending on what it did. */
  public final void onCommandFinished(Object commandGroupId) {
    if (myCommandStartPlace != null) {
      if (myCurrentCommandIsNavigation && myCurrentCommandHasMoves) {
        if (!myBackInProgress) {
          // Mergeable command groups share a single back-place entry.
          if (!CommandMerger.canMergeGroup(commandGroupId, myLastGroupId)) {
            putLastOrMerge(myBackPlaces, myCommandStartPlace, BACK_QUEUE_LIMIT);
          }
          // A fresh navigation invalidates the forward history.
          if (!myForwardInProgress) {
            myForwardPlaces.clear();
          }
        }
        removeInvalidFilesFromStacks();
      }
    }
    myLastGroupId = commandGroupId;

    if (myCurrentCommandHasChanges) {
      setCurrentChangePlace();
    }
    else if (myCurrentCommandHasMoves) {
      pushCurrentChangePlace();
    }
  }

  @Override
  public final void projectClosed() {
  }

  @Override
  public final void includeCurrentCommandAsNavigation() {
    myCurrentCommandIsNavigation = true;
  }

  @Override
  public final void includeCurrentPlaceAsChangePlace() {
    setCurrentChangePlace();
    pushCurrentChangePlace();
  }

  /** Captures the current place as the pending change place if the file was edited in this command. */
  private void setCurrentChangePlace() {
    final Pair<FileEditor, FileEditorProvider> selectedEditorWithProvider = getSelectedEditor();
    if (selectedEditorWithProvider == null) {
      return;
    }
    final PlaceInfo placeInfo = createPlaceInfo(selectedEditorWithProvider.getFirst(), selectedEditorWithProvider.getSecond());
    final VirtualFile file = placeInfo.getFile();
    if (myChangedFilesInCurrentCommand.contains(file)) {
      myRecentlyChangedFiles.register(file);
      myCurrentChangePlace = placeInfo;
      // Drop the last stored change place if it is effectively the same location.
      if (!myChangePlaces.isEmpty()) {
        final PlaceInfo lastInfo = myChangePlaces.get(myChangePlaces.size() - 1);
        if (isSame(placeInfo, lastInfo)) {
          myChangePlaces.removeLast();
        }
      }
      myCurrentIndex = myStartIndex + myChangePlaces.size();
    }
  }

  /** Commits the pending change place to the queue, evicting the oldest on overflow. */
  private void pushCurrentChangePlace() {
    if (myCurrentChangePlace != null) {
      myChangePlaces.add(myCurrentChangePlace);
      if (myChangePlaces.size() > CHANGE_QUEUE_LIMIT) {
        myChangePlaces.removeFirst();
        myStartIndex++;
      }
      myCurrentChangePlace = null;
    }
    myCurrentIndex = myStartIndex + myChangePlaces.size();
  }

  /** Resolves the persisted recently-changed paths to still-existing files. */
  @Override
  public VirtualFile[] getChangedFiles() {
    List<VirtualFile> files = new ArrayList<VirtualFile>();

    final LocalFileSystem lfs = LocalFileSystem.getInstance();
    final List<String> paths = myRecentlyChangedFiles.getChangedFiles();
    for (String path : paths) {
      final VirtualFile file = lfs.findFileByPath(path);
      if (file != null) {
        files.add(file);
      }
    }

    return VfsUtil.toVirtualFileArray(files);
  }

  @Override
  public final void clearHistory() {
    clearPlaceList(myBackPlaces);
    clearPlaceList(myForwardPlaces);
    clearPlaceList(myChangePlaces);

    myLastGroupId = null;

    myStartIndex = 0;
    myCurrentIndex = 0;
    // Plain assignments; the former "if (x != null) x = null" guards were redundant.
    myCurrentChangePlace = null;
    myCommandStartPlace = null;
  }

  @Override
  public final void back() {
    removeInvalidFilesFromStacks();
    if (myBackPlaces.isEmpty()) return;
    final PlaceInfo info = myBackPlaces.removeLast();

    PlaceInfo current = getCurrentPlaceInfo();
    if (current != null) {
      if (!isSame(current, info)) {
        putLastOrMerge(myForwardPlaces, current, Integer.MAX_VALUE);
      }
    }
    putLastOrMerge(myForwardPlaces, info, Integer.MAX_VALUE);

    myBackInProgress = true;
    executeCommand(new Runnable() {
      @Override
      public void run() {
        gotoPlaceInfo(info);
      }
    }, "", null);
    myBackInProgress = false;
  }

  @Override
  public final void forward() {
    removeInvalidFilesFromStacks();

    final PlaceInfo target = getTargetForwardInfo();
    if (target == null) return;

    myForwardInProgress = true;
    executeCommand(new Runnable() {
      @Override
      public void run() {
        gotoPlaceInfo(target);
      }
    }, "", null);
    myForwardInProgress = false;
  }

  private PlaceInfo getTargetForwardInfo() {
    if (myForwardPlaces.isEmpty()) return null;

    PlaceInfo target = myForwardPlaces.removeLast();
    PlaceInfo current = getCurrentPlaceInfo();

    // Skip forward entries identical to the current place.
    while (!myForwardPlaces.isEmpty()) {
      if (isSame(current, target)) {
        target = myForwardPlaces.removeLast();
      }
      else {
        break;
      }
    }
    return target;
  }

  @Override
  public final boolean isBackAvailable() {
    return !myBackPlaces.isEmpty();
  }

  @Override
  public final boolean isForwardAvailable() {
    return !myForwardPlaces.isEmpty();
  }

  @Override
  public final void navigatePreviousChange() {
    removeInvalidFilesFromStacks();
    if (myCurrentIndex == myStartIndex) return;
    int index = myCurrentIndex - 1;
    final PlaceInfo info = myChangePlaces.get(index - myStartIndex);

    executeCommand(new Runnable() {
      @Override
      public void run() {
        gotoPlaceInfo(info);
      }
    }, "", null);
    myCurrentIndex = index;
  }

  @Override
  public final boolean isNavigatePreviousChangeAvailable() {
    return myCurrentIndex > myStartIndex;
  }

  private void removeInvalidFilesFromStacks() {
    removeInvalidFilesFrom(myBackPlaces);

    removeInvalidFilesFrom(myForwardPlaces);
    if (removeInvalidFilesFrom(myChangePlaces)) {
      myCurrentIndex = myStartIndex + myChangePlaces.size();
    }
  }

  private static boolean removeInvalidFilesFrom(final LinkedList<PlaceInfo> backPlaces) {
    boolean removed = false;
    for (Iterator<PlaceInfo> iterator = backPlaces.iterator(); iterator.hasNext(); ) {
      PlaceInfo info = iterator.next();
      final VirtualFile file = info.myFile;
      if (!file.isValid()) {
        iterator.remove();
        removed = true;
      }
    }
    return removed;
  }

  /** Opens the file/editor described by {@code info} and restores its navigation state. */
  private void gotoPlaceInfo(@NotNull PlaceInfo info) { // TODO: Msk
    final boolean wasActive = myToolWindowManager.isEditorComponentActive();
    EditorWindow wnd = info.getWindow();
    final Pair<FileEditor[], FileEditorProvider[]> editorsWithProviders;
    if (wnd != null && wnd.isValid()) {
      editorsWithProviders = myEditorManager.openFileWithProviders(info.getFile(), wasActive, wnd);
    }
    else {
      editorsWithProviders = myEditorManager.openFileWithProviders(info.getFile(), wasActive, false);
    }

    myEditorManager.setSelectedEditor(info.getFile(), info.getEditorTypeId());

    final FileEditor[] editors = editorsWithProviders.getFirst();
    final FileEditorProvider[] providers = editorsWithProviders.getSecond();
    for (int i = 0; i < editors.length; i++) {
      String typeId = providers[i].getEditorTypeId();
      if (typeId.equals(info.getEditorTypeId())) {
        editors[i].setState(info.getNavigationState());
      }
    }
  }

  /**
   * @return currently selected FileEditor or null.
   */
  protected Pair<FileEditor, FileEditorProvider> getSelectedEditor() {
    VirtualFile file = myEditorManager.getCurrentFile();
    return file != null ? myEditorManager.getSelectedEditorWithProvider(file) : null;
  }

  private PlaceInfo createPlaceInfo(@NotNull final FileEditor fileEditor, final FileEditorProvider fileProvider) {
    final VirtualFile file = myEditorManager.getFile(fileEditor);
    LOG.assertTrue(file != null);
    final FileEditorState state = fileEditor.getState(FileEditorStateLevel.NAVIGATION);
    return new PlaceInfo(file, state, fileProvider.getEditorTypeId(), myEditorManager.getCurrentWindow());
  }

  private static void clearPlaceList(LinkedList<PlaceInfo> list) {
    list.clear();
  }

  @Override
  @NotNull
  public final String getComponentName() {
    return "IdeDocumentHistory";
  }

  /** Appends {@code next}, collapsing a same-place tail entry, and evicts the oldest on overflow. */
  private static void putLastOrMerge(LinkedList<PlaceInfo> list, PlaceInfo next, int limitSizeLimit) {
    if (!list.isEmpty()) {
      PlaceInfo prev = list.get(list.size() - 1);
      if (isSame(prev, next)) {
        list.removeLast();
      }
    }
    list.add(next);
    if (list.size() > limitSizeLimit) {
      list.removeFirst();
    }
  }

  private FileDocumentManager getFileDocumentManager() {
    if (myFileDocumentManager == null) {
      myFileDocumentManager = FileDocumentManager.getInstance();
    }
    return myFileDocumentManager;
  }

  /** Immutable snapshot of a navigation location: file, editor state and (weak) editor window. */
  private static final class PlaceInfo {
    private final VirtualFile myFile;
    private final FileEditorState myNavigationState;
    private final String myEditorTypeId;
    private WeakReference<EditorWindow> myWindow; // weak ref so a closed window can be collected

    public PlaceInfo(@NotNull VirtualFile file, FileEditorState navigationState, String editorTypeId, @Nullable EditorWindow window) {
      myNavigationState = navigationState;
      myFile = file;
      myEditorTypeId = editorTypeId;
      myWindow = new WeakReference<EditorWindow>(window);
    }

    public EditorWindow getWindow() {
      return myWindow.get();
    }

    public FileEditorState getNavigationState() {
      return myNavigationState;
    }

    @NotNull
    public VirtualFile getFile() {
      return myFile;
    }

    public String getEditorTypeId() {
      return myEditorTypeId;
    }

    public String toString() {
      return getFile().getName() + " " + getNavigationState();
    }
  }

  public LinkedList<PlaceInfo> getBackPlaces() {
    return myBackPlaces;
  }

  public LinkedList<PlaceInfo> getForwardPlaces() {
    return myForwardPlaces;
  }

  @Override
  public final void initComponent() { }

  @Override
  public final void disposeComponent() {
    myLastGroupId = null;
  }

  protected void executeCommand(Runnable runnable, String name, Object groupId) {
    myCmdProcessor.executeCommand(myProject, runnable, name, groupId);
  }

  /** Two places match if they are in the same file and their editor states are equal or mergeable. */
  private static boolean isSame(PlaceInfo first, PlaceInfo second) {
    if (first.getFile().equals(second.getFile())) {
      FileEditorState firstState = first.getNavigationState();
      FileEditorState secondState = second.getNavigationState();
      return firstState.equals(secondState) || firstState.canBeMergedWith(secondState, FileEditorStateLevel.NAVIGATION);
    }

    return false;
  }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.java.decompiler.modules.decompiler;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;
import org.jetbrains.java.decompiler.code.CodeConstants;
import org.jetbrains.java.decompiler.main.ClassesProcessor.ClassNode;
import org.jetbrains.java.decompiler.main.DecompilerContext;
import org.jetbrains.java.decompiler.main.extern.IFernflowerPreferences;
import org.jetbrains.java.decompiler.main.rels.ClassWrapper;
import org.jetbrains.java.decompiler.modules.decompiler.exps.ArrayExprent;
import org.jetbrains.java.decompiler.modules.decompiler.exps.AssignmentExprent;
import org.jetbrains.java.decompiler.modules.decompiler.exps.ConstExprent;
import org.jetbrains.java.decompiler.modules.decompiler.exps.ExitExprent;
import org.jetbrains.java.decompiler.modules.decompiler.exps.Exprent;
import org.jetbrains.java.decompiler.modules.decompiler.exps.FieldExprent;
import org.jetbrains.java.decompiler.modules.decompiler.exps.FunctionExprent;
import org.jetbrains.java.decompiler.modules.decompiler.exps.InvocationExprent;
import org.jetbrains.java.decompiler.modules.decompiler.exps.MonitorExprent;
import org.jetbrains.java.decompiler.modules.decompiler.exps.NewExprent;
import org.jetbrains.java.decompiler.modules.decompiler.exps.VarExprent;
import org.jetbrains.java.decompiler.modules.decompiler.sforms.SSAConstructorSparseEx;
import org.jetbrains.java.decompiler.modules.decompiler.stats.IfStatement;
import org.jetbrains.java.decompiler.modules.decompiler.stats.Statement;
import org.jetbrains.java.decompiler.modules.decompiler.vars.VarVersionPair;
import org.jetbrains.java.decompiler.struct.StructClass;
import org.jetbrains.java.decompiler.struct.gen.VarType;
import org.jetbrains.java.decompiler.struct.match.MatchEngine;
import org.jetbrains.java.decompiler.util.FastSparseSetFactory.FastSparseSet;
import org.jetbrains.java.decompiler.util.InterpreterUtil;
public class SimplifyExprentsHelper {
// Shared matcher for the inlined 1.4-style class-literal pattern.
// NOTE(review): presumably consumed by collapseInlinedClass14(); its initialization is not visible here — confirm.
static final MatchEngine class14Builder = new MatchEngine();

// Whether this is the first simplification pass; isStackAssignement2 is only applied on later passes (see simplifyStackVarsExprents).
private final boolean firstInvocation;

public SimplifyExprentsHelper(boolean firstInvocation) {
  this.firstInvocation = firstInvocation;
}
/**
 * Recursively simplifies stack-variable expressions in {@code stat} and all nested statements.
 * For composite statements (no direct exprent list) it repeatedly merges composed ifs,
 * collapses ternary (iff) patterns and optionally 1.4-style class literals until a fixpoint
 * is reached; for leaf statements it simplifies the exprent list directly.
 *
 * @param stat             statement tree to simplify (modified in place)
 * @param setReorderedIfs  accumulator of if-statement ids reordered by IfHelper
 * @param ssa              SSA form used when collapsing iff patterns
 * @param cl               owning class, used for lambda/constructor recognition
 * @return true if anything was changed
 */
public boolean simplifyStackVarsStatement(Statement stat, HashSet<Integer> setReorderedIfs, SSAConstructorSparseEx ssa, StructClass cl) {
  boolean res = false;
  if (stat.getExprents() == null) {
    boolean processClass14 = DecompilerContext.getOption(IFernflowerPreferences.DECOMPILE_CLASS_1_4);
    // Fixpoint loop: restart iteration over children after every structural change,
    // because a merge/collapse invalidates the current statement list.
    while (true) {
      boolean changed = false;
      for (Statement st : stat.getStats()) {
        res |= simplifyStackVarsStatement(st, setReorderedIfs, ssa, cl);
        // collapse composed if's
        if (changed = IfHelper.mergeIfs(st, setReorderedIfs)) {
          break;
        }
        // collapse iff ?: statement
        if (changed = buildIff(st, ssa)) {
          break;
        }
        // collapse inlined .class property in version 1.4 and before
        if (processClass14 && (changed = collapseInlinedClass14(st))) {
          break;
        }
      }
      res |= changed;
      if (!changed) {
        break;
      }
    }
  }
  else {
    res |= simplifyStackVarsExprents(stat.getExprents(), cl);
  }
  return res;
}
/**
 * Applies a series of peephole rewrites over a flat exprent list: constructor/lambda
 * folding, monitor-exit and trivial-assignment removal, array-initializer folding,
 * ++/-- recognition, and stack-assignment merging.
 *
 * The index is only advanced when no rewrite fired; every rewrite re-examines the
 * same position, since the replacement may itself be rewritable.
 *
 * @param list exprent list to rewrite (modified in place)
 * @param cl   owning class, passed to the lambda recognizer
 * @return true if anything was changed
 */
private boolean simplifyStackVarsExprents(List<Exprent> list, StructClass cl) {
  boolean res = false;
  int index = 0;
  while (index < list.size()) {
    Exprent current = list.get(index);
    Exprent ret = isSimpleConstructorInvocation(current);
    if (ret != null) {
      list.set(index, ret);
      res = true;
      continue;
    }
    // lambda expression (Java 8)
    ret = isLambda(current, cl);
    if (ret != null) {
      list.set(index, ret);
      res = true;
      continue;
    }
    // remove monitor exit
    if (isMonitorExit(current)) {
      list.remove(index);
      res = true;
      continue;
    }
    // trivial assignment of a stack variable
    if (isTrivialStackAssignment(current)) {
      list.remove(index);
      res = true;
      continue;
    }
    // all remaining rewrites need a following exprent
    if (index == list.size() - 1) {
      break;
    }
    Exprent next = list.get(index + 1);
    // constructor invocation
    if (isConstructorInvocationRemote(list, index)) {
      list.remove(index);
      res = true;
      continue;
    }
    // remove getClass() invocation, which is part of a qualified new
    if (DecompilerContext.getOption(IFernflowerPreferences.REMOVE_GET_CLASS_NEW)) {
      if (isQualifiedNewGetClass(current, next)) {
        list.remove(index);
        res = true;
        continue;
      }
    }
    // direct initialization of an array: fold the per-element assignments into the new-expression
    int arrcount = isArrayInitializer(list, index);
    if (arrcount > 0) {
      for (int i = 0; i < arrcount; i++) {
        list.remove(index + 1);
      }
      res = true;
      continue;
    }
    // add array initializer expression
    if (addArrayInitializer(current, next)) {
      list.remove(index + 1);
      res = true;
      continue;
    }
    // integer ++expr and --expr (except for vars!)
    Exprent func = isPPIorMMI(current);
    if (func != null) {
      list.set(index, func);
      res = true;
      continue;
    }
    // expr++ and expr--
    if (isIPPorIMM(current, next)) {
      list.remove(index + 1);
      res = true;
      continue;
    }
    // assignment on stack
    if (isStackAssignement(current, next)) {
      list.remove(index + 1);
      res = true;
      continue;
    }
    if (!firstInvocation && isStackAssignement2(current, next)) {
      list.remove(index + 1);
      res = true;
      continue;
    }
    index++;
  }
  return res;
}
/**
 * Tries to fold one element-assignment into an existing array initializer:
 * given "v = new T[]{...}" followed by "v[c] = e", moves e into position c of
 * the initializer, provided that slot currently holds the type's default value
 * and e does not reference v itself. Returns true on success; the caller is
 * then expected to delete the second exprent.
 */
private static boolean addArrayInitializer(Exprent first, Exprent second) {
  if (first.type != Exprent.EXPRENT_ASSIGNMENT) {
    return false;
  }
  AssignmentExprent arrayAssignment = (AssignmentExprent)first;
  if (arrayAssignment.getRight().type != Exprent.EXPRENT_NEW ||
      arrayAssignment.getLeft().type != Exprent.EXPRENT_VAR) {
    return false;
  }

  NewExprent newExpr = (NewExprent)arrayAssignment.getRight();
  if (newExpr.getLstArrayElements().isEmpty()) {
    return false;
  }
  VarExprent arrayVar = (VarExprent)arrayAssignment.getLeft();

  if (second.type != Exprent.EXPRENT_ASSIGNMENT) {
    return false;
  }
  AssignmentExprent elementAssignment = (AssignmentExprent)second;
  if (elementAssignment.getLeft().type != Exprent.EXPRENT_ARRAY) {
    return false;
  }

  // The second exprent must write "arrayVar[constIndex]".
  ArrayExprent arrayAccess = (ArrayExprent)elementAssignment.getLeft();
  if (arrayAccess.getArray().type != Exprent.EXPRENT_VAR ||
      !arrayVar.equals(arrayAccess.getArray()) ||
      arrayAccess.getIndex().type != Exprent.EXPRENT_CONST) {
    return false;
  }

  int elementIndex = ((ConstExprent)arrayAccess.getIndex()).getIntValue();
  if (elementIndex >= newExpr.getLstArrayElements().size()) {
    return false;
  }

  // Only overwrite slots still holding the element type's default value.
  Exprent currentInit = newExpr.getLstArrayElements().get(elementIndex);
  if (currentInit.type != Exprent.EXPRENT_CONST) {
    return false;
  }
  VarType elementType = newExpr.getNewType().decreaseArrayDim();
  ConstExprent defaultValue = ExprProcessor.getDefaultArrayValue(elementType);
  if (!((ConstExprent)currentInit).equals(defaultValue)) {
    return false;
  }

  // A self-referencing value cannot be inlined into the initializer.
  Exprent value = elementAssignment.getRight();
  if (value.containsExprent(arrayVar)) {
    return false;
  }

  newExpr.getLstArrayElements().set(elementIndex, value);
  // A nested non-empty array initializer inside a multi-dim array must use direct-init syntax.
  if (value.type == Exprent.EXPRENT_NEW) {
    NewExprent nestedNew = (NewExprent)value;
    if (newExpr.getNewType().arrayDim > 1 && !nestedNew.getLstArrayElements().isEmpty()) {
      nestedNew.setDirectArrayInit(true);
    }
  }
  return true;
}
/**
 * Detects the pattern "v = new T[c]; v[0] = e0; v[1] = e1; ..." and, if enough
 * of the array is covered, converts the allocation into an array-initializer
 * expression, filling unassigned slots with the element type's default value.
 *
 * @param list  exprent list being scanned
 * @param index position of the candidate allocation assignment
 * @return the number of folded element assignments (to be removed by the caller),
 *         or 0 if the pattern does not apply
 */
private static int isArrayInitializer(List<Exprent> list, int index) {
  Exprent current = list.get(index);
  if (current.type == Exprent.EXPRENT_ASSIGNMENT) {
    AssignmentExprent as = (AssignmentExprent)current;
    if (as.getRight().type == Exprent.EXPRENT_NEW && as.getLeft().type == Exprent.EXPRENT_VAR) {
      NewExprent newex = (NewExprent)as.getRight();
      // single-dimension allocation with a constant size and no initializer yet
      if (newex.getExprType().arrayDim > 0 && newex.getLstDims().size() == 1 && newex.getLstArrayElements().isEmpty() &&
          newex.getLstDims().get(0).type == Exprent.EXPRENT_CONST) {
        int size = ((Integer)((ConstExprent)newex.getLstDims().get(0)).getValue()).intValue();
        if (size == 0) {
          return 0;
        }
        VarExprent arrvar = (VarExprent)as.getLeft();
        HashMap<Integer, Exprent> mapInit = new HashMap<>();
        // Collect a consecutive run of "arrvar[const] = value" assignments following the allocation.
        int i = 1;
        while (index + i < list.size() && i <= size) {
          boolean found = false;
          Exprent expr = list.get(index + i);
          if (expr.type == Exprent.EXPRENT_ASSIGNMENT) {
            AssignmentExprent aas = (AssignmentExprent)expr;
            if (aas.getLeft().type == Exprent.EXPRENT_ARRAY) {
              ArrayExprent arrex = (ArrayExprent)aas.getLeft();
              if (arrex.getArray().type == Exprent.EXPRENT_VAR && arrvar.equals(arrex.getArray())
                  && arrex.getIndex().type == Exprent.EXPRENT_CONST) {
                int constvalue = ((ConstExprent)arrex.getIndex())
                  .getIntValue(); // TODO: check for a number type. Failure extremely improbable, but nevertheless...
                // each index may be assigned only once, and the value must not reference the array itself
                if (constvalue < size && !mapInit.containsKey(constvalue)) {
                  if (!aas.getRight().containsExprent(arrvar)) {
                    mapInit.put(constvalue, aas.getRight());
                    found = true;
                  }
                }
              }
            }
          }
          if (!found) {
            break;
          }
          i++;
        }
        // Heuristic: fold only if enough of the array is explicitly initialized
        // (any coverage for stack vars, >=30% for small arrays, >=70% for larger ones).
        double fraction = ((double)mapInit.size()) / size;
        if ((arrvar.isStack() && fraction > 0) || (size <= 7 && fraction >= 0.3) ||
            (size > 7 && fraction >= 0.7)) {
          List<Exprent> lstRet = new ArrayList<>();
          // Pre-fill every slot with the default value, then overwrite the assigned ones.
          VarType arrtype = newex.getNewType().decreaseArrayDim();
          ConstExprent defaultval = ExprProcessor.getDefaultArrayValue(arrtype);
          for (int j = 0; j < size; j++) {
            lstRet.add(defaultval.copy());
          }
          int dims = newex.getNewType().arrayDim;
          for (Entry<Integer, Exprent> ent : mapInit.entrySet()) {
            Exprent tempexpr = ent.getValue();
            lstRet.set(ent.getKey(), tempexpr);
            // nested non-empty initializers of multi-dim arrays need direct-init syntax
            if (tempexpr.type == Exprent.EXPRENT_NEW) {
              NewExprent tempnewex = (NewExprent)tempexpr;
              if (dims > 1 && !tempnewex.getLstArrayElements().isEmpty()) {
                tempnewex.setDirectArrayInit(true);
              }
            }
          }
          newex.setLstArrayElements(lstRet);
          return mapInit.size();
        }
      }
    }
  }
  return 0;
}
/**
 * Returns true for a no-op assignment of a stack variable to itself,
 * i.e. both sides are stack vars with the same variable index.
 */
private static boolean isTrivialStackAssignment(Exprent first) {
  if (first.type != Exprent.EXPRENT_ASSIGNMENT) {
    return false;
  }
  AssignmentExprent assignment = (AssignmentExprent)first;
  Exprent lhs = assignment.getLeft();
  Exprent rhs = assignment.getRight();
  if (lhs.type != Exprent.EXPRENT_VAR || rhs.type != Exprent.EXPRENT_VAR) {
    return false;
  }
  VarExprent leftVar = (VarExprent)lhs;
  VarExprent rightVar = (VarExprent)rhs;
  return leftVar.getIndex() == rightVar.getIndex() && leftVar.isStack() && rightVar.isStack();
}
/**
 * Merges the pattern "stackVar = e; target = stackVar" (target not a stack var)
 * into the chained assignment "stackVar = (target = e)", mutating {@code first}
 * in place. Used e.g. for 1.4-style class invocations. Returns true on success;
 * the caller then removes {@code second}.
 */
private static boolean isStackAssignement2(Exprent first, Exprent second) { // e.g. 1.4-style class invocation
  if (first.type != Exprent.EXPRENT_ASSIGNMENT || second.type != Exprent.EXPRENT_ASSIGNMENT) {
    return false;
  }
  AssignmentExprent firstAssignment = (AssignmentExprent)first;
  AssignmentExprent secondAssignment = (AssignmentExprent)second;

  // first must write a stack variable that second reads back
  if (firstAssignment.getLeft().type != Exprent.EXPRENT_VAR ||
      secondAssignment.getRight().type != Exprent.EXPRENT_VAR ||
      !firstAssignment.getLeft().equals(secondAssignment.getRight()) ||
      !((VarExprent)firstAssignment.getLeft()).isStack()) {
    return false;
  }
  // second's target must not itself be a stack variable
  if (secondAssignment.getLeft().type == Exprent.EXPRENT_VAR &&
      ((VarExprent)secondAssignment.getLeft()).isStack()) {
    return false;
  }

  firstAssignment.setRight(new AssignmentExprent(secondAssignment.getLeft(), firstAssignment.getRight(), secondAssignment.bytecode));
  return true;
}
  /**
   * Tries to merge two assignments that share an identical right-hand side, where the first
   * targets a stack variable and the second a non-stack location, by chaining the second
   * assignment into the first ('sN = target = rhs'). The first assignment's right-hand side
   * is searched through any already-built assignment chain.
   *
   * @param first  assignment expected to target a stack variable (possibly already chained)
   * @param second assignment with the same right-hand side targeting a non-stack location
   * @return true if {@code second} was chained into {@code first}
   */
  private static boolean isStackAssignement(Exprent first, Exprent second) {
    if (first.type == Exprent.EXPRENT_ASSIGNMENT && second.type == Exprent.EXPRENT_ASSIGNMENT) {
      AssignmentExprent asf = (AssignmentExprent)first;
      AssignmentExprent ass = (AssignmentExprent)second;
      while (true) {
        if (asf.getRight().equals(ass.getRight())) {
          if ((asf.getLeft().type == Exprent.EXPRENT_VAR && ((VarExprent)asf.getLeft()).isStack()) &&
              (ass.getLeft().type != Exprent.EXPRENT_VAR || !((VarExprent)ass.getLeft()).isStack())) {
            // refuse to chain if the new target would reference the stack variable itself
            if (!ass.getLeft().containsExprent(asf.getLeft())) {
              asf.setRight(ass);
              return true;
            }
          }
        }
        // descend into an already-chained assignment on the right-hand side
        if (asf.getRight().type == Exprent.EXPRENT_ASSIGNMENT) {
          asf = (AssignmentExprent)asf.getRight();
        }
        else {
          break;
        }
      }
    }
    return false;
  }
  /**
   * Recognizes the pattern 'x = x + 1' / 'x = x - 1' (constant operand of value one) and
   * converts it into a pre-increment ('++x') or pre-decrement ('--x') function exprent.
   * Only non-variable targets (e.g. fields, array elements) are accepted here; plain variable
   * targets are deliberately excluded — presumably handled by another pass, confirm.
   *
   * @param first exprent to inspect
   * @return the replacement PPI/MMI function exprent, or null if the pattern does not match
   */
  private static Exprent isPPIorMMI(Exprent first) {
    if (first.type == Exprent.EXPRENT_ASSIGNMENT) {
      AssignmentExprent as = (AssignmentExprent)first;
      if (as.getRight().type == Exprent.EXPRENT_FUNCTION) {
        FunctionExprent func = (FunctionExprent)as.getRight();
        if (func.getFuncType() == FunctionExprent.FUNCTION_ADD ||
            func.getFuncType() == FunctionExprent.FUNCTION_SUB) {
          Exprent econd = func.getLstOperands().get(0);
          Exprent econst = func.getLstOperands().get(1);
          // addition is commutative: also accept '1 + x' by swapping the operands
          if (econst.type != Exprent.EXPRENT_CONST && econd.type == Exprent.EXPRENT_CONST &&
              func.getFuncType() == FunctionExprent.FUNCTION_ADD) {
            econd = econst;
            econst = func.getLstOperands().get(0);
          }
          if (econst.type == Exprent.EXPRENT_CONST && ((ConstExprent)econst).hasValueOne()) {
            Exprent left = as.getLeft();
            // target must be the same (non-variable) expression that is incremented
            if (left.type != Exprent.EXPRENT_VAR && left.equals(econd)) {
              FunctionExprent ret = new FunctionExprent(
                func.getFuncType() == FunctionExprent.FUNCTION_ADD ? FunctionExprent.FUNCTION_PPI : FunctionExprent.FUNCTION_MMI,
                econd, func.bytecode);
              ret.setImplicitType(VarType.VARTYPE_INT);
              return ret;
            }
          }
        }
      }
    }
    return null;
  }
private static boolean isIPPorIMM(Exprent first, Exprent second) {
if (first.type == Exprent.EXPRENT_ASSIGNMENT && second.type == Exprent.EXPRENT_FUNCTION) {
AssignmentExprent as = (AssignmentExprent)first;
FunctionExprent in = (FunctionExprent)second;
if ((in.getFuncType() == FunctionExprent.FUNCTION_MMI || in.getFuncType() == FunctionExprent.FUNCTION_PPI) &&
in.getLstOperands().get(0).equals(as.getRight())) {
if (in.getFuncType() == FunctionExprent.FUNCTION_MMI) {
in.setFuncType(FunctionExprent.FUNCTION_IMM);
}
else {
in.setFuncType(FunctionExprent.FUNCTION_IPP);
}
as.setRight(in);
return true;
}
}
return false;
}
private static boolean isMonitorExit(Exprent first) {
if (first.type == Exprent.EXPRENT_MONITOR) {
MonitorExprent monexpr = (MonitorExprent)first;
if (monexpr.getMonType() == MonitorExprent.MONITOR_EXIT && monexpr.getValue().type == Exprent.EXPRENT_VAR
&& !((VarExprent)monexpr.getValue()).isStack()) {
return true;
}
}
return false;
}
  /**
   * Detects a 'qualifier.getClass()' invocation that precedes a 'new' expression of a nested
   * class whose constructor takes that same qualifier as its first parameter — apparently the
   * compiler-generated implicit null check for a qualified inner-class instantiation, so the
   * getClass() call can be suppressed in the output (confirm against the caller).
   *
   * @param first  candidate getClass() invocation
   * @param second exprent expected to contain the corresponding 'new' expression
   * @return true if {@code first} looks like such a synthetic check for a new exprent in {@code second}
   */
  private static boolean isQualifiedNewGetClass(Exprent first, Exprent second) {
    if (first.type == Exprent.EXPRENT_INVOCATION) {
      InvocationExprent invexpr = (InvocationExprent)first;
      // non-static getClass() on a plain variable
      if (!invexpr.isStatic() && invexpr.getInstance().type == Exprent.EXPRENT_VAR && invexpr.getName().equals("getClass") &&
          invexpr.getStringDescriptor().equals("()Ljava/lang/Class;")) {
        // scan second and all of its sub-exprents for a matching 'new'
        List<Exprent> lstExprents = second.getAllExprents();
        lstExprents.add(second);
        for (Exprent expr : lstExprents) {
          if (expr.type == Exprent.EXPRENT_NEW) {
            NewExprent nexpr = (NewExprent)expr;
            // the qualifier must be passed as the first constructor argument
            if (nexpr.getConstructor() != null && !nexpr.getConstructor().getLstParameters().isEmpty() &&
                nexpr.getConstructor().getLstParameters().get(0).equals(invexpr.getInstance())) {
              String classname = nexpr.getNewType().value;
              ClassNode node = DecompilerContext.getClassProcessor().getMapRootClasses().get(classname);
              // only applies to nested (non-root) classes being decompiled in this run
              if (node != null && node.type != ClassNode.CLASS_ROOT) {
                return true;
              }
            }
          }
        }
      }
    }
    return false;
  }
// private static boolean isConstructorInvocationRemote(List<Exprent> list, int index) {
//
// Exprent current = list.get(index);
//
// if(current.type == Exprent.EXPRENT_ASSIGNMENT) {
// AssignmentExprent as = (AssignmentExprent)current;
//
// if(as.getLeft().type == Exprent.EXPRENT_VAR && as.getRight().type == Exprent.EXPRENT_NEW) {
//
// NewExprent newexpr = (NewExprent)as.getRight();
// VarType newtype = newexpr.getNewType();
// VarVersionPair leftPaar = new VarVersionPair((VarExprent)as.getLeft());
//
// if(newtype.type == CodeConstants.TYPE_OBJECT && newtype.arrayDim == 0 &&
// newexpr.getConstructor() == null) {
//
// Set<VarVersionPair> setChangedVars = new HashSet<VarVersionPair>();
//
// for(int i = index + 1; i < list.size(); i++) {
// Exprent remote = list.get(i);
//
// if(remote.type == Exprent.EXPRENT_INVOCATION) {
// InvocationExprent in = (InvocationExprent)remote;
//
// if(in.getFuncType() == InvocationExprent.TYP_INIT && in.getInstance().type == Exprent.EXPRENT_VAR
// && as.getLeft().equals(in.getInstance())) {
//
// Set<VarVersionPair> setVars = remote.getAllVariables();
// setVars.remove(leftPaar);
// setVars.retainAll(setChangedVars);
//
// if(setVars.isEmpty()) {
//
// newexpr.setConstructor(in);
// in.setInstance(null);
//
// if(!setChangedVars.isEmpty()) { // some exprents inbetween
// list.add(index+1, as.copy());
// list.remove(i+1);
// } else {
// list.set(i, as.copy());
// }
//
// return true;
// }
// }
// }
//
// boolean isTempAssignment = false;
//
// if(remote.type == Exprent.EXPRENT_ASSIGNMENT) { // ugly solution
// AssignmentExprent asremote = (AssignmentExprent)remote;
// if(asremote.getLeft().type == Exprent.EXPRENT_VAR &&
// asremote.getRight().type == Exprent.EXPRENT_VAR) {
// setChangedVars.add(new VarVersionPair((VarExprent)asremote.getLeft()));
// isTempAssignment = true;
// }
//
// // FIXME: needs to be rewritten
// // propagate (var = new X) forward to the <init> invokation and then reduce
//
//// if(asremote.getLeft().type == Exprent.EXPRENT_VAR) {
//// List<Exprent> lstRightExprents = asremote.getRight().getAllExprents(true);
//// lstRightExprents.add(asremote.getRight());
////
//// Set<VarVersionPair> setTempChangedVars = new HashSet<VarVersionPair>();
//// boolean isTemp = true;
////
//// for(Exprent expr : lstRightExprents) {
//// if(expr.type != Exprent.EXPRENT_VAR && expr.type != Exprent.EXPRENT_FIELD) {
//// isTemp = false;
//// break;
//// } else if(expr.type == Exprent.EXPRENT_VAR) {
//// setTempChangedVars.add(new VarVersionPair((VarExprent)expr));
//// }
//// }
////
//// if(isTemp) {
//// setChangedVars.addAll(setTempChangedVars);
//// isTempAssignment = true;
//// }
//// }
//// } else if(remote.type == Exprent.EXPRENT_FUNCTION) {
//// FunctionExprent fexpr = (FunctionExprent)remote;
//// if(fexpr.getFuncType() == FunctionExprent.FUNCTION_IPP || fexpr.getFuncType() == FunctionExprent.FUNCTION_IMM
//// || fexpr.getFuncType() == FunctionExprent.FUNCTION_PPI || fexpr.getFuncType() == FunctionExprent.FUNCTION_MMI) {
//// if(fexpr.getLstOperands().get(0).type == Exprent.EXPRENT_VAR) {
//// setChangedVars.add(new VarVersionPair((VarExprent)fexpr.getLstOperands().get(0)));
//// isTempAssignment = true;
//// }
//// }
// }
//
// if(!isTempAssignment) {
// Set<VarVersionPair> setVars = remote.getAllVariables();
// if(setVars.contains(leftPaar)) {
// return false;
// } else {
// setChangedVars.addAll(setVars);
// }
// }
// }
// }
// }
// }
//
// return false;
// }
  // propagate (var = new X) forward to the <init> invocation
  /**
   * Looks for the '&lt;init&gt;' invocation belonging to the uninitialized 'new' expression assigned
   * at {@code list.get(index)} and merges the two, turning
   * 'var = new X; ...; var.&lt;init&gt;(args);' into 'var = new X(args);' at the invocation's position.
   * Gives up as soon as the variable is used anywhere before its constructor runs.
   *
   * @param list  flat list of exprents within one block
   * @param index position of the candidate 'var = new X' assignment
   * @return true if the constructor call was folded into the new exprent
   */
  private static boolean isConstructorInvocationRemote(List<Exprent> list, int index) {
    Exprent current = list.get(index);
    if (current.type == Exprent.EXPRENT_ASSIGNMENT) {
      AssignmentExprent as = (AssignmentExprent)current;
      if (as.getLeft().type == Exprent.EXPRENT_VAR && as.getRight().type == Exprent.EXPRENT_NEW) {
        NewExprent newexpr = (NewExprent)as.getRight();
        VarType newtype = newexpr.getNewType();
        VarVersionPair leftPaar = new VarVersionPair((VarExprent)as.getLeft());
        // only plain object types whose constructor has not been attached yet
        if (newtype.type == CodeConstants.TYPE_OBJECT && newtype.arrayDim == 0 && newexpr.getConstructor() == null) {
          for (int i = index + 1; i < list.size(); i++) {
            Exprent remote = list.get(i);
            // <init> invocation
            if (remote.type == Exprent.EXPRENT_INVOCATION) {
              InvocationExprent in = (InvocationExprent)remote;
              if (in.getFunctype() == InvocationExprent.TYP_INIT &&
                  in.getInstance().type == Exprent.EXPRENT_VAR &&
                  as.getLeft().equals(in.getInstance())) {
                // attach the constructor and move the whole assignment to the invocation's slot
                newexpr.setConstructor(in);
                in.setInstance(null);
                list.set(i, as.copy());
                return true;
              }
            }
            // check for variable in use
            Set<VarVersionPair> setVars = remote.getAllVariables();
            if (setVars.contains(leftPaar)) { // variable used somewhere in between -> exit, need a better reduced code
              return false;
            }
          }
        }
      }
    }
    return false;
  }
  /**
   * Recursively replaces invokedynamic call sites for which a synthetic lambda class was
   * registered with a 'new' exprent of that class, so lambdas render as anonymous classes
   * (or method references) downstream. Nested exprents are rewritten in place first.
   *
   * @param exprent exprent tree to scan
   * @param cl      class owning the invokedynamic, used to derive the lambda class name
   * @return the replacement exprent for {@code exprent} itself, or null if it stays as is
   */
  private static Exprent isLambda(Exprent exprent, StructClass cl) {
    // bottom-up: rewrite children before examining this node
    List<Exprent> lst = exprent.getAllExprents();
    for (Exprent expr : lst) {
      Exprent ret = isLambda(expr, cl);
      if (ret != null) {
        exprent.replaceExprent(expr, ret);
      }
    }
    if (exprent.type == Exprent.EXPRENT_INVOCATION) {
      InvocationExprent in = (InvocationExprent)exprent;
      if (in.getInvocationTyp() == InvocationExprent.INVOKE_DYNAMIC) {
        String lambda_class_name = cl.qualifiedName + in.getInvokeDynamicClassSuffix();
        ClassNode lambda_class = DecompilerContext.getClassProcessor().getMapRootClasses().get(lambda_class_name);
        if (lambda_class != null) { // real lambda class found, replace invocation with an anonymous class
          NewExprent newexp = new NewExprent(new VarType(lambda_class_name, true), null, 0, in.bytecode);
          newexp.setConstructor(in);
          // note: we don't set the instance to null with in.setInstance(null) like it is done for a common constructor invocation
          // lambda can also be a reference to a virtual method (e.g. String x; ...(x::toString);)
          // in this case instance will hold the corresponding object
          return newexp;
        }
      }
    }
    return null;
  }
private static Exprent isSimpleConstructorInvocation(Exprent exprent) {
List<Exprent> lst = exprent.getAllExprents();
for (Exprent expr : lst) {
Exprent ret = isSimpleConstructorInvocation(expr);
if (ret != null) {
exprent.replaceExprent(expr, ret);
}
}
if (exprent.type == Exprent.EXPRENT_INVOCATION) {
InvocationExprent in = (InvocationExprent)exprent;
if (in.getFunctype() == InvocationExprent.TYP_INIT && in.getInstance().type == Exprent.EXPRENT_NEW) {
NewExprent newexp = (NewExprent)in.getInstance();
newexp.setConstructor(in);
in.setInstance(null);
return newexp;
}
}
return null;
}
private static boolean buildIff(Statement stat, SSAConstructorSparseEx ssa) {
if (stat.type == Statement.TYPE_IF && stat.getExprents() == null) {
IfStatement stif = (IfStatement)stat;
Exprent ifheadexpr = stif.getHeadexprent();
Set<Integer> ifheadexpr_bytecode = (ifheadexpr == null ? null : ifheadexpr.bytecode);
if (stif.iftype == IfStatement.IFTYPE_IFELSE) {
Statement ifstat = stif.getIfstat();
Statement elsestat = stif.getElsestat();
if (ifstat.getExprents() != null && ifstat.getExprents().size() == 1
&& elsestat.getExprents() != null && elsestat.getExprents().size() == 1
&& ifstat.getAllSuccessorEdges().size() == 1 && elsestat.getAllSuccessorEdges().size() == 1
&& ifstat.getAllSuccessorEdges().get(0).getDestination() == elsestat.getAllSuccessorEdges().get(0).getDestination()) {
Exprent ifexpr = ifstat.getExprents().get(0);
Exprent elseexpr = elsestat.getExprents().get(0);
if (ifexpr.type == Exprent.EXPRENT_ASSIGNMENT && elseexpr.type == Exprent.EXPRENT_ASSIGNMENT) {
AssignmentExprent ifas = (AssignmentExprent)ifexpr;
AssignmentExprent elseas = (AssignmentExprent)elseexpr;
if (ifas.getLeft().type == Exprent.EXPRENT_VAR && elseas.getLeft().type == Exprent.EXPRENT_VAR) {
VarExprent ifvar = (VarExprent)ifas.getLeft();
VarExprent elsevar = (VarExprent)elseas.getLeft();
if (ifvar.getIndex() == elsevar.getIndex() && ifvar.isStack()) { // ifvar.getIndex() >= VarExprent.STACK_BASE) {
boolean found = false;
for (Entry<VarVersionPair, FastSparseSet<Integer>> ent : ssa.getPhi().entrySet()) {
if (ent.getKey().var == ifvar.getIndex()) {
if (ent.getValue().contains(ifvar.getVersion()) && ent.getValue().contains(elsevar.getVersion())) {
found = true;
break;
}
}
}
if (found) {
List<Exprent> data = new ArrayList<>();
data.addAll(stif.getFirst().getExprents());
data.add(new AssignmentExprent(ifvar, new FunctionExprent(FunctionExprent.FUNCTION_IIF,
Arrays.asList(
stif.getHeadexprent().getCondition(),
ifas.getRight(),
elseas.getRight()), ifheadexpr_bytecode), ifheadexpr_bytecode));
stif.setExprents(data);
if (stif.getAllSuccessorEdges().isEmpty()) {
StatEdge ifedge = ifstat.getAllSuccessorEdges().get(0);
StatEdge edge = new StatEdge(ifedge.getType(), stif, ifedge.getDestination());
stif.addSuccessor(edge);
if (ifedge.closure != null) {
ifedge.closure.addLabeledEdge(edge);
}
}
SequenceHelper.destroyAndFlattenStatement(stif);
return true;
}
}
}
}
else if (ifexpr.type == Exprent.EXPRENT_EXIT && elseexpr.type == Exprent.EXPRENT_EXIT) {
ExitExprent ifex = (ExitExprent)ifexpr;
ExitExprent elseex = (ExitExprent)elseexpr;
if (ifex.getExitType() == elseex.getExitType() && ifex.getValue() != null && elseex.getValue() != null &&
ifex.getExitType() == ExitExprent.EXIT_RETURN) {
// throw is dangerous, because of implicit casting to a common superclass
// e.g. throws IOException and throw true?new RuntimeException():new IOException(); won't work
if (ifex.getExitType() == ExitExprent.EXIT_THROW &&
!ifex.getValue().getExprType().equals(elseex.getValue().getExprType())) { // note: getExprType unreliable at this point!
return false;
}
// avoid flattening to 'iff' if any of the branches is an 'iff' already
if (isIff(ifex.getValue()) || isIff(elseex.getValue())) {
return false;
}
List<Exprent> data = new ArrayList<>();
data.addAll(stif.getFirst().getExprents());
data.add(new ExitExprent(ifex.getExitType(), new FunctionExprent(FunctionExprent.FUNCTION_IIF,
Arrays.asList(
stif.getHeadexprent().getCondition(),
ifex.getValue(),
elseex.getValue()), ifheadexpr_bytecode), ifex.getRetType(), ifheadexpr_bytecode));
stif.setExprents(data);
StatEdge retedge = ifstat.getAllSuccessorEdges().get(0);
stif.addSuccessor(new StatEdge(StatEdge.TYPE_BREAK, stif, retedge.getDestination(),
retedge.closure == stif ? stif.getParent() : retedge.closure));
SequenceHelper.destroyAndFlattenStatement(stif);
return true;
}
}
}
}
}
return false;
}
private static boolean isIff(Exprent exp) {
return exp.type == Exprent.EXPRENT_FUNCTION && ((FunctionExprent) exp).getFuncType() == FunctionExprent.FUNCTION_IIF;
}
  // Pattern for recognizing javac's pre-1.5 class-literal idiom:
  //   if (field == null) { try { var = Class.forName("Name"); } catch (...) { throw ...; } field = var; }
  // The matcher variables ($fieldname$, $var$, $classname$, $assignfield$, $field$) are bound
  // during matching and consumed by collapseInlinedClass14() below.
  static {
    class14Builder.parse(
      "statement type:if iftype:if exprsize:-1\n" +
      " exprent position:head type:if\n" +
      " exprent type:function functype:eq\n" +
      " exprent type:field name:$fieldname$\n" +
      " exprent type:constant consttype:null\n" +
      " statement type:basicblock\n" +
      " exprent position:-1 type:assignment ret:$assignfield$\n" +
      " exprent type:var index:$var$\n" +
      " exprent type:field name:$fieldname$\n" +
      " statement type:sequence statsize:2\n" +
      " statement type:trycatch\n" +
      " statement type:basicblock exprsize:1\n" +
      " exprent type:assignment\n" +
      " exprent type:var index:$var$\n" +
      " exprent type:invocation invclass:java/lang/Class signature:forName(Ljava/lang/String;)Ljava/lang/Class;\n" +
      " exprent position:0 type:constant consttype:string constvalue:$classname$\n" +
      " statement type:basicblock exprsize:1\n" +
      " exprent type:exit exittype:throw\n" +
      " statement type:basicblock exprsize:1\n" +
      " exprent type:assignment\n" +
      " exprent type:field name:$fieldname$ ret:$field$\n" +
      " exprent type:var index:$var$"
    );
  }
  /**
   * Rewrites a matched pre-1.5 class-literal idiom (see the static pattern above) into a plain
   * class constant, hides the synthetic cache field, and flattens the surrounding statement.
   *
   * @param stat statement to match against the class14 pattern
   * @return true if the statement matched and was rewritten
   */
  private static boolean collapseInlinedClass14(Statement stat) {
    boolean ret = class14Builder.match(stat);
    if(ret) {
      String class_name = (String)class14Builder.getVariableValue("$classname$");
      AssignmentExprent assfirst = (AssignmentExprent)class14Builder.getVariableValue("$assignfield$");
      FieldExprent fieldexpr = (FieldExprent)class14Builder.getVariableValue("$field$");
      // replace the Class.forName(...) result with a direct class constant
      assfirst.replaceExprent(assfirst.getRight(), new ConstExprent(VarType.VARTYPE_CLASS, class_name, null));
      List<Exprent> data = new ArrayList<>();
      data.addAll(stat.getFirst().getExprents());
      stat.setExprents(data);
      SequenceHelper.destroyAndFlattenStatement(stat);
      // hide the synthetic cache field from the decompiled output
      ClassWrapper wrapper = (ClassWrapper)DecompilerContext.getProperty(DecompilerContext.CURRENT_CLASS_WRAPPER);
      if (wrapper != null) {
        wrapper.getHiddenMembers().add(InterpreterUtil.makeUniqueKey(fieldexpr.getName(), fieldexpr.getDescriptor().descriptorString));
      }
    }
    return ret;
  }
}
| |
// CSE 373 Homework 2 (Star Chart)
// instructor-provided file
import java.util.*;
/**
* This is a basic testing program for your Star class.
* It will help you to write some of your methods even if StarChart is not completed.
* Please feel free to modify it to add your own testing code.
*/
public class TestStar {
    /** Runs the testing program. */
    public static void main(String[] args) {
        toStringTests();
        equalsTests();
        compareToTests1();
        compareToTests2();
        distanceTests1();
        distanceTests2();
    }
    /**
     * Performs a series of tests of the toString method.
     */
    public static void toStringTests() {
        System.out.println("Constructing some Stars ...");
        // note: the first three constructor arguments behave as 3-D coordinates in the
        // distance tests below; the meaning of the fourth argument is defined by Star.java
        // — assumed to be magnitude, TODO confirm
        Star star1 = new Star(0.25, -0.5, 0.125, 4.75);
        Star star2 = new Star(-0.25, 0.6, 0.85, 6.75);
        Star star3 = new Star(0.35, -0.1, -0.5, 0.75);
        Star star4 = new Star(0.6, 0.1, 0.4, 2.75);
        Star star5 = new Star(0.8, -0.3, -0.3, 5.75);
        System.out.println();
        System.out.println("toString:");
        System.out.println(star1);
        System.out.println(star2);
        System.out.println(star3);
        System.out.println(star4);
        System.out.println(star5);
    }
    /**
     * Performs a series of tests of the equals method.
     * Includes identical stars, copies, near-copies differing in one field,
     * and non-Star objects (which must compare unequal, not throw).
     */
    public static void equalsTests() {
        Star star1 = new Star(0.25, -0.5, 0.125, 4.75);
        Star star1copy = new Star(0.25, -0.5, 0.125, 4.75);
        Star star1similar1 = new Star(0.75, -0.5, 0.125, 4.75);
        Star star1similar3 = new Star(0.25, -0.5, 0.625, 4.75);
        Star star1similar4 = new Star(0.25, -0.5, 0.625, 2.0);
        Star star2 = new Star(-0.25, 0.6, 0.85, 6.75);
        Star star2copy = new Star(-0.25, 0.6, 0.85, 6.75);
        Star star3 = new Star(0.35, -0.1, -0.5, 0.75);
        Star star4 = new Star(0.6, 0.1, 0.4, 2.75);
        Star star5 = new Star(0.8, -0.3, -0.3, 5.75);
        Star star5copy = new Star(0.8, -0.3, -0.3, 5.75);
        System.out.println();
        System.out.println("equals:");
        equalsTestHelper(star1, star1, true);           // reflexive
        equalsTestHelper(star1, star1copy, true);       // equal field values
        equalsTestHelper(star1, star1similar1, false);
        equalsTestHelper(star1, star1similar3, false);
        equalsTestHelper(star1, star1similar4, false);
        equalsTestHelper(star1, star2, false);
        equalsTestHelper(star1, star3, false);
        equalsTestHelper(star1, star4, false);
        equalsTestHelper(star1, star5, false);
        equalsTestHelper(star2, star2copy, true);
        equalsTestHelper(star2, star3, false);
        equalsTestHelper(star2, star5, false);
        equalsTestHelper(star3, star3, true);
        equalsTestHelper(star4, star4, true);
        equalsTestHelper(star5, star5copy, true);
        // comparing against non-Star objects must simply return false
        equalsTestHelper(star1, "\"uh-oh\"", false);
        equalsTestHelper(star1, 3.14, false);
    }
    /**
     * Performs a series of tests of the compareTo method.
     */
    public static void compareToTests1() {
        Star star1 = new Star(0.25, -0.5, 0.125, 4.75);
        Star star1copy = new Star(0.25, -0.5, 0.125, 4.75);
        Star star1similar1 = new Star(0.75, -0.5, 0.125, 4.75);
        Star star1similar2 = new Star(0.25, -0.8, 0.125, 4.75);
        Star star1similar3 = new Star(0.25, -0.5, 0.625, 4.75);
        Star star1similar4 = new Star(0.25, -0.5, 0.625, 2.0);
        Star star2 = new Star(-0.25, 0.6, 0.85, 6.75);
        Star star3 = new Star(0.35, -0.1, -0.5, 0.75);
        Star star4 = new Star(0.6, 0.1, 0.4, 2.75);
        Star star5 = new Star(0.8, -0.3, -0.3, 5.75);
        System.out.println();
        System.out.println("compareTo:");
        // expected values encode only the sign of the result (see signString)
        compareToTestHelper(star1, star1copy, 0.0);
        compareToTestHelper(star1, star1similar1, -1.0);
        compareToTestHelper(star1, star1similar2, 1.0);
        compareToTestHelper(star1, star1similar3, -1.0);
        compareToTestHelper(star1, star1similar4, -1.0);
        compareToTestHelper(star1similar1, star1similar2, 1.0);
        compareToTestHelper(star1similar1, star1similar4, -1.0);
        compareToTestHelper(star1similar2, star1similar2, 0.0);
        compareToTestHelper(star1similar2, star1similar3, -1.0);
        compareToTestHelper(star1similar2, star1similar4, -1.0);
        compareToTestHelper(star1similar3, star1similar4, 1.0);
        compareToTestHelper(star1, star2, -1.0);
        compareToTestHelper(star2, star1, 1.0);     // antisymmetry check
        compareToTestHelper(star1, star3, 1.0);
        compareToTestHelper(star3, star1, -1.0);
        compareToTestHelper(star1, star4, -1.0);
        compareToTestHelper(star4, star1, 1.0);
        compareToTestHelper(star1, star5, 1.0);
    }
    /**
     * Performs a series of tests of the compareTo method.
     */
    public static void compareToTests2() {
        Star star1 = new Star(0.25, -0.5, 0.125, 4.75);
        Star star1copy = new Star(0.25, -0.5, 0.125, 4.75);
        Star star1similar1 = new Star(0.75, -0.5, 0.125, 4.75);
        Star star1similar2 = new Star(0.25, -0.8, 0.125, 4.75);
        Star star1similar3 = new Star(0.25, -0.5, 0.625, 4.75);
        Star star1similar4 = new Star(0.25, -0.5, 0.625, 2.0);
        Star star2 = new Star(-0.25, 0.6, 0.85, 6.75);
        Star star2copy = new Star(-0.25, 0.6, 0.85, 6.75);
        Star star3 = new Star(0.35, -0.1, -0.5, 0.75);
        Star star4 = new Star(0.6, 0.1, 0.4, 2.75);
        Star star5 = new Star(0.8, -0.3, -0.3, 5.75);
        Star star5copy = new Star(0.8, -0.3, -0.3, 5.75);
        compareToTestHelper(star5, star1, -1.0);
        compareToTestHelper(star2, star2copy, 0.0);
        compareToTestHelper(star2, star3, 1.0);
        compareToTestHelper(star2, star4, 1.0);
        compareToTestHelper(star4, star2, -1.0);
        compareToTestHelper(star2, star5, 1.0);
        compareToTestHelper(star3, star4, -1.0);
        compareToTestHelper(star4, star3, 1.0);
        compareToTestHelper(star3, star5, -1.0);
        compareToTestHelper(star5, star3, 1.0);
        compareToTestHelper(star4, star5, 1.0);
        compareToTestHelper(star5, star4, -1.0);
        compareToTestHelper(star5, star5copy, 0.0);
        // comparable stars should be able to be sorted
        // (Arrays.asList returns a fixed-size list; in-place sorting is still allowed)
        List<Star> starlist = Arrays.asList(star1, star1copy, star2copy,
                star1similar1, star2, star3, star5copy, star1similar2,
                star4, star5, star1similar3, star1similar4);
        System.out.println();
        System.out.println("list of stars: " + starlist);
        Collections.sort(starlist);
        System.out.println("sorted stars: " + starlist);
    }
    /**
     * Performs a series of tests of the distance method.
     */
    public static void distanceTests1() {
        Star star1 = new Star(0.25, -0.5, 0.125, 4.75);
        Star star1copy = new Star(0.25, -0.5, 0.125, 4.75);
        Star star1similar1 = new Star(0.75, -0.5, 0.125, 4.75);
        Star star1similar2 = new Star(0.25, -0.8, 0.125, 4.75);
        Star star1similar3 = new Star(0.25, -0.5, 0.625, 4.75);
        Star star1similar4 = new Star(0.25, -0.5, 0.625, 2.0);
        Star star2 = new Star(-0.25, 0.6, 0.85, 6.75);
        Star star3 = new Star(0.35, -0.1, -0.5, 0.75);
        Star star4 = new Star(0.6, 0.1, 0.4, 2.75);
        System.out.println();
        System.out.println("distance:");
        distanceTestHelper(star1, star1copy, 0.0);
        distanceTestHelper(star1, star1similar1, 0.5);
        distanceTestHelper(star1, star1similar2, 0.3);
        distanceTestHelper(star1, star1similar3, 0.5);
        // similar3/similar4 differ only in the fourth field, which does not affect distance
        distanceTestHelper(star1, star1similar4, 0.5);
        distanceTestHelper(star1similar1, star1similar2, 0.5831);
        distanceTestHelper(star1similar1, star1similar3, 0.7071);
        distanceTestHelper(star1similar1, star1similar4, 0.7071);
        distanceTestHelper(star1similar2, star1similar2, 0.0);
        distanceTestHelper(star1similar2, star1similar3, 0.5831);
        distanceTestHelper(star1similar2, star1similar4, 0.5831);
        distanceTestHelper(star1similar3, star1similar4, 0.0);
        // each pair is tested in both directions: distance must be symmetric
        distanceTestHelper(star1, star2, 1.4091);
        distanceTestHelper(star2, star1, 1.4091);
        distanceTestHelper(star1, star3, 0.7487);
        distanceTestHelper(star3, star1, 0.7487);
        distanceTestHelper(star1, star4, 0.7471);
        distanceTestHelper(star4, star1, 0.7471);
    }
    /**
     * Performs a series of tests of the distance method.
     */
    public static void distanceTests2() {
        Star star1 = new Star(0.25, -0.5, 0.125, 4.75);
        Star star2 = new Star(-0.25, 0.6, 0.85, 6.75);
        Star star2copy = new Star(-0.25, 0.6, 0.85, 6.75);
        Star star3 = new Star(0.35, -0.1, -0.5, 0.75);
        Star star4 = new Star(0.6, 0.1, 0.4, 2.75);
        Star star5 = new Star(0.8, -0.3, -0.3, 5.75);
        Star star5copy = new Star(0.8, -0.3, -0.3, 5.75);
        distanceTestHelper(star1, star5, 0.7233);
        distanceTestHelper(star5, star1, 0.7233);
        distanceTestHelper(star2, star2copy, 0.0);
        distanceTestHelper(star2, star3, 1.6348);
        distanceTestHelper(star3, star2, 1.6348);
        distanceTestHelper(star2, star4, 1.0840);
        distanceTestHelper(star4, star2, 1.0840);
        distanceTestHelper(star2, star5, 1.7986);
        distanceTestHelper(star5, star2, 1.7986);
        distanceTestHelper(star3, star4, 0.9552);
        distanceTestHelper(star4, star3, 0.9552);
        distanceTestHelper(star3, star5, 0.5315);
        distanceTestHelper(star5, star3, 0.5315);
        distanceTestHelper(star4, star5, 0.8307);
        distanceTestHelper(star5, star4, 0.8307);
        distanceTestHelper(star5, star5copy, 0.0);
    }
    /**
     * Performs one compareTo test on the given two stars, printing the result
     * as well as printing what numeric range (positive, negative, or 0) the
     * result should be in for comparison purposes.
     */
    public static void compareToTestHelper(Star star1, Star star2, double expected) {
        System.out.printf("%-22s .compareTo %-22s (should be %s)? ", star1,
                star2, signString(expected));
        System.out.println(signString(star1.compareTo(star2)));
    }
    /**
     * Performs one distance test on the given two stars, printing the result
     * as well as printing what the real number result should be for comparison.
     */
    public static void distanceTestHelper(Star star1, Star star2, double expected) {
        System.out.printf("%-22s .distance %-22s (should be %.4f)? %.4f\n",
                star1, star2, expected, star1.distance(star2));
    }
    /**
     * Performs one equals test on the given two stars, printing the result
     * as well as printing what the boolean result should be for comparison.
     */
    public static void equalsTestHelper(Star star1, Object star2, boolean expected) {
        System.out.printf("%-22s .equals %-22s (should be %-5s)? ",
                star1, star2, expected);
        System.out.println(star1.equals(star2));
    }
    /**
     * Helper to return a string about the sign of the given real number:
     * + for positive, - for negative, or 0 for 0.0.
     * This is used because compareTo can return any number in those ranges
     * and still be correct; it does not have to match any one specific value.
     */
    private static String signString(double value) {
        if (value > 0.0) {
            return "+";
        } else if (value < 0.0) {
            return "-";
        } else {
            return "0";
        }
    }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl.search;
import com.intellij.lang.ASTNode;
import com.intellij.lang.Language;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.impl.source.PsiFileImpl;
import com.intellij.psi.impl.source.tree.LeafElement;
import com.intellij.psi.impl.source.tree.TreeElement;
import com.intellij.psi.search.TextOccurenceProcessor;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.ConcurrencyUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.text.StringSearcher;
import gnu.trove.TIntArrayList;
import gnu.trove.TIntProcedure;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
public class LowLevelSearchUtil {
private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.search.LowLevelSearchUtil");
// TRUE/FALSE -> injected psi has been discovered and processor returned true/false;
// null -> there were nothing injected found
private static Boolean processInjectedFile(PsiElement element,
final TextOccurenceProcessor processor,
final StringSearcher searcher,
ProgressIndicator progress,
InjectedLanguageManager injectedLanguageManager) {
if (!(element instanceof PsiLanguageInjectionHost)) return null;
if (injectedLanguageManager == null) return null;
List<Pair<PsiElement,TextRange>> list = injectedLanguageManager.getInjectedPsiFiles(element);
if (list == null) return null;
for (Pair<PsiElement, TextRange> pair : list) {
final PsiElement injected = pair.getFirst();
if (!processElementsContainingWordInElement(processor, injected, searcher, false, progress)) return Boolean.FALSE;
}
return Boolean.TRUE;
}
  /**
   * Feeds every element covering {@code offset}, from the leaf up to {@code scope}, to the
   * processor (and, optionally, to injected-PSI processing first).
   *
   * @return null to stop the whole search, or the last found TreeElement
   * to be reused via the {@code lastElement} param in subsequent calls to avoid full tree rescan (n^2->n).
   * NOTE(review): when the scope has no AST node (useTree == false), leafNode stays null, so the
   * success path returns null as well — which the caller in processElementsContainingWordInElement
   * treats as "stop". Looks suspicious for AST-less PSI; confirm intended behavior.
   */
  private static TreeElement processTreeUp(@NotNull Project project,
                                           @NotNull TextOccurenceProcessor processor,
                                           @NotNull PsiElement scope,
                                           @NotNull StringSearcher searcher,
                                           final int offset,
                                           final boolean processInjectedPsi,
                                           ProgressIndicator progress,
                                           TreeElement lastElement) {
    final int scopeStartOffset = scope.getTextRange().getStartOffset();
    final int patternLength = searcher.getPatternLength();
    ASTNode scopeNode = scope.getNode();
    // two traversal modes: via the AST tree if one exists, otherwise via PSI parents
    boolean useTree = scopeNode != null;
    assert scope.isValid();
    int start;
    TreeElement leafNode = null;
    PsiElement leafElement = null;
    if (useTree) {
      // resume from the cached lastElement instead of descending from the root each time
      leafNode = findNextLeafElementAt(scopeNode, lastElement, offset);
      if (leafNode == null) return lastElement;
      start = offset - leafNode.getStartOffset() + scopeStartOffset;
    }
    else {
      if (scope instanceof PsiFile) {
        leafElement = ((PsiFile)scope).getViewProvider().findElementAt(offset, scope.getLanguage());
      }
      else {
        leafElement = scope.findElementAt(offset);
      }
      if (leafElement == null) return lastElement;
      assert leafElement.isValid();
      start = offset - leafElement.getTextRange().getStartOffset() + scopeStartOffset;
    }
    if (start < 0) {
      throw new AssertionError("offset=" + offset + " scopeStartOffset=" + scopeStartOffset + " leafElement=" + leafElement + " scope=" + scope);
    }
    InjectedLanguageManager injectedLanguageManager = InjectedLanguageManager.getInstance(project);
    // cache the found leaf for the next call (null in the non-tree mode, see NOTE above)
    lastElement = leafNode;
    boolean contains = false;
    PsiElement prev = null;
    TreeElement prevNode = null;
    PsiElement run = null;
    // walk upward until the scope element itself has been processed
    while (run != scope) {
      if (progress != null) progress.checkCanceled();
      if (useTree) {
        // 'start' is kept relative to the current element by adding the parent-relative offset
        start += prevNode == null ? 0 : prevNode.getStartOffsetInParent();
        prevNode = leafNode;
        run = leafNode.getPsi();
      }
      else {
        start += prev == null ? 0 : prev.getStartOffsetInParent();
        prev = run;
        run = leafElement;
      }
      if (!contains) contains = run.getTextLength() - start >= patternLength; //do not compute if already contains
      if (contains) {
        if (processInjectedPsi) {
          Boolean result = processInjectedFile(run, processor, searcher, progress, injectedLanguageManager);
          if (result != null) {
            // injected PSI was handled; propagate its verdict (null return means "stop")
            return result.booleanValue() ? lastElement : null;
          }
        }
        if (!processor.execute(run, start)) {
          return null;
        }
      }
      if (useTree) {
        leafNode = leafNode.getTreeParent();
        if (leafNode == null) break;
      }
      else {
        leafElement = leafElement.getParent();
        if (leafElement == null) break;
      }
    }
    assert run == scope: "Malbuilt PSI: scopeNode="+scope+"; leafNode="+run+"; isAncestor="+ PsiTreeUtil.isAncestor(scope, run, false);
    return lastElement;
  }
  /**
   * Finds the leaf element at {@code offset} (relative to {@code scopeNode}), resuming the
   * scan from {@code last} when available instead of descending from the scope root each time.
   *
   * @param last previously found element, or null to search from {@code scopeNode}
   */
  private static TreeElement findNextLeafElementAt(ASTNode scopeNode, TreeElement last, int offset) {
    int offsetR = offset;
    if (last !=null) {
      // translate the absolute offset into a distance past the end of 'last'
      offsetR -= last.getStartOffset() - scopeNode.getStartOffset() + last.getTextLength();
      while (offsetR >= 0) {
        TreeElement next = last.getTreeNext();
        if (next == null) {
          // no right sibling: climb one level and retry (offsetR unchanged)
          // NOTE(review): if 'last' walks past the tree root this dereferences null on the
          // next iteration — presumably offsets always stay inside the scope; confirm
          last = last.getTreeParent();
          continue;
        }
        int length = next.getTextLength();
        offsetR -= length;
        last = next;
      }
      // descend into the element the scan stopped in, with offsetR made relative to it
      scopeNode = last;
      offsetR += scopeNode.getTextLength();
    }
    return (LeafElement)scopeNode.findLeafElementAt(offsetR);
  }
  //@RequiresReadAction
  /**
   * Finds every occurrence of the searcher's pattern inside {@code scope}'s text and feeds all
   * PSI elements covering each occurrence to the processor (bottom-up, via processTreeUp).
   *
   * @param processInjectedPsi whether to also descend into language injections
   * @return false if the processor asked to stop, true otherwise (including degenerate ranges)
   */
  public static boolean processElementsContainingWordInElement(@NotNull final TextOccurenceProcessor processor,
                                                               @NotNull final PsiElement scope,
                                                               @NotNull final StringSearcher searcher,
                                                               final boolean processInjectedPsi,
                                                               final ProgressIndicator progress) {
    if (progress != null) progress.checkCanceled();
    PsiFile file = scope.getContainingFile();
    FileViewProvider viewProvider = file.getViewProvider();
    final CharSequence buffer = viewProvider.getContents();
    TextRange range = scope.getTextRange();
    if (range == null) {
      LOG.error("Element " + scope + " of class " + scope.getClass() + " has null range");
      return true;
    }
    final int scopeStart = range.getStartOffset();
    final int startOffset = scopeStart;
    int endOffset = range.getEndOffset();
    // element range outside of the file contents indicates stale/malformed PSI — log and bail out
    if (endOffset > buffer.length()) {
      diagnoseInvalidRange(scope, file, viewProvider, buffer, range);
      return true;
    }
    final Project project = file.getProject();
    // single-element cache threaded through processTreeUp calls to avoid rescanning the tree
    final TreeElement[] lastElement = {null};
    return processTextOccurrences(buffer, startOffset, endOffset, searcher, progress, new TIntProcedure() {
      @Override
      public boolean execute(int offset) {
        if (progress != null) progress.checkCanceled();
        // processTreeUp returns null to signal "stop the whole search"
        lastElement[0] = processTreeUp(project, processor, scope, searcher, offset - scopeStart, processInjectedPsi, progress,
                                       lastElement[0]);
        return lastElement[0] != null;
      }
    });
  }
private static void diagnoseInvalidRange(@NotNull PsiElement scope,
PsiFile file,
FileViewProvider viewProvider,
CharSequence buffer,
TextRange range) {
String msg = "Range for element: '" + scope + "' = " + range + " is out of file '" + file + "' range: " + file.getTextRange();
msg += "; file contents length: " + buffer.length();
msg += "\n file provider: " + viewProvider;
Document document = viewProvider.getDocument();
if (document != null) {
msg += "\n committed=" + PsiDocumentManager.getInstance(file.getProject()).isCommitted(document);
}
for (Language language : viewProvider.getLanguages()) {
final PsiFile root = viewProvider.getPsi(language);
msg += "\n root " + language + " length=" + root.getTextLength() + (root instanceof PsiFileImpl
? "; contentsLoaded=" + ((PsiFileImpl)root).isContentsLoaded() : "");
}
LOG.error(msg);
}
  // Weak-keyed cache: for each file text (keyed by CharSequence identity), remembers the
  // occurrence offsets already computed per StringSearcher so repeated searches over the same
  // text skip the scan. Entries vanish when the text becomes unreachable.
  private static final ConcurrentMap<CharSequence, Map<StringSearcher, int[]>> cache = ContainerUtil.createConcurrentWeakMap();
  /**
   * Invokes {@code processor} for every cached (or freshly computed) occurrence of the searcher's
   * pattern in {@code text} that lies within {@code [startOffset, endOffset)}.
   *
   * @param text        the text to scan
   * @param startOffset first offset (inclusive) whose occurrences are reported
   * @param endOffset   end offset (exclusive); must not exceed {@code text.length()}
   * @param searcher    the precompiled pattern searcher
   * @param progress    cancellation hook, may be null
   * @param processor   receives each occurrence offset; returning false stops the search
   * @return {@code false} iff the processor stopped the search
   * @throws IllegalArgumentException if {@code endOffset > text.length()}
   */
  public static boolean processTextOccurrences(@NotNull CharSequence text,
                                               int startOffset,
                                               int endOffset,
                                               @NotNull StringSearcher searcher,
                                               @Nullable ProgressIndicator progress,
                                               @NotNull TIntProcedure processor) {
    if (endOffset > text.length()) {
      throw new IllegalArgumentException("end: " + endOffset + " > length: "+text.length());
    }
    // Occurrences are always computed over the WHOLE text so the cached array can serve any
    // [startOffset, endOffset) window later.
    Map<StringSearcher, int[]> cachedMap = cache.get(text);
    int[] cachedOccurrences = cachedMap == null ? null : cachedMap.get(searcher);
    if (cachedOccurrences == null) {
      TIntArrayList occurrences = new TIntArrayList();
      for (int index = 0; index < text.length(); index++) {
        if (progress != null) progress.checkCanceled();
        //noinspection AssignmentToForLoopParameter
        index = searcher.scan(text, index, text.length());
        if (index < 0) break;
        // Respect whole-word/escape-sequence semantics before recording a hit.
        if (checkJavaIdentifier(text, 0, text.length(), searcher, index)) {
          occurrences.add(index);
        }
      }
      cachedOccurrences = occurrences.toNativeArray();
      if (cachedMap == null) {
        // Another thread may have installed a map concurrently; cacheOrGet keeps the winner.
        cachedMap = ConcurrencyUtil.cacheOrGet(cache, text, ContainerUtil.createConcurrentSoftMap());
      }
      // Racing threads compute identical arrays for the same (text, searcher), so a lost
      // put only wastes work — presumably intentional; the result is the same either way.
      cachedMap.put(searcher, cachedOccurrences);
    }
    // Occurrences were recorded in ascending order, so we can stop at the first one past the window.
    for (int index : cachedOccurrences) {
      if (index >= endOffset) break;
      if (index >= startOffset && !processor.execute(index)) {
        return false;
      }
    }
    return true;
  }
  /**
   * Verifies that the match at {@code index} is a whole Java identifier, i.e. not merely a
   * substring of a longer identifier. Always passes when the searcher is not in
   * java-identifier mode. When escape handling is on, a preceding backslash parity check
   * decides whether the apparent boundary is real or part of an escape sequence.
   *
   * @return {@code true} if the occurrence at {@code index} should be reported
   */
  private static boolean checkJavaIdentifier(@NotNull CharSequence text,
                                             int startOffset,
                                             int endOffset,
                                             @NotNull StringSearcher searcher,
                                             int index) {
    if (!searcher.isJavaIdentifier()) {
      return true;
    }
    if (index > startOffset) {
      char c = text.charAt(index - 1);
      // Preceding identifier character ('$' excluded) normally means mid-identifier — reject,
      // unless the character is itself escaped.
      if (Character.isJavaIdentifierPart(c) && c != '$') {
        if (!searcher.isHandleEscapeSequences() || index < 2 || isEscapedBackslash(text, startOffset, index - 2)) { //escape sequence
          return false;
        }
      }
      // Non-identifier predecessor: still reject when it is an unescaped backslash starting an
      // escape sequence that would consume the match.
      else if (index > 0 && searcher.isHandleEscapeSequences() && !isEscapedBackslash(text, startOffset, index - 1)) {
        return false;
      }
    }
    // Reject when the character right after the pattern continues the identifier.
    final int patternLength = searcher.getPattern().length();
    if (index + patternLength < endOffset) {
      char c = text.charAt(index + patternLength);
      if (Character.isJavaIdentifierPart(c) && c != '$') {
        return false;
      }
    }
    return true;
  }
private static boolean isEscapedBackslash(CharSequence text, int startOffset, int index) {
return StringUtil.isEscapedBackslash(text, startOffset, index);
}
}
| |
/*
* Copyright 2016 Damian Terlecki.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.t3r1jj.gammaj.controllers;
import io.github.t3r1jj.gammaj.hotkeys.HotkeyPollerThread;
import io.github.t3r1jj.gammaj.model.ColorProfile;
import io.github.t3r1jj.gammaj.model.Gamma;
import io.github.t3r1jj.gammaj.model.Gamma.Channel;
import io.github.t3r1jj.gammaj.ViewModel;
import java.net.URL;
import java.util.Arrays;
import java.util.ResourceBundle;
import javafx.beans.binding.Bindings;
import javafx.beans.property.IntegerProperty;
import javafx.beans.property.ReadOnlyStringWrapper;
import javafx.beans.property.SimpleIntegerProperty;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.fxml.FXML;
import javafx.scene.Cursor;
import javafx.scene.control.TableColumn;
import javafx.scene.control.TableView;
import javafx.scene.control.cell.TextFieldTableCell;
import javafx.scene.input.MouseEvent;
import javafx.util.Callback;
import javafx.util.converter.IntegerStringConverter;
/**
 * Tab controller for manual, per-value gamma ramp editing. The ramp can be edited either by
 * drawing on the canvas (freehand, with Bresenham interpolation between drag events) or by typing
 * values into the table; both views are backed by {@link #gammaRampProperties} and every change is
 * pushed to the device through the view model.
 */
public class ManualTabController extends AbstractTabController {

    /** Editable ramp model: [channel index][ramp index] -> gamma word value, bound to the table. */
    private IntegerProperty[][] gammaRampProperties;
    /** Ramp index of the previous canvas event; line-interpolation start point while dragging. */
    private int lastXIndex;
    /** Ramp value of the previous canvas event; line-interpolation start point while dragging. */
    private int lastValue;
    @FXML
    private TableView tableView;

    public ManualTabController(ViewModel viewModel) {
        super(viewModel);
    }

    @Override
    public void initialize(URL url, ResourceBundle rb) {
        super.initialize(url, rb);
        canvas.setCursor(Cursor.CROSSHAIR);
        initializeTable();
        addCanvasHandlers();
        if (!viewModel.assistedAdjustmentProperty().get()) {
            viewModel.setCurrentProfile(viewModel.getCurrentDisplay().getColorProfile());
        }
        // React to switching between assisted and manual adjustment: entering manual mode reloads
        // the local profile and attaches this tab's listeners; leaving it detaches them.
        viewModel.assistedAdjustmentProperty().addListener((observable, oldValue, nowAssisted) -> {
            if (!nowAssisted) {
                if (isCurrentProfileDefault() || isCurrentDisplayProfileAssisted()) {
                    resetProfile();
                }
                loadLocalProfile();
                loadRampViewModel();
                updateRgbRadioButtons();
                drawGammaRamp();
                addTabListeners();
            } else {
                removeTabListeners();
            }
        });
        drawGammaRamp();
    }

    @Override
    protected void handleLoadLocalProfile() {
        // An assisted profile cannot be shown here; hand control back to the assisted tab.
        if (isCurrentDisplayProfileAssisted()) {
            viewModel.assistedAdjustmentProperty().set(true);
        } else {
            loadLocalProfile();
        }
    }

    /** Loads the current display's color profile into the hotkey input, ramp model and device. */
    private void loadLocalProfile() {
        loadingProfile = true;
        ColorProfile colorProfile = viewModel.getCurrentDisplay().getColorProfile();
        HotkeyPollerThread hotkey = colorProfile.getHotkey();
        hotkeyInput.setHotkey(hotkey);
        viewModel.getCurrentDisplay().loadModelFromProfile(true);
        viewModel.getCurrentDisplay().setDeviceGammaRamp();
        loadRampViewModel();
        drawGammaRamp();
        loadingProfile = false;
    }

    /** True when a non-default profile saved in assisted mode is selected for the display. */
    private boolean isCurrentDisplayProfileAssisted() {
        return !isCurrentProfileDefault() && viewModel.getCurrentDisplay().getColorProfile().getModeIsAssissted();
    }

    @Override
    protected void resetColorAdjustment() {
        loadRampViewModel();
    }

    @Override
    protected void bindTabListeners() {
        // Only attach listeners while this (manual) tab owns the adjustment mode.
        if (!viewModel.assistedAdjustmentProperty().get()) {
            addTabListeners();
        }
    }

    /** Wires mouse handlers that let the user draw the ramp directly onto the canvas. */
    private void addCanvasHandlers() {
        canvas.addEventHandler(MouseEvent.MOUSE_PRESSED, event -> {
            // Anchor the interpolation start at the press position before handling the event.
            lastXIndex = rampIndexFromX(event.getX());
            lastValue = rampValueFromY(event.getY());
            handleCanvasEvent(event);
        });
        canvas.addEventHandler(MouseEvent.MOUSE_DRAGGED, this::handleCanvasEvent);
        canvas.addEventHandler(MouseEvent.MOUSE_RELEASED, event -> {
            handleCanvasEvent(event);
            resetProfile();
        });
    }

    /** Clamps a canvas x coordinate to the canvas and maps it to a ramp index [0, CHANNEL_VALUES_COUNT - 1]. */
    private int rampIndexFromX(double eventX) {
        double x = Math.max(0, Math.min(eventX, canvas.getWidth()));
        return (int) ((x / canvas.getWidth()) * (Gamma.CHANNEL_VALUES_COUNT - 1));
    }

    /** Clamps a canvas y coordinate to the canvas and maps it (inverted) to a gamma word [0, MAX_WORD]. */
    private int rampValueFromY(double eventY) {
        double y = Math.max(0, Math.min(eventY, canvas.getHeight()));
        return (int) (((canvas.getHeight() - y) / canvas.getHeight()) * Gamma.MAX_WORD);
    }

    /**
     * Draws a Bresenham line in the ramp model from the last event position to the current one for
     * every selected channel, then pushes the ramp to the device and repaints the canvas.
     */
    private void handleCanvasEvent(MouseEvent event) {
        int x = rampIndexFromX(event.getX());
        int value = rampValueFromY(event.getY());
        int dx = Math.abs(x - lastXIndex);
        int dy = Math.abs(value - lastValue);
        int sx = (lastXIndex < x) ? 1 : -1;
        int sy = (lastValue < value) ? 1 : -1;
        int error = dx - dy;
        while (true) {
            for (Channel channel : viewModel.selectedChannelsProperty()) {
                viewModel.getCurrentDisplay().setGammaRampValue(channel, lastXIndex, lastValue);
                gammaRampProperties[channel.getIndex()][lastXIndex].set(lastValue);
            }
            if (lastXIndex == x && lastValue == value) {
                break;
            }
            int doubleError = error + error;
            if (doubleError > -dy) {
                error = error - dy;
                lastXIndex = lastXIndex + sx;
            }
            if (doubleError < dx) {
                error = error + dx;
                lastValue = lastValue + sy;
            }
        }
        viewModel.getCurrentDisplay().setDeviceGammaRamp();
        drawGammaRamp();
    }

    @Override
    protected void handleResetButtonAction(ActionEvent event) {
        super.handleResetButtonAction(event);
        loadRampViewModel();
    }

    /**
     * Builds the ramp table: one header column holding the channel names plus one editable column
     * per ramp index. Rows 0..2 correspond to the red, green and blue channels.
     */
    private void initializeTable() {
        int[][] gammaRamp = viewModel.getCurrentDisplay().getGammaRamp();
        gammaRampProperties = new SimpleIntegerProperty[gammaRamp.length][gammaRamp[0].length];
        for (int y = 0; y < gammaRamp.length; y++) {
            for (int x = 0; x < gammaRamp[y].length; x++) {
                gammaRampProperties[y][x] = new SimpleIntegerProperty(gammaRamp[y][x]);
            }
        }
        TableColumn<Integer, String> firstTableColumn = new TableColumn<>(resources.getString("channel_index"));
        firstTableColumn.getStyleClass().add("my-header-column");
        firstTableColumn.sortableProperty().set(false);
        firstTableColumn.setCellValueFactory(param -> {
            switch (param.getValue()) {
                case 0:
                    return new ReadOnlyStringWrapper(resources.getString("red"));
                case 1:
                    return new ReadOnlyStringWrapper(resources.getString("green"));
                case 2:
                    return new ReadOnlyStringWrapper(resources.getString("blue"));
                default:
                    return new ReadOnlyStringWrapper(resources.getString("invalid"));
            }
        });
        tableView.getColumns().add(firstTableColumn);
        for (int i = 0; i < Gamma.CHANNEL_VALUES_COUNT; i++) {
            TableColumn<Integer, Integer> column = new TableColumn<>(String.valueOf(i));
            column.sortableProperty().set(false);
            final int columnIndex = i;
            column.setCellValueFactory(param -> gammaRampProperties[param.getValue()][columnIndex].asObject());
            column.setCellFactory(TextFieldTableCell.<Integer, Integer>forTableColumn(new IntegerStringConverter()));
            column.setOnEditCommit(event -> {
                resetProfile();
                // Bugfix: clamp once to [0, MAX_WORD] and push the SAME value to both the table
                // model and the device. Previously the device received the raw, possibly
                // out-of-range input while the table showed the clamped one, so the two diverged.
                int newValue = Math.max(0, Math.min(event.getNewValue(), Gamma.MAX_WORD));
                gammaRampProperties[event.getRowValue()][columnIndex].set(newValue);
                viewModel.getCurrentDisplay().setGammaRampValue(Channel.getChannel(event.getRowValue()), columnIndex, newValue);
                viewModel.getCurrentDisplay().setDeviceGammaRamp();
                drawGammaRamp();
            });
            column.setEditable(true);
            column.setPrefWidth(60);
            tableView.getColumns().add(column);
        }
        tableView.getItems().addAll(Arrays.asList(0, 1, 2));
        tableView.setEditable(true);
        tableView.setFixedCellSize(25);
        tableView.prefHeightProperty().bind(Bindings.size(tableView.getItems()).multiply(tableView.getFixedCellSize()).add(40));
    }

    /** Copies the display's current gamma ramp into the table/canvas view model. */
    private void loadRampViewModel() {
        int[][] gammaRamp = viewModel.getCurrentDisplay().getGammaRamp();
        for (int y = 0; y < gammaRamp.length; y++) {
            for (int x = 0; x < gammaRamp[y].length; x++) {
                gammaRampProperties[y][x].set(gammaRamp[y][x]);
            }
        }
    }

    @Override
    protected void handleInvertButtonAction(ActionEvent event) {
        if (!loadingProfile) {
            resetProfile();
            // Mirror every selected channel's ramp around MAX_WORD / 2.
            for (Gamma.Channel channel : viewModel.selectedChannelsProperty()) {
                for (int x = 0; x < Gamma.CHANNEL_VALUES_COUNT; x++) {
                    gammaRampProperties[channel.getIndex()][x].set(Gamma.MAX_WORD - gammaRampProperties[channel.getIndex()][x].get());
                    viewModel.getCurrentDisplay().setGammaRampValue(channel, x, gammaRampProperties[channel.getIndex()][x].get());
                }
            }
            viewModel.getCurrentDisplay().setDeviceGammaRamp();
            drawGammaRamp();
        }
    }

    @Override
    protected void saveModeSettings(ColorProfile newColorProfile) {
        // Profiles saved from this tab are marked as manual (non-assisted).
        newColorProfile.setModeIsAssissted(false);
    }

    @Override
    protected void resetProfile() {
        super.resetProfile();
        viewModel.getCurrentDisplay().getColorProfile().setGammaRamp(viewModel.getCurrentDisplay().getGammaRamp());
    }
}
| |
/**
*/
package SurveyModel.impl;
import SurveyModel.Category;
import SurveyModel.Page;
import SurveyModel.SurveyModelPackage;
import java.util.Collection;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Category</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link SurveyModel.impl.CategoryImpl#getPages <em>Pages</em>}</li>
* <li>{@link SurveyModel.impl.CategoryImpl#getTitle <em>Title</em>}</li>
* <li>{@link SurveyModel.impl.CategoryImpl#getDescription <em>Description</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class CategoryImpl extends MinimalEObjectImpl.Container implements Category {
	// NOTE(review): EMF-generated class (see @generated tags). Code is intentionally left
	// byte-identical — hand restyling would be clobbered by the next genmodel run. Only
	// comments were added; to customize a method, regenerate or mark it "@generated NOT".
	/**
	 * The cached value of the '{@link #getPages() <em>Pages</em>}' containment reference list.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getPages()
	 * @generated
	 * @ordered
	 */
	protected EList<Page> pages;
	/**
	 * The default value of the '{@link #getTitle() <em>Title</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getTitle()
	 * @generated
	 * @ordered
	 */
	protected static final String TITLE_EDEFAULT = null;
	/**
	 * The cached value of the '{@link #getTitle() <em>Title</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getTitle()
	 * @generated
	 * @ordered
	 */
	protected String title = TITLE_EDEFAULT;
	/**
	 * The default value of the '{@link #getDescription() <em>Description</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getDescription()
	 * @generated
	 * @ordered
	 */
	protected static final String DESCRIPTION_EDEFAULT = null;
	/**
	 * The cached value of the '{@link #getDescription() <em>Description</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getDescription()
	 * @generated
	 * @ordered
	 */
	protected String description = DESCRIPTION_EDEFAULT;
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected CategoryImpl() {
		super();
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return SurveyModelPackage.Literals.CATEGORY;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Lazily creates the containment list on first access; never returns null.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EList<Page> getPages() {
		if (pages == null) {
			pages = new EObjectContainmentEList<Page>(Page.class, this, SurveyModelPackage.CATEGORY__PAGES);
		}
		return pages;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getTitle() {
		return title;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Sets the title and fires a SET notification when adapters are attached.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setTitle(String newTitle) {
		String oldTitle = title;
		title = newTitle;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, SurveyModelPackage.CATEGORY__TITLE, oldTitle, title));
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getDescription() {
		return description;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Sets the description and fires a SET notification when adapters are attached.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setDescription(String newDescription) {
		String oldDescription = description;
		description = newDescription;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, SurveyModelPackage.CATEGORY__DESCRIPTION, oldDescription, description));
	}
	/**
	 * <!-- begin-user-doc -->
	 * Removes a contained Page during bidirectional reference maintenance.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case SurveyModelPackage.CATEGORY__PAGES:
				return ((InternalEList<?>)getPages()).basicRemove(otherEnd, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case SurveyModelPackage.CATEGORY__PAGES:
				return getPages();
			case SurveyModelPackage.CATEGORY__TITLE:
				return getTitle();
			case SurveyModelPackage.CATEGORY__DESCRIPTION:
				return getDescription();
		}
		return super.eGet(featureID, resolve, coreType);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@SuppressWarnings("unchecked")
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case SurveyModelPackage.CATEGORY__PAGES:
				getPages().clear();
				getPages().addAll((Collection<? extends Page>)newValue);
				return;
			case SurveyModelPackage.CATEGORY__TITLE:
				setTitle((String)newValue);
				return;
			case SurveyModelPackage.CATEGORY__DESCRIPTION:
				setDescription((String)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case SurveyModelPackage.CATEGORY__PAGES:
				getPages().clear();
				return;
			case SurveyModelPackage.CATEGORY__TITLE:
				setTitle(TITLE_EDEFAULT);
				return;
			case SurveyModelPackage.CATEGORY__DESCRIPTION:
				setDescription(DESCRIPTION_EDEFAULT);
				return;
		}
		super.eUnset(featureID);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reports whether a feature differs from its default (unset) state.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case SurveyModelPackage.CATEGORY__PAGES:
				return pages != null && !pages.isEmpty();
			case SurveyModelPackage.CATEGORY__TITLE:
				return TITLE_EDEFAULT == null ? title != null : !TITLE_EDEFAULT.equals(title);
			case SurveyModelPackage.CATEGORY__DESCRIPTION:
				return DESCRIPTION_EDEFAULT == null ? description != null : !DESCRIPTION_EDEFAULT.equals(description);
		}
		return super.eIsSet(featureID);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String toString() {
		if (eIsProxy()) return super.toString();
		StringBuffer result = new StringBuffer(super.toString());
		result.append(" (title: ");
		result.append(title);
		result.append(", description: ");
		result.append(description);
		result.append(')');
		return result.toString();
	}
} //CategoryImpl
| |
/*
* Copyright 2015 Torridity.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.tor.tribes.util;
import de.tor.tribes.util.xml.JaxenUtils;
import java.awt.Dimension;
import java.io.File;
import org.apache.log4j.Logger;
import org.jdom.Document;
/**
*
* @author Charon
*/
/**
 * Singleton holding the settings of the currently selected game server, parsed from the server's
 * {@code settings.xml}. Every value has a sensible default that is used when the corresponding
 * XML node is missing or unparsable, so {@link #loadSettings(String)} is best-effort per field.
 */
public class ServerSettings {

    private static Logger logger = Logger.getLogger("ServerSettings");
    private String SERVER_ID = "de26";
    // Coordinate system: 2 => 1000x1000 map, 1 => 500x500 map.
    private int COORD = 2;
    private Dimension mapSize = null;
    private int BONUS_NEW = 0;
    private int SNOB_RANGE = 70;
    private boolean church = false;
    private boolean millisArrival = true;
    private double speed = 1.0;
    private double riseSpeed = 1.0;
    private boolean nightBonusActive = true;
    private int nightBonusStartHour = 0;
    private int nightBonusEndHour = 8;
    private static ServerSettings SINGLETON = null;

    public static synchronized ServerSettings getSingleton() {
        if (SINGLETON == null) {
            SINGLETON = new ServerSettings();
        }
        return SINGLETON;
    }

    /**
     * Loads the settings of the given server from {@code <server dir>/<id>/settings.xml}.
     * Each field falls back to its default when its node is missing or invalid.
     *
     * @param pServerID the server identifier, e.g. "de26"
     * @return {@code true} if the settings file could be opened and parsed, {@code false} otherwise
     */
    public boolean loadSettings(String pServerID) {
        try {
            logger.debug("Loading server settings");
            setServerID(pServerID);
            String serverPath = Constants.SERVER_DIR + "/" + SERVER_ID + "/settings.xml";
            logger.debug("Parse server settings from '" + serverPath + "'");
            Document d = JaxenUtils.getDocument(new File(serverPath));
            logger.debug(" - reading map system");
            // Map size is not read from settings.xml (all supported servers use 1000x1000);
            // the former try/catch retried with the same constant and was dead code.
            setCoordType(1000);
            logger.debug(" - reading bonus type");
            try {
                setNewBonus(Integer.parseInt(JaxenUtils.getNodeValue(d, "//coord/bonus_new")));
            } catch (Exception inner) {
                setNewBonus(0);
            }
            logger.debug(" - reading snob distance");
            try {
                setSnobRange(Integer.parseInt(JaxenUtils.getNodeValue(d, "//snob/max_dist")));
            } catch (Exception inner) {
                setSnobRange(70);
            }
            logger.debug(" - reading church setting");
            try {
                setChurch(Integer.parseInt(JaxenUtils.getNodeValue(d, "//game/church")) == 1);
            } catch (Exception inner) {
                setChurch(false);
            }
            logger.debug(" - reading millis setting");
            try {
                setMillisArrival(Integer.parseInt(JaxenUtils.getNodeValue(d, "//misc/millis_arrival")) == 1);
            } catch (Exception inner) {
                try {//new settings is under "commands"
                    setMillisArrival(Integer.parseInt(JaxenUtils.getNodeValue(d, "//commands/millis_arrival")) == 1);
                } catch (Exception inner2) {
                    //empty or invalid value...use no millis
                    setMillisArrival(false);
                }
            }
            logger.debug(" - reading server speed");
            try {
                setSpeed(Double.parseDouble(JaxenUtils.getNodeValue(d, "//speed")));
            } catch (Exception inner) {
                setSpeed(1.0);
            }
            logger.debug(" - reading rise speed");
            try {
                setRiseSpeed(Double.parseDouble(JaxenUtils.getNodeValue(d, "//snob/rise")));
            } catch (Exception inner) {
                setRiseSpeed(1.0);
            }
            logger.debug(" - reading night bonus");
            try {
                setNightBonusActive(Integer.parseInt(JaxenUtils.getNodeValue(d, "//night/active")) == 1);
            } catch (Exception inner) {
                setNightBonusActive(true);
            }
            logger.debug(" - reading night bonus start hour");
            try {
                setNightBonusStartHour(Integer.parseInt(JaxenUtils.getNodeValue(d, "//night/start_hour")));
            } catch (Exception inner) {
                setNightBonusStartHour(0);
            }
            logger.debug(" - reading night bonus end hour");
            try {
                // Bugfix: this previously called setNightBonusStartHour, so the configured end
                // hour overwrote the start hour and the end hour silently kept its default.
                setNightBonusEndHour(Integer.parseInt(JaxenUtils.getNodeValue(d, "//night/end_hour")));
            } catch (Exception inner) {
                setNightBonusEndHour(8);
            }
        } catch (Exception e) {
            logger.error("Failed to load server settings", e);
            return false;
        }
        logger.debug("Successfully read settings for server '" + SERVER_ID + "'");
        return true;
    }

    public void setServerID(String pServerID) {
        SERVER_ID = pServerID;
    }

    public String getServerID() {
        return SERVER_ID;
    }

    /**
     * Sets the coordinate system from the map size: 1000 => COORD 2, 500 => COORD 1.
     * Any other size falls back to 1000x1000 with a warning — the previous implementation
     * threw although its own message promised exactly this fallback.
     */
    public void setCoordType(int pMapSize) {
        if (pMapSize == 500) {
            COORD = 1;
        } else {
            if (pMapSize != 1000) {
                logger.warn("Invalid map size (" + pMapSize + "). Falling back to 1000x1000.");
                pMapSize = 1000;
            }
            COORD = 2;
        }
        mapSize = new Dimension(pMapSize, pMapSize);
    }

    public int getCoordType() {
        return COORD;
    }

    /** Returns the map dimension, defaulting to 1000x1000 when no settings were loaded yet. */
    public Dimension getMapDimension() {
        if (mapSize == null) {
            return new Dimension(1000, 1000);
        }
        return mapSize;
    }

    public void setNewBonus(int pNewBonus) {
        BONUS_NEW = pNewBonus;
    }

    public int getNewBonus() {
        return BONUS_NEW;
    }

    public void setSnobRange(int pSnobRange) {
        SNOB_RANGE = pSnobRange;
    }

    public int getSnobRange() {
        return SNOB_RANGE;
    }

    public void setChurch(boolean v) {
        church = v;
    }

    public boolean isChurch() {
        return church;
    }

    public void setMillisArrival(boolean v) {
        millisArrival = v;
    }

    public boolean isMillisArrival() {
        return millisArrival;
    }

    public void setSpeed(double speed) {
        this.speed = speed;
    }

    public double getSpeed() {
        return speed;
    }

    /**
     * Sets the noble rise speed. Bugfix: previously wrote {@code this.speed}, clobbering the
     * server speed and never storing the rise speed.
     */
    public void setRiseSpeed(double riseSpeed) {
        this.riseSpeed = riseSpeed;
    }

    /** Returns the noble rise speed. Bugfix: previously returned the server speed instead. */
    public double getRiseSpeed() {
        return riseSpeed;
    }

    public void setNightBonusActive(boolean nightBonusActive) {
        this.nightBonusActive = nightBonusActive;
    }

    public boolean isNightBonusActive() {
        return nightBonusActive;
    }

    public void setNightBonusStartHour(int nightBonusStartHour) {
        this.nightBonusStartHour = nightBonusStartHour;
    }

    public int getNightBonusStartHour() {
        return nightBonusStartHour;
    }

    public void setNightBonusEndHour(int nightBonusEndHour) {
        this.nightBonusEndHour = nightBonusEndHour;
    }

    public int getNightBonusEndHour() {
        return nightBonusEndHour;
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.glue;
import javax.annotation.Generated;
import com.amazonaws.services.glue.model.*;
/**
* Abstract implementation of {@code AWSGlueAsync}. Convenient method forms pass through to the corresponding overload
* that takes a request object and an {@code AsyncHandler}, which throws an {@code UnsupportedOperationException}.
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AbstractAWSGlueAsync extends AbstractAWSGlue implements AWSGlueAsync {
    // NOTE(review): AWS-SDK-generated boilerplate (see @Generated on the class). Every operation
    // follows the same pattern: the single-argument convenience overload delegates to the
    // two-argument form with a null AsyncHandler, and the two-argument form always throws
    // UnsupportedOperationException — subclasses override only the operations they support.
    // Code left byte-identical; regenerating would discard manual edits.
    protected AbstractAWSGlueAsync() {
    }

    @Override
    public java.util.concurrent.Future<BatchCreatePartitionResult> batchCreatePartitionAsync(BatchCreatePartitionRequest request) {
        return batchCreatePartitionAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<BatchCreatePartitionResult> batchCreatePartitionAsync(BatchCreatePartitionRequest request,
            com.amazonaws.handlers.AsyncHandler<BatchCreatePartitionRequest, BatchCreatePartitionResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<BatchDeleteConnectionResult> batchDeleteConnectionAsync(BatchDeleteConnectionRequest request) {
        return batchDeleteConnectionAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<BatchDeleteConnectionResult> batchDeleteConnectionAsync(BatchDeleteConnectionRequest request,
            com.amazonaws.handlers.AsyncHandler<BatchDeleteConnectionRequest, BatchDeleteConnectionResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<BatchDeletePartitionResult> batchDeletePartitionAsync(BatchDeletePartitionRequest request) {
        return batchDeletePartitionAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<BatchDeletePartitionResult> batchDeletePartitionAsync(BatchDeletePartitionRequest request,
            com.amazonaws.handlers.AsyncHandler<BatchDeletePartitionRequest, BatchDeletePartitionResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<BatchDeleteTableResult> batchDeleteTableAsync(BatchDeleteTableRequest request) {
        return batchDeleteTableAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<BatchDeleteTableResult> batchDeleteTableAsync(BatchDeleteTableRequest request,
            com.amazonaws.handlers.AsyncHandler<BatchDeleteTableRequest, BatchDeleteTableResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<BatchDeleteTableVersionResult> batchDeleteTableVersionAsync(BatchDeleteTableVersionRequest request) {
        return batchDeleteTableVersionAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<BatchDeleteTableVersionResult> batchDeleteTableVersionAsync(BatchDeleteTableVersionRequest request,
            com.amazonaws.handlers.AsyncHandler<BatchDeleteTableVersionRequest, BatchDeleteTableVersionResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<BatchGetBlueprintsResult> batchGetBlueprintsAsync(BatchGetBlueprintsRequest request) {
        return batchGetBlueprintsAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<BatchGetBlueprintsResult> batchGetBlueprintsAsync(BatchGetBlueprintsRequest request,
            com.amazonaws.handlers.AsyncHandler<BatchGetBlueprintsRequest, BatchGetBlueprintsResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<BatchGetCrawlersResult> batchGetCrawlersAsync(BatchGetCrawlersRequest request) {
        return batchGetCrawlersAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<BatchGetCrawlersResult> batchGetCrawlersAsync(BatchGetCrawlersRequest request,
            com.amazonaws.handlers.AsyncHandler<BatchGetCrawlersRequest, BatchGetCrawlersResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<BatchGetDevEndpointsResult> batchGetDevEndpointsAsync(BatchGetDevEndpointsRequest request) {
        return batchGetDevEndpointsAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<BatchGetDevEndpointsResult> batchGetDevEndpointsAsync(BatchGetDevEndpointsRequest request,
            com.amazonaws.handlers.AsyncHandler<BatchGetDevEndpointsRequest, BatchGetDevEndpointsResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<BatchGetJobsResult> batchGetJobsAsync(BatchGetJobsRequest request) {
        return batchGetJobsAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<BatchGetJobsResult> batchGetJobsAsync(BatchGetJobsRequest request,
            com.amazonaws.handlers.AsyncHandler<BatchGetJobsRequest, BatchGetJobsResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<BatchGetPartitionResult> batchGetPartitionAsync(BatchGetPartitionRequest request) {
        return batchGetPartitionAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<BatchGetPartitionResult> batchGetPartitionAsync(BatchGetPartitionRequest request,
            com.amazonaws.handlers.AsyncHandler<BatchGetPartitionRequest, BatchGetPartitionResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<BatchGetTriggersResult> batchGetTriggersAsync(BatchGetTriggersRequest request) {
        return batchGetTriggersAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<BatchGetTriggersResult> batchGetTriggersAsync(BatchGetTriggersRequest request,
            com.amazonaws.handlers.AsyncHandler<BatchGetTriggersRequest, BatchGetTriggersResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<BatchGetWorkflowsResult> batchGetWorkflowsAsync(BatchGetWorkflowsRequest request) {
        return batchGetWorkflowsAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<BatchGetWorkflowsResult> batchGetWorkflowsAsync(BatchGetWorkflowsRequest request,
            com.amazonaws.handlers.AsyncHandler<BatchGetWorkflowsRequest, BatchGetWorkflowsResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<BatchStopJobRunResult> batchStopJobRunAsync(BatchStopJobRunRequest request) {
        return batchStopJobRunAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<BatchStopJobRunResult> batchStopJobRunAsync(BatchStopJobRunRequest request,
            com.amazonaws.handlers.AsyncHandler<BatchStopJobRunRequest, BatchStopJobRunResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<BatchUpdatePartitionResult> batchUpdatePartitionAsync(BatchUpdatePartitionRequest request) {
        return batchUpdatePartitionAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<BatchUpdatePartitionResult> batchUpdatePartitionAsync(BatchUpdatePartitionRequest request,
            com.amazonaws.handlers.AsyncHandler<BatchUpdatePartitionRequest, BatchUpdatePartitionResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<CancelMLTaskRunResult> cancelMLTaskRunAsync(CancelMLTaskRunRequest request) {
        return cancelMLTaskRunAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<CancelMLTaskRunResult> cancelMLTaskRunAsync(CancelMLTaskRunRequest request,
            com.amazonaws.handlers.AsyncHandler<CancelMLTaskRunRequest, CancelMLTaskRunResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<CheckSchemaVersionValidityResult> checkSchemaVersionValidityAsync(CheckSchemaVersionValidityRequest request) {
        return checkSchemaVersionValidityAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<CheckSchemaVersionValidityResult> checkSchemaVersionValidityAsync(CheckSchemaVersionValidityRequest request,
            com.amazonaws.handlers.AsyncHandler<CheckSchemaVersionValidityRequest, CheckSchemaVersionValidityResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
// Auto-generated async stubs (AWS Glue Create* operations). Request-only overloads
// delegate with a null AsyncHandler; handler overloads always throw in this abstract base.
@Override
public java.util.concurrent.Future<CreateBlueprintResult> createBlueprintAsync(CreateBlueprintRequest request) {
    return createBlueprintAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreateBlueprintResult> createBlueprintAsync(CreateBlueprintRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateBlueprintRequest, CreateBlueprintResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<CreateClassifierResult> createClassifierAsync(CreateClassifierRequest request) {
    return createClassifierAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreateClassifierResult> createClassifierAsync(CreateClassifierRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateClassifierRequest, CreateClassifierResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<CreateConnectionResult> createConnectionAsync(CreateConnectionRequest request) {
    return createConnectionAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreateConnectionResult> createConnectionAsync(CreateConnectionRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateConnectionRequest, CreateConnectionResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<CreateCrawlerResult> createCrawlerAsync(CreateCrawlerRequest request) {
    return createCrawlerAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreateCrawlerResult> createCrawlerAsync(CreateCrawlerRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateCrawlerRequest, CreateCrawlerResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<CreateDatabaseResult> createDatabaseAsync(CreateDatabaseRequest request) {
    return createDatabaseAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreateDatabaseResult> createDatabaseAsync(CreateDatabaseRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateDatabaseRequest, CreateDatabaseResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<CreateDevEndpointResult> createDevEndpointAsync(CreateDevEndpointRequest request) {
    return createDevEndpointAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreateDevEndpointResult> createDevEndpointAsync(CreateDevEndpointRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateDevEndpointRequest, CreateDevEndpointResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<CreateJobResult> createJobAsync(CreateJobRequest request) {
    return createJobAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreateJobResult> createJobAsync(CreateJobRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateJobRequest, CreateJobResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<CreateMLTransformResult> createMLTransformAsync(CreateMLTransformRequest request) {
    return createMLTransformAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreateMLTransformResult> createMLTransformAsync(CreateMLTransformRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateMLTransformRequest, CreateMLTransformResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<CreatePartitionResult> createPartitionAsync(CreatePartitionRequest request) {
    return createPartitionAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreatePartitionResult> createPartitionAsync(CreatePartitionRequest request,
        com.amazonaws.handlers.AsyncHandler<CreatePartitionRequest, CreatePartitionResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<CreatePartitionIndexResult> createPartitionIndexAsync(CreatePartitionIndexRequest request) {
    return createPartitionIndexAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreatePartitionIndexResult> createPartitionIndexAsync(CreatePartitionIndexRequest request,
        com.amazonaws.handlers.AsyncHandler<CreatePartitionIndexRequest, CreatePartitionIndexResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<CreateRegistryResult> createRegistryAsync(CreateRegistryRequest request) {
    return createRegistryAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreateRegistryResult> createRegistryAsync(CreateRegistryRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateRegistryRequest, CreateRegistryResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<CreateSchemaResult> createSchemaAsync(CreateSchemaRequest request) {
    return createSchemaAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreateSchemaResult> createSchemaAsync(CreateSchemaRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateSchemaRequest, CreateSchemaResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<CreateScriptResult> createScriptAsync(CreateScriptRequest request) {
    return createScriptAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreateScriptResult> createScriptAsync(CreateScriptRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateScriptRequest, CreateScriptResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<CreateSecurityConfigurationResult> createSecurityConfigurationAsync(CreateSecurityConfigurationRequest request) {
    return createSecurityConfigurationAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreateSecurityConfigurationResult> createSecurityConfigurationAsync(CreateSecurityConfigurationRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateSecurityConfigurationRequest, CreateSecurityConfigurationResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<CreateTableResult> createTableAsync(CreateTableRequest request) {
    return createTableAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreateTableResult> createTableAsync(CreateTableRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateTableRequest, CreateTableResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<CreateTriggerResult> createTriggerAsync(CreateTriggerRequest request) {
    return createTriggerAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreateTriggerResult> createTriggerAsync(CreateTriggerRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateTriggerRequest, CreateTriggerResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<CreateUserDefinedFunctionResult> createUserDefinedFunctionAsync(CreateUserDefinedFunctionRequest request) {
    return createUserDefinedFunctionAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreateUserDefinedFunctionResult> createUserDefinedFunctionAsync(CreateUserDefinedFunctionRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateUserDefinedFunctionRequest, CreateUserDefinedFunctionResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<CreateWorkflowResult> createWorkflowAsync(CreateWorkflowRequest request) {
    return createWorkflowAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreateWorkflowResult> createWorkflowAsync(CreateWorkflowRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateWorkflowRequest, CreateWorkflowResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
// Auto-generated async stubs (AWS Glue Delete* operations). Request-only overloads
// delegate with a null AsyncHandler; handler overloads always throw in this abstract base.
@Override
public java.util.concurrent.Future<DeleteBlueprintResult> deleteBlueprintAsync(DeleteBlueprintRequest request) {
    return deleteBlueprintAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteBlueprintResult> deleteBlueprintAsync(DeleteBlueprintRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteBlueprintRequest, DeleteBlueprintResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteClassifierResult> deleteClassifierAsync(DeleteClassifierRequest request) {
    return deleteClassifierAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteClassifierResult> deleteClassifierAsync(DeleteClassifierRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteClassifierRequest, DeleteClassifierResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteColumnStatisticsForPartitionResult> deleteColumnStatisticsForPartitionAsync(
        DeleteColumnStatisticsForPartitionRequest request) {
    return deleteColumnStatisticsForPartitionAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteColumnStatisticsForPartitionResult> deleteColumnStatisticsForPartitionAsync(
        DeleteColumnStatisticsForPartitionRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteColumnStatisticsForPartitionRequest, DeleteColumnStatisticsForPartitionResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteColumnStatisticsForTableResult> deleteColumnStatisticsForTableAsync(DeleteColumnStatisticsForTableRequest request) {
    return deleteColumnStatisticsForTableAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteColumnStatisticsForTableResult> deleteColumnStatisticsForTableAsync(DeleteColumnStatisticsForTableRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteColumnStatisticsForTableRequest, DeleteColumnStatisticsForTableResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteConnectionResult> deleteConnectionAsync(DeleteConnectionRequest request) {
    return deleteConnectionAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteConnectionResult> deleteConnectionAsync(DeleteConnectionRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteConnectionRequest, DeleteConnectionResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteCrawlerResult> deleteCrawlerAsync(DeleteCrawlerRequest request) {
    return deleteCrawlerAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteCrawlerResult> deleteCrawlerAsync(DeleteCrawlerRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteCrawlerRequest, DeleteCrawlerResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteDatabaseResult> deleteDatabaseAsync(DeleteDatabaseRequest request) {
    return deleteDatabaseAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteDatabaseResult> deleteDatabaseAsync(DeleteDatabaseRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteDatabaseRequest, DeleteDatabaseResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteDevEndpointResult> deleteDevEndpointAsync(DeleteDevEndpointRequest request) {
    return deleteDevEndpointAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteDevEndpointResult> deleteDevEndpointAsync(DeleteDevEndpointRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteDevEndpointRequest, DeleteDevEndpointResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteJobResult> deleteJobAsync(DeleteJobRequest request) {
    return deleteJobAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteJobResult> deleteJobAsync(DeleteJobRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteJobRequest, DeleteJobResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteMLTransformResult> deleteMLTransformAsync(DeleteMLTransformRequest request) {
    return deleteMLTransformAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteMLTransformResult> deleteMLTransformAsync(DeleteMLTransformRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteMLTransformRequest, DeleteMLTransformResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeletePartitionResult> deletePartitionAsync(DeletePartitionRequest request) {
    return deletePartitionAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeletePartitionResult> deletePartitionAsync(DeletePartitionRequest request,
        com.amazonaws.handlers.AsyncHandler<DeletePartitionRequest, DeletePartitionResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeletePartitionIndexResult> deletePartitionIndexAsync(DeletePartitionIndexRequest request) {
    return deletePartitionIndexAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeletePartitionIndexResult> deletePartitionIndexAsync(DeletePartitionIndexRequest request,
        com.amazonaws.handlers.AsyncHandler<DeletePartitionIndexRequest, DeletePartitionIndexResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteRegistryResult> deleteRegistryAsync(DeleteRegistryRequest request) {
    return deleteRegistryAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteRegistryResult> deleteRegistryAsync(DeleteRegistryRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteRegistryRequest, DeleteRegistryResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteResourcePolicyResult> deleteResourcePolicyAsync(DeleteResourcePolicyRequest request) {
    return deleteResourcePolicyAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteResourcePolicyResult> deleteResourcePolicyAsync(DeleteResourcePolicyRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteResourcePolicyRequest, DeleteResourcePolicyResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteSchemaResult> deleteSchemaAsync(DeleteSchemaRequest request) {
    return deleteSchemaAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteSchemaResult> deleteSchemaAsync(DeleteSchemaRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteSchemaRequest, DeleteSchemaResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteSchemaVersionsResult> deleteSchemaVersionsAsync(DeleteSchemaVersionsRequest request) {
    return deleteSchemaVersionsAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteSchemaVersionsResult> deleteSchemaVersionsAsync(DeleteSchemaVersionsRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteSchemaVersionsRequest, DeleteSchemaVersionsResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteSecurityConfigurationResult> deleteSecurityConfigurationAsync(DeleteSecurityConfigurationRequest request) {
    return deleteSecurityConfigurationAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteSecurityConfigurationResult> deleteSecurityConfigurationAsync(DeleteSecurityConfigurationRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteSecurityConfigurationRequest, DeleteSecurityConfigurationResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteTableResult> deleteTableAsync(DeleteTableRequest request) {
    return deleteTableAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteTableResult> deleteTableAsync(DeleteTableRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteTableRequest, DeleteTableResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteTableVersionResult> deleteTableVersionAsync(DeleteTableVersionRequest request) {
    return deleteTableVersionAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteTableVersionResult> deleteTableVersionAsync(DeleteTableVersionRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteTableVersionRequest, DeleteTableVersionResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteTriggerResult> deleteTriggerAsync(DeleteTriggerRequest request) {
    return deleteTriggerAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteTriggerResult> deleteTriggerAsync(DeleteTriggerRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteTriggerRequest, DeleteTriggerResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteUserDefinedFunctionResult> deleteUserDefinedFunctionAsync(DeleteUserDefinedFunctionRequest request) {
    return deleteUserDefinedFunctionAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteUserDefinedFunctionResult> deleteUserDefinedFunctionAsync(DeleteUserDefinedFunctionRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteUserDefinedFunctionRequest, DeleteUserDefinedFunctionResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteWorkflowResult> deleteWorkflowAsync(DeleteWorkflowRequest request) {
    return deleteWorkflowAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteWorkflowResult> deleteWorkflowAsync(DeleteWorkflowRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteWorkflowRequest, DeleteWorkflowResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetBlueprintResult> getBlueprintAsync(GetBlueprintRequest request) {
return getBlueprintAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetBlueprintResult> getBlueprintAsync(GetBlueprintRequest request,
com.amazonaws.handlers.AsyncHandler<GetBlueprintRequest, GetBlueprintResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetBlueprintRunResult> getBlueprintRunAsync(GetBlueprintRunRequest request) {
return getBlueprintRunAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetBlueprintRunResult> getBlueprintRunAsync(GetBlueprintRunRequest request,
com.amazonaws.handlers.AsyncHandler<GetBlueprintRunRequest, GetBlueprintRunResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetBlueprintRunsResult> getBlueprintRunsAsync(GetBlueprintRunsRequest request) {
return getBlueprintRunsAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetBlueprintRunsResult> getBlueprintRunsAsync(GetBlueprintRunsRequest request,
com.amazonaws.handlers.AsyncHandler<GetBlueprintRunsRequest, GetBlueprintRunsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetCatalogImportStatusResult> getCatalogImportStatusAsync(GetCatalogImportStatusRequest request) {
return getCatalogImportStatusAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetCatalogImportStatusResult> getCatalogImportStatusAsync(GetCatalogImportStatusRequest request,
com.amazonaws.handlers.AsyncHandler<GetCatalogImportStatusRequest, GetCatalogImportStatusResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetClassifierResult> getClassifierAsync(GetClassifierRequest request) {
return getClassifierAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetClassifierResult> getClassifierAsync(GetClassifierRequest request,
com.amazonaws.handlers.AsyncHandler<GetClassifierRequest, GetClassifierResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetClassifiersResult> getClassifiersAsync(GetClassifiersRequest request) {
return getClassifiersAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetClassifiersResult> getClassifiersAsync(GetClassifiersRequest request,
com.amazonaws.handlers.AsyncHandler<GetClassifiersRequest, GetClassifiersResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetColumnStatisticsForPartitionResult> getColumnStatisticsForPartitionAsync(
GetColumnStatisticsForPartitionRequest request) {
return getColumnStatisticsForPartitionAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetColumnStatisticsForPartitionResult> getColumnStatisticsForPartitionAsync(
GetColumnStatisticsForPartitionRequest request,
com.amazonaws.handlers.AsyncHandler<GetColumnStatisticsForPartitionRequest, GetColumnStatisticsForPartitionResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetColumnStatisticsForTableResult> getColumnStatisticsForTableAsync(GetColumnStatisticsForTableRequest request) {
return getColumnStatisticsForTableAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetColumnStatisticsForTableResult> getColumnStatisticsForTableAsync(GetColumnStatisticsForTableRequest request,
com.amazonaws.handlers.AsyncHandler<GetColumnStatisticsForTableRequest, GetColumnStatisticsForTableResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetConnectionResult> getConnectionAsync(GetConnectionRequest request) {
return getConnectionAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetConnectionResult> getConnectionAsync(GetConnectionRequest request,
com.amazonaws.handlers.AsyncHandler<GetConnectionRequest, GetConnectionResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetConnectionsResult> getConnectionsAsync(GetConnectionsRequest request) {
return getConnectionsAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetConnectionsResult> getConnectionsAsync(GetConnectionsRequest request,
com.amazonaws.handlers.AsyncHandler<GetConnectionsRequest, GetConnectionsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetCrawlerResult> getCrawlerAsync(GetCrawlerRequest request) {
return getCrawlerAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetCrawlerResult> getCrawlerAsync(GetCrawlerRequest request,
com.amazonaws.handlers.AsyncHandler<GetCrawlerRequest, GetCrawlerResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetCrawlerMetricsResult> getCrawlerMetricsAsync(GetCrawlerMetricsRequest request) {
return getCrawlerMetricsAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetCrawlerMetricsResult> getCrawlerMetricsAsync(GetCrawlerMetricsRequest request,
com.amazonaws.handlers.AsyncHandler<GetCrawlerMetricsRequest, GetCrawlerMetricsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetCrawlersResult> getCrawlersAsync(GetCrawlersRequest request) {
return getCrawlersAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetCrawlersResult> getCrawlersAsync(GetCrawlersRequest request,
com.amazonaws.handlers.AsyncHandler<GetCrawlersRequest, GetCrawlersResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetDataCatalogEncryptionSettingsResult> getDataCatalogEncryptionSettingsAsync(
GetDataCatalogEncryptionSettingsRequest request) {
return getDataCatalogEncryptionSettingsAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetDataCatalogEncryptionSettingsResult> getDataCatalogEncryptionSettingsAsync(
GetDataCatalogEncryptionSettingsRequest request,
com.amazonaws.handlers.AsyncHandler<GetDataCatalogEncryptionSettingsRequest, GetDataCatalogEncryptionSettingsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetDatabaseResult> getDatabaseAsync(GetDatabaseRequest request) {
return getDatabaseAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetDatabaseResult> getDatabaseAsync(GetDatabaseRequest request,
com.amazonaws.handlers.AsyncHandler<GetDatabaseRequest, GetDatabaseResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetDatabasesResult> getDatabasesAsync(GetDatabasesRequest request) {
return getDatabasesAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetDatabasesResult> getDatabasesAsync(GetDatabasesRequest request,
com.amazonaws.handlers.AsyncHandler<GetDatabasesRequest, GetDatabasesResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetDataflowGraphResult> getDataflowGraphAsync(GetDataflowGraphRequest request) {
return getDataflowGraphAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetDataflowGraphResult> getDataflowGraphAsync(GetDataflowGraphRequest request,
com.amazonaws.handlers.AsyncHandler<GetDataflowGraphRequest, GetDataflowGraphResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetDevEndpointResult> getDevEndpointAsync(GetDevEndpointRequest request) {
return getDevEndpointAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetDevEndpointResult> getDevEndpointAsync(GetDevEndpointRequest request,
com.amazonaws.handlers.AsyncHandler<GetDevEndpointRequest, GetDevEndpointResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetDevEndpointsResult> getDevEndpointsAsync(GetDevEndpointsRequest request) {
return getDevEndpointsAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetDevEndpointsResult> getDevEndpointsAsync(GetDevEndpointsRequest request,
com.amazonaws.handlers.AsyncHandler<GetDevEndpointsRequest, GetDevEndpointsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetJobResult> getJobAsync(GetJobRequest request) {
return getJobAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetJobResult> getJobAsync(GetJobRequest request,
com.amazonaws.handlers.AsyncHandler<GetJobRequest, GetJobResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetJobBookmarkResult> getJobBookmarkAsync(GetJobBookmarkRequest request) {
return getJobBookmarkAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetJobBookmarkResult> getJobBookmarkAsync(GetJobBookmarkRequest request,
com.amazonaws.handlers.AsyncHandler<GetJobBookmarkRequest, GetJobBookmarkResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetJobRunResult> getJobRunAsync(GetJobRunRequest request) {
return getJobRunAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetJobRunResult> getJobRunAsync(GetJobRunRequest request,
com.amazonaws.handlers.AsyncHandler<GetJobRunRequest, GetJobRunResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// NOTE(review): This region appears to be machine-generated AWS SDK stub code for an
// abstract async Glue client. Every operation follows the same two-method pattern:
//   1) the single-argument overload delegates to the two-argument overload, passing a
//      null AsyncHandler (i.e. "no completion callback");
//   2) the two-argument overload throws UnsupportedOperationException, so a concrete
//      subclass must override it to provide a real implementation.
// Consequently, calling ANY of these methods on this class directly (with or without a
// handler) ends in UnsupportedOperationException unless the two-arg form is overridden.
// Avoid hand-editing: changes here would likely be overwritten by the code generator.
@Override
public java.util.concurrent.Future<GetJobRunsResult> getJobRunsAsync(GetJobRunsRequest request) {
return getJobRunsAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetJobRunsResult> getJobRunsAsync(GetJobRunsRequest request,
com.amazonaws.handlers.AsyncHandler<GetJobRunsRequest, GetJobRunsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetJobsResult> getJobsAsync(GetJobsRequest request) {
return getJobsAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetJobsResult> getJobsAsync(GetJobsRequest request,
com.amazonaws.handlers.AsyncHandler<GetJobsRequest, GetJobsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetMLTaskRunResult> getMLTaskRunAsync(GetMLTaskRunRequest request) {
return getMLTaskRunAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetMLTaskRunResult> getMLTaskRunAsync(GetMLTaskRunRequest request,
com.amazonaws.handlers.AsyncHandler<GetMLTaskRunRequest, GetMLTaskRunResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetMLTaskRunsResult> getMLTaskRunsAsync(GetMLTaskRunsRequest request) {
return getMLTaskRunsAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetMLTaskRunsResult> getMLTaskRunsAsync(GetMLTaskRunsRequest request,
com.amazonaws.handlers.AsyncHandler<GetMLTaskRunsRequest, GetMLTaskRunsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetMLTransformResult> getMLTransformAsync(GetMLTransformRequest request) {
return getMLTransformAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetMLTransformResult> getMLTransformAsync(GetMLTransformRequest request,
com.amazonaws.handlers.AsyncHandler<GetMLTransformRequest, GetMLTransformResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetMLTransformsResult> getMLTransformsAsync(GetMLTransformsRequest request) {
return getMLTransformsAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetMLTransformsResult> getMLTransformsAsync(GetMLTransformsRequest request,
com.amazonaws.handlers.AsyncHandler<GetMLTransformsRequest, GetMLTransformsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetMappingResult> getMappingAsync(GetMappingRequest request) {
return getMappingAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetMappingResult> getMappingAsync(GetMappingRequest request,
com.amazonaws.handlers.AsyncHandler<GetMappingRequest, GetMappingResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetPartitionResult> getPartitionAsync(GetPartitionRequest request) {
return getPartitionAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetPartitionResult> getPartitionAsync(GetPartitionRequest request,
com.amazonaws.handlers.AsyncHandler<GetPartitionRequest, GetPartitionResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetPartitionIndexesResult> getPartitionIndexesAsync(GetPartitionIndexesRequest request) {
return getPartitionIndexesAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetPartitionIndexesResult> getPartitionIndexesAsync(GetPartitionIndexesRequest request,
com.amazonaws.handlers.AsyncHandler<GetPartitionIndexesRequest, GetPartitionIndexesResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetPartitionsResult> getPartitionsAsync(GetPartitionsRequest request) {
return getPartitionsAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetPartitionsResult> getPartitionsAsync(GetPartitionsRequest request,
com.amazonaws.handlers.AsyncHandler<GetPartitionsRequest, GetPartitionsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetPlanResult> getPlanAsync(GetPlanRequest request) {
return getPlanAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetPlanResult> getPlanAsync(GetPlanRequest request,
com.amazonaws.handlers.AsyncHandler<GetPlanRequest, GetPlanResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetRegistryResult> getRegistryAsync(GetRegistryRequest request) {
return getRegistryAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetRegistryResult> getRegistryAsync(GetRegistryRequest request,
com.amazonaws.handlers.AsyncHandler<GetRegistryRequest, GetRegistryResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetResourcePoliciesResult> getResourcePoliciesAsync(GetResourcePoliciesRequest request) {
return getResourcePoliciesAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetResourcePoliciesResult> getResourcePoliciesAsync(GetResourcePoliciesRequest request,
com.amazonaws.handlers.AsyncHandler<GetResourcePoliciesRequest, GetResourcePoliciesResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetResourcePolicyResult> getResourcePolicyAsync(GetResourcePolicyRequest request) {
return getResourcePolicyAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetResourcePolicyResult> getResourcePolicyAsync(GetResourcePolicyRequest request,
com.amazonaws.handlers.AsyncHandler<GetResourcePolicyRequest, GetResourcePolicyResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetSchemaResult> getSchemaAsync(GetSchemaRequest request) {
return getSchemaAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetSchemaResult> getSchemaAsync(GetSchemaRequest request,
com.amazonaws.handlers.AsyncHandler<GetSchemaRequest, GetSchemaResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetSchemaByDefinitionResult> getSchemaByDefinitionAsync(GetSchemaByDefinitionRequest request) {
return getSchemaByDefinitionAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetSchemaByDefinitionResult> getSchemaByDefinitionAsync(GetSchemaByDefinitionRequest request,
com.amazonaws.handlers.AsyncHandler<GetSchemaByDefinitionRequest, GetSchemaByDefinitionResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetSchemaVersionResult> getSchemaVersionAsync(GetSchemaVersionRequest request) {
return getSchemaVersionAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetSchemaVersionResult> getSchemaVersionAsync(GetSchemaVersionRequest request,
com.amazonaws.handlers.AsyncHandler<GetSchemaVersionRequest, GetSchemaVersionResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetSchemaVersionsDiffResult> getSchemaVersionsDiffAsync(GetSchemaVersionsDiffRequest request) {
return getSchemaVersionsDiffAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetSchemaVersionsDiffResult> getSchemaVersionsDiffAsync(GetSchemaVersionsDiffRequest request,
com.amazonaws.handlers.AsyncHandler<GetSchemaVersionsDiffRequest, GetSchemaVersionsDiffResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetSecurityConfigurationResult> getSecurityConfigurationAsync(GetSecurityConfigurationRequest request) {
return getSecurityConfigurationAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetSecurityConfigurationResult> getSecurityConfigurationAsync(GetSecurityConfigurationRequest request,
com.amazonaws.handlers.AsyncHandler<GetSecurityConfigurationRequest, GetSecurityConfigurationResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetSecurityConfigurationsResult> getSecurityConfigurationsAsync(GetSecurityConfigurationsRequest request) {
return getSecurityConfigurationsAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetSecurityConfigurationsResult> getSecurityConfigurationsAsync(GetSecurityConfigurationsRequest request,
com.amazonaws.handlers.AsyncHandler<GetSecurityConfigurationsRequest, GetSecurityConfigurationsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// Generated async stub pairs, continued (table/trigger/workflow reads, catalog import,
// blueprint listing). Same uniform pattern as the rest of this class: the one-arg
// overload forwards to the two-arg overload with a null AsyncHandler, and the two-arg
// overload throws UnsupportedOperationException until overridden by a subclass.
@Override
public java.util.concurrent.Future<GetTableResult> getTableAsync(GetTableRequest request) {
return getTableAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetTableResult> getTableAsync(GetTableRequest request,
com.amazonaws.handlers.AsyncHandler<GetTableRequest, GetTableResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetTableVersionResult> getTableVersionAsync(GetTableVersionRequest request) {
return getTableVersionAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetTableVersionResult> getTableVersionAsync(GetTableVersionRequest request,
com.amazonaws.handlers.AsyncHandler<GetTableVersionRequest, GetTableVersionResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetTableVersionsResult> getTableVersionsAsync(GetTableVersionsRequest request) {
return getTableVersionsAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetTableVersionsResult> getTableVersionsAsync(GetTableVersionsRequest request,
com.amazonaws.handlers.AsyncHandler<GetTableVersionsRequest, GetTableVersionsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetTablesResult> getTablesAsync(GetTablesRequest request) {
return getTablesAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetTablesResult> getTablesAsync(GetTablesRequest request,
com.amazonaws.handlers.AsyncHandler<GetTablesRequest, GetTablesResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetTagsResult> getTagsAsync(GetTagsRequest request) {
return getTagsAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetTagsResult> getTagsAsync(GetTagsRequest request,
com.amazonaws.handlers.AsyncHandler<GetTagsRequest, GetTagsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetTriggerResult> getTriggerAsync(GetTriggerRequest request) {
return getTriggerAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetTriggerResult> getTriggerAsync(GetTriggerRequest request,
com.amazonaws.handlers.AsyncHandler<GetTriggerRequest, GetTriggerResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetTriggersResult> getTriggersAsync(GetTriggersRequest request) {
return getTriggersAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetTriggersResult> getTriggersAsync(GetTriggersRequest request,
com.amazonaws.handlers.AsyncHandler<GetTriggersRequest, GetTriggersResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetUnfilteredPartitionMetadataResult> getUnfilteredPartitionMetadataAsync(GetUnfilteredPartitionMetadataRequest request) {
return getUnfilteredPartitionMetadataAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetUnfilteredPartitionMetadataResult> getUnfilteredPartitionMetadataAsync(GetUnfilteredPartitionMetadataRequest request,
com.amazonaws.handlers.AsyncHandler<GetUnfilteredPartitionMetadataRequest, GetUnfilteredPartitionMetadataResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetUnfilteredPartitionsMetadataResult> getUnfilteredPartitionsMetadataAsync(
GetUnfilteredPartitionsMetadataRequest request) {
return getUnfilteredPartitionsMetadataAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetUnfilteredPartitionsMetadataResult> getUnfilteredPartitionsMetadataAsync(
GetUnfilteredPartitionsMetadataRequest request,
com.amazonaws.handlers.AsyncHandler<GetUnfilteredPartitionsMetadataRequest, GetUnfilteredPartitionsMetadataResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetUnfilteredTableMetadataResult> getUnfilteredTableMetadataAsync(GetUnfilteredTableMetadataRequest request) {
return getUnfilteredTableMetadataAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetUnfilteredTableMetadataResult> getUnfilteredTableMetadataAsync(GetUnfilteredTableMetadataRequest request,
com.amazonaws.handlers.AsyncHandler<GetUnfilteredTableMetadataRequest, GetUnfilteredTableMetadataResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetUserDefinedFunctionResult> getUserDefinedFunctionAsync(GetUserDefinedFunctionRequest request) {
return getUserDefinedFunctionAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetUserDefinedFunctionResult> getUserDefinedFunctionAsync(GetUserDefinedFunctionRequest request,
com.amazonaws.handlers.AsyncHandler<GetUserDefinedFunctionRequest, GetUserDefinedFunctionResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetUserDefinedFunctionsResult> getUserDefinedFunctionsAsync(GetUserDefinedFunctionsRequest request) {
return getUserDefinedFunctionsAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetUserDefinedFunctionsResult> getUserDefinedFunctionsAsync(GetUserDefinedFunctionsRequest request,
com.amazonaws.handlers.AsyncHandler<GetUserDefinedFunctionsRequest, GetUserDefinedFunctionsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetWorkflowResult> getWorkflowAsync(GetWorkflowRequest request) {
return getWorkflowAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetWorkflowResult> getWorkflowAsync(GetWorkflowRequest request,
com.amazonaws.handlers.AsyncHandler<GetWorkflowRequest, GetWorkflowResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetWorkflowRunResult> getWorkflowRunAsync(GetWorkflowRunRequest request) {
return getWorkflowRunAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetWorkflowRunResult> getWorkflowRunAsync(GetWorkflowRunRequest request,
com.amazonaws.handlers.AsyncHandler<GetWorkflowRunRequest, GetWorkflowRunResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetWorkflowRunPropertiesResult> getWorkflowRunPropertiesAsync(GetWorkflowRunPropertiesRequest request) {
return getWorkflowRunPropertiesAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetWorkflowRunPropertiesResult> getWorkflowRunPropertiesAsync(GetWorkflowRunPropertiesRequest request,
com.amazonaws.handlers.AsyncHandler<GetWorkflowRunPropertiesRequest, GetWorkflowRunPropertiesResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<GetWorkflowRunsResult> getWorkflowRunsAsync(GetWorkflowRunsRequest request) {
return getWorkflowRunsAsync(request, null);
}
@Override
public java.util.concurrent.Future<GetWorkflowRunsResult> getWorkflowRunsAsync(GetWorkflowRunsRequest request,
com.amazonaws.handlers.AsyncHandler<GetWorkflowRunsRequest, GetWorkflowRunsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ImportCatalogToGlueResult> importCatalogToGlueAsync(ImportCatalogToGlueRequest request) {
return importCatalogToGlueAsync(request, null);
}
@Override
public java.util.concurrent.Future<ImportCatalogToGlueResult> importCatalogToGlueAsync(ImportCatalogToGlueRequest request,
com.amazonaws.handlers.AsyncHandler<ImportCatalogToGlueRequest, ImportCatalogToGlueResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ListBlueprintsResult> listBlueprintsAsync(ListBlueprintsRequest request) {
return listBlueprintsAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListBlueprintsResult> listBlueprintsAsync(ListBlueprintsRequest request,
com.amazonaws.handlers.AsyncHandler<ListBlueprintsRequest, ListBlueprintsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// Generated async stub pairs, continued (list operations, resource-policy and schema
// registry writes, job-bookmark reset, workflow resume, table search, blueprint run).
// Same uniform pattern: one-arg overload delegates with a null AsyncHandler; two-arg
// overload throws UnsupportedOperationException until a subclass overrides it.
@Override
public java.util.concurrent.Future<ListCrawlersResult> listCrawlersAsync(ListCrawlersRequest request) {
return listCrawlersAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListCrawlersResult> listCrawlersAsync(ListCrawlersRequest request,
com.amazonaws.handlers.AsyncHandler<ListCrawlersRequest, ListCrawlersResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ListDevEndpointsResult> listDevEndpointsAsync(ListDevEndpointsRequest request) {
return listDevEndpointsAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListDevEndpointsResult> listDevEndpointsAsync(ListDevEndpointsRequest request,
com.amazonaws.handlers.AsyncHandler<ListDevEndpointsRequest, ListDevEndpointsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ListJobsResult> listJobsAsync(ListJobsRequest request) {
return listJobsAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListJobsResult> listJobsAsync(ListJobsRequest request,
com.amazonaws.handlers.AsyncHandler<ListJobsRequest, ListJobsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ListMLTransformsResult> listMLTransformsAsync(ListMLTransformsRequest request) {
return listMLTransformsAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListMLTransformsResult> listMLTransformsAsync(ListMLTransformsRequest request,
com.amazonaws.handlers.AsyncHandler<ListMLTransformsRequest, ListMLTransformsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ListRegistriesResult> listRegistriesAsync(ListRegistriesRequest request) {
return listRegistriesAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListRegistriesResult> listRegistriesAsync(ListRegistriesRequest request,
com.amazonaws.handlers.AsyncHandler<ListRegistriesRequest, ListRegistriesResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ListSchemaVersionsResult> listSchemaVersionsAsync(ListSchemaVersionsRequest request) {
return listSchemaVersionsAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListSchemaVersionsResult> listSchemaVersionsAsync(ListSchemaVersionsRequest request,
com.amazonaws.handlers.AsyncHandler<ListSchemaVersionsRequest, ListSchemaVersionsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ListSchemasResult> listSchemasAsync(ListSchemasRequest request) {
return listSchemasAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListSchemasResult> listSchemasAsync(ListSchemasRequest request,
com.amazonaws.handlers.AsyncHandler<ListSchemasRequest, ListSchemasResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ListTriggersResult> listTriggersAsync(ListTriggersRequest request) {
return listTriggersAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListTriggersResult> listTriggersAsync(ListTriggersRequest request,
com.amazonaws.handlers.AsyncHandler<ListTriggersRequest, ListTriggersResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ListWorkflowsResult> listWorkflowsAsync(ListWorkflowsRequest request) {
return listWorkflowsAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListWorkflowsResult> listWorkflowsAsync(ListWorkflowsRequest request,
com.amazonaws.handlers.AsyncHandler<ListWorkflowsRequest, ListWorkflowsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<PutDataCatalogEncryptionSettingsResult> putDataCatalogEncryptionSettingsAsync(
PutDataCatalogEncryptionSettingsRequest request) {
return putDataCatalogEncryptionSettingsAsync(request, null);
}
@Override
public java.util.concurrent.Future<PutDataCatalogEncryptionSettingsResult> putDataCatalogEncryptionSettingsAsync(
PutDataCatalogEncryptionSettingsRequest request,
com.amazonaws.handlers.AsyncHandler<PutDataCatalogEncryptionSettingsRequest, PutDataCatalogEncryptionSettingsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<PutResourcePolicyResult> putResourcePolicyAsync(PutResourcePolicyRequest request) {
return putResourcePolicyAsync(request, null);
}
@Override
public java.util.concurrent.Future<PutResourcePolicyResult> putResourcePolicyAsync(PutResourcePolicyRequest request,
com.amazonaws.handlers.AsyncHandler<PutResourcePolicyRequest, PutResourcePolicyResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<PutSchemaVersionMetadataResult> putSchemaVersionMetadataAsync(PutSchemaVersionMetadataRequest request) {
return putSchemaVersionMetadataAsync(request, null);
}
@Override
public java.util.concurrent.Future<PutSchemaVersionMetadataResult> putSchemaVersionMetadataAsync(PutSchemaVersionMetadataRequest request,
com.amazonaws.handlers.AsyncHandler<PutSchemaVersionMetadataRequest, PutSchemaVersionMetadataResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<PutWorkflowRunPropertiesResult> putWorkflowRunPropertiesAsync(PutWorkflowRunPropertiesRequest request) {
return putWorkflowRunPropertiesAsync(request, null);
}
@Override
public java.util.concurrent.Future<PutWorkflowRunPropertiesResult> putWorkflowRunPropertiesAsync(PutWorkflowRunPropertiesRequest request,
com.amazonaws.handlers.AsyncHandler<PutWorkflowRunPropertiesRequest, PutWorkflowRunPropertiesResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<QuerySchemaVersionMetadataResult> querySchemaVersionMetadataAsync(QuerySchemaVersionMetadataRequest request) {
return querySchemaVersionMetadataAsync(request, null);
}
@Override
public java.util.concurrent.Future<QuerySchemaVersionMetadataResult> querySchemaVersionMetadataAsync(QuerySchemaVersionMetadataRequest request,
com.amazonaws.handlers.AsyncHandler<QuerySchemaVersionMetadataRequest, QuerySchemaVersionMetadataResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<RegisterSchemaVersionResult> registerSchemaVersionAsync(RegisterSchemaVersionRequest request) {
return registerSchemaVersionAsync(request, null);
}
@Override
public java.util.concurrent.Future<RegisterSchemaVersionResult> registerSchemaVersionAsync(RegisterSchemaVersionRequest request,
com.amazonaws.handlers.AsyncHandler<RegisterSchemaVersionRequest, RegisterSchemaVersionResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<RemoveSchemaVersionMetadataResult> removeSchemaVersionMetadataAsync(RemoveSchemaVersionMetadataRequest request) {
return removeSchemaVersionMetadataAsync(request, null);
}
@Override
public java.util.concurrent.Future<RemoveSchemaVersionMetadataResult> removeSchemaVersionMetadataAsync(RemoveSchemaVersionMetadataRequest request,
com.amazonaws.handlers.AsyncHandler<RemoveSchemaVersionMetadataRequest, RemoveSchemaVersionMetadataResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ResetJobBookmarkResult> resetJobBookmarkAsync(ResetJobBookmarkRequest request) {
return resetJobBookmarkAsync(request, null);
}
@Override
public java.util.concurrent.Future<ResetJobBookmarkResult> resetJobBookmarkAsync(ResetJobBookmarkRequest request,
com.amazonaws.handlers.AsyncHandler<ResetJobBookmarkRequest, ResetJobBookmarkResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ResumeWorkflowRunResult> resumeWorkflowRunAsync(ResumeWorkflowRunRequest request) {
return resumeWorkflowRunAsync(request, null);
}
@Override
public java.util.concurrent.Future<ResumeWorkflowRunResult> resumeWorkflowRunAsync(ResumeWorkflowRunRequest request,
com.amazonaws.handlers.AsyncHandler<ResumeWorkflowRunRequest, ResumeWorkflowRunResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<SearchTablesResult> searchTablesAsync(SearchTablesRequest request) {
return searchTablesAsync(request, null);
}
@Override
public java.util.concurrent.Future<SearchTablesResult> searchTablesAsync(SearchTablesRequest request,
com.amazonaws.handlers.AsyncHandler<SearchTablesRequest, SearchTablesResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<StartBlueprintRunResult> startBlueprintRunAsync(StartBlueprintRunRequest request) {
return startBlueprintRunAsync(request, null);
}
@Override
public java.util.concurrent.Future<StartBlueprintRunResult> startBlueprintRunAsync(StartBlueprintRunRequest request,
com.amazonaws.handlers.AsyncHandler<StartBlueprintRunRequest, StartBlueprintRunResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<StartCrawlerResult> startCrawlerAsync(StartCrawlerRequest request) {
return startCrawlerAsync(request, null);
}
@Override
public java.util.concurrent.Future<StartCrawlerResult> startCrawlerAsync(StartCrawlerRequest request,
com.amazonaws.handlers.AsyncHandler<StartCrawlerRequest, StartCrawlerResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<StartCrawlerScheduleResult> startCrawlerScheduleAsync(StartCrawlerScheduleRequest request) {
return startCrawlerScheduleAsync(request, null);
}
@Override
public java.util.concurrent.Future<StartCrawlerScheduleResult> startCrawlerScheduleAsync(StartCrawlerScheduleRequest request,
com.amazonaws.handlers.AsyncHandler<StartCrawlerScheduleRequest, StartCrawlerScheduleResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<StartExportLabelsTaskRunResult> startExportLabelsTaskRunAsync(StartExportLabelsTaskRunRequest request) {
return startExportLabelsTaskRunAsync(request, null);
}
@Override
public java.util.concurrent.Future<StartExportLabelsTaskRunResult> startExportLabelsTaskRunAsync(StartExportLabelsTaskRunRequest request,
com.amazonaws.handlers.AsyncHandler<StartExportLabelsTaskRunRequest, StartExportLabelsTaskRunResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<StartImportLabelsTaskRunResult> startImportLabelsTaskRunAsync(StartImportLabelsTaskRunRequest request) {
return startImportLabelsTaskRunAsync(request, null);
}
@Override
public java.util.concurrent.Future<StartImportLabelsTaskRunResult> startImportLabelsTaskRunAsync(StartImportLabelsTaskRunRequest request,
com.amazonaws.handlers.AsyncHandler<StartImportLabelsTaskRunRequest, StartImportLabelsTaskRunResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<StartJobRunResult> startJobRunAsync(StartJobRunRequest request) {
return startJobRunAsync(request, null);
}
@Override
public java.util.concurrent.Future<StartJobRunResult> startJobRunAsync(StartJobRunRequest request,
com.amazonaws.handlers.AsyncHandler<StartJobRunRequest, StartJobRunResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<StartMLEvaluationTaskRunResult> startMLEvaluationTaskRunAsync(StartMLEvaluationTaskRunRequest request) {
return startMLEvaluationTaskRunAsync(request, null);
}
@Override
public java.util.concurrent.Future<StartMLEvaluationTaskRunResult> startMLEvaluationTaskRunAsync(StartMLEvaluationTaskRunRequest request,
com.amazonaws.handlers.AsyncHandler<StartMLEvaluationTaskRunRequest, StartMLEvaluationTaskRunResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<StartMLLabelingSetGenerationTaskRunResult> startMLLabelingSetGenerationTaskRunAsync(
StartMLLabelingSetGenerationTaskRunRequest request) {
return startMLLabelingSetGenerationTaskRunAsync(request, null);
}
@Override
public java.util.concurrent.Future<StartMLLabelingSetGenerationTaskRunResult> startMLLabelingSetGenerationTaskRunAsync(
StartMLLabelingSetGenerationTaskRunRequest request,
com.amazonaws.handlers.AsyncHandler<StartMLLabelingSetGenerationTaskRunRequest, StartMLLabelingSetGenerationTaskRunResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<StartTriggerResult> startTriggerAsync(StartTriggerRequest request) {
return startTriggerAsync(request, null);
}
@Override
public java.util.concurrent.Future<StartTriggerResult> startTriggerAsync(StartTriggerRequest request,
com.amazonaws.handlers.AsyncHandler<StartTriggerRequest, StartTriggerResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<StartWorkflowRunResult> startWorkflowRunAsync(StartWorkflowRunRequest request) {
return startWorkflowRunAsync(request, null);
}
@Override
public java.util.concurrent.Future<StartWorkflowRunResult> startWorkflowRunAsync(StartWorkflowRunRequest request,
com.amazonaws.handlers.AsyncHandler<StartWorkflowRunRequest, StartWorkflowRunResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<StopCrawlerResult> stopCrawlerAsync(StopCrawlerRequest request) {
return stopCrawlerAsync(request, null);
}
@Override
public java.util.concurrent.Future<StopCrawlerResult> stopCrawlerAsync(StopCrawlerRequest request,
com.amazonaws.handlers.AsyncHandler<StopCrawlerRequest, StopCrawlerResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<StopCrawlerScheduleResult> stopCrawlerScheduleAsync(StopCrawlerScheduleRequest request) {
return stopCrawlerScheduleAsync(request, null);
}
@Override
public java.util.concurrent.Future<StopCrawlerScheduleResult> stopCrawlerScheduleAsync(StopCrawlerScheduleRequest request,
com.amazonaws.handlers.AsyncHandler<StopCrawlerScheduleRequest, StopCrawlerScheduleResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<StopTriggerResult> stopTriggerAsync(StopTriggerRequest request) {
return stopTriggerAsync(request, null);
}
@Override
public java.util.concurrent.Future<StopTriggerResult> stopTriggerAsync(StopTriggerRequest request,
com.amazonaws.handlers.AsyncHandler<StopTriggerRequest, StopTriggerResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<StopWorkflowRunResult> stopWorkflowRunAsync(StopWorkflowRunRequest request) {
return stopWorkflowRunAsync(request, null);
}
@Override
public java.util.concurrent.Future<StopWorkflowRunResult> stopWorkflowRunAsync(StopWorkflowRunRequest request,
com.amazonaws.handlers.AsyncHandler<StopWorkflowRunRequest, StopWorkflowRunResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<TagResourceResult> tagResourceAsync(TagResourceRequest request) {
return tagResourceAsync(request, null);
}
@Override
public java.util.concurrent.Future<TagResourceResult> tagResourceAsync(TagResourceRequest request,
com.amazonaws.handlers.AsyncHandler<TagResourceRequest, TagResourceResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<UntagResourceResult> untagResourceAsync(UntagResourceRequest request) {
return untagResourceAsync(request, null);
}
@Override
public java.util.concurrent.Future<UntagResourceResult> untagResourceAsync(UntagResourceRequest request,
com.amazonaws.handlers.AsyncHandler<UntagResourceRequest, UntagResourceResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
/** Convenience overload: forwards to the two-argument variant with no async handler. */
@Override
public java.util.concurrent.Future<UpdateBlueprintResult> updateBlueprintAsync(UpdateBlueprintRequest request) {
    return updateBlueprintAsync(request, null);
}

/** Not supported by this stub implementation; always throws {@code UnsupportedOperationException}. */
@Override
public java.util.concurrent.Future<UpdateBlueprintResult> updateBlueprintAsync(UpdateBlueprintRequest request,
        com.amazonaws.handlers.AsyncHandler<UpdateBlueprintRequest, UpdateBlueprintResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
/** Convenience overload: forwards to the two-argument variant with no async handler. */
@Override
public java.util.concurrent.Future<UpdateClassifierResult> updateClassifierAsync(UpdateClassifierRequest request) {
    return updateClassifierAsync(request, null);
}

/** Not supported by this stub implementation; always throws {@code UnsupportedOperationException}. */
@Override
public java.util.concurrent.Future<UpdateClassifierResult> updateClassifierAsync(UpdateClassifierRequest request,
        com.amazonaws.handlers.AsyncHandler<UpdateClassifierRequest, UpdateClassifierResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
/** Convenience overload: forwards to the two-argument variant with no async handler. */
@Override
public java.util.concurrent.Future<UpdateColumnStatisticsForPartitionResult> updateColumnStatisticsForPartitionAsync(
        UpdateColumnStatisticsForPartitionRequest request) {
    return updateColumnStatisticsForPartitionAsync(request, null);
}

/** Not supported by this stub implementation; always throws {@code UnsupportedOperationException}. */
@Override
public java.util.concurrent.Future<UpdateColumnStatisticsForPartitionResult> updateColumnStatisticsForPartitionAsync(
        UpdateColumnStatisticsForPartitionRequest request,
        com.amazonaws.handlers.AsyncHandler<UpdateColumnStatisticsForPartitionRequest, UpdateColumnStatisticsForPartitionResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
/** Convenience overload: forwards to the two-argument variant with no async handler. */
@Override
public java.util.concurrent.Future<UpdateColumnStatisticsForTableResult> updateColumnStatisticsForTableAsync(UpdateColumnStatisticsForTableRequest request) {
    return updateColumnStatisticsForTableAsync(request, null);
}

/** Not supported by this stub implementation; always throws {@code UnsupportedOperationException}. */
@Override
public java.util.concurrent.Future<UpdateColumnStatisticsForTableResult> updateColumnStatisticsForTableAsync(UpdateColumnStatisticsForTableRequest request,
        com.amazonaws.handlers.AsyncHandler<UpdateColumnStatisticsForTableRequest, UpdateColumnStatisticsForTableResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
/** Convenience overload: forwards to the two-argument variant with no async handler. */
@Override
public java.util.concurrent.Future<UpdateConnectionResult> updateConnectionAsync(UpdateConnectionRequest request) {
    return updateConnectionAsync(request, null);
}

/** Not supported by this stub implementation; always throws {@code UnsupportedOperationException}. */
@Override
public java.util.concurrent.Future<UpdateConnectionResult> updateConnectionAsync(UpdateConnectionRequest request,
        com.amazonaws.handlers.AsyncHandler<UpdateConnectionRequest, UpdateConnectionResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
/** Convenience overload: forwards to the two-argument variant with no async handler. */
@Override
public java.util.concurrent.Future<UpdateCrawlerResult> updateCrawlerAsync(UpdateCrawlerRequest request) {
    return updateCrawlerAsync(request, null);
}

/** Not supported by this stub implementation; always throws {@code UnsupportedOperationException}. */
@Override
public java.util.concurrent.Future<UpdateCrawlerResult> updateCrawlerAsync(UpdateCrawlerRequest request,
        com.amazonaws.handlers.AsyncHandler<UpdateCrawlerRequest, UpdateCrawlerResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
/** Convenience overload: forwards to the two-argument variant with no async handler. */
@Override
public java.util.concurrent.Future<UpdateCrawlerScheduleResult> updateCrawlerScheduleAsync(UpdateCrawlerScheduleRequest request) {
    return updateCrawlerScheduleAsync(request, null);
}

/** Not supported by this stub implementation; always throws {@code UnsupportedOperationException}. */
@Override
public java.util.concurrent.Future<UpdateCrawlerScheduleResult> updateCrawlerScheduleAsync(UpdateCrawlerScheduleRequest request,
        com.amazonaws.handlers.AsyncHandler<UpdateCrawlerScheduleRequest, UpdateCrawlerScheduleResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
/** Convenience overload: forwards to the two-argument variant with no async handler. */
@Override
public java.util.concurrent.Future<UpdateDatabaseResult> updateDatabaseAsync(UpdateDatabaseRequest request) {
    return updateDatabaseAsync(request, null);
}

/** Not supported by this stub implementation; always throws {@code UnsupportedOperationException}. */
@Override
public java.util.concurrent.Future<UpdateDatabaseResult> updateDatabaseAsync(UpdateDatabaseRequest request,
        com.amazonaws.handlers.AsyncHandler<UpdateDatabaseRequest, UpdateDatabaseResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
/** Convenience overload: forwards to the two-argument variant with no async handler. */
@Override
public java.util.concurrent.Future<UpdateDevEndpointResult> updateDevEndpointAsync(UpdateDevEndpointRequest request) {
    return updateDevEndpointAsync(request, null);
}

/** Not supported by this stub implementation; always throws {@code UnsupportedOperationException}. */
@Override
public java.util.concurrent.Future<UpdateDevEndpointResult> updateDevEndpointAsync(UpdateDevEndpointRequest request,
        com.amazonaws.handlers.AsyncHandler<UpdateDevEndpointRequest, UpdateDevEndpointResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
/** Convenience overload: forwards to the two-argument variant with no async handler. */
@Override
public java.util.concurrent.Future<UpdateJobResult> updateJobAsync(UpdateJobRequest request) {
    return updateJobAsync(request, null);
}

/** Not supported by this stub implementation; always throws {@code UnsupportedOperationException}. */
@Override
public java.util.concurrent.Future<UpdateJobResult> updateJobAsync(UpdateJobRequest request,
        com.amazonaws.handlers.AsyncHandler<UpdateJobRequest, UpdateJobResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
/** Convenience overload: forwards to the two-argument variant with no async handler. */
@Override
public java.util.concurrent.Future<UpdateMLTransformResult> updateMLTransformAsync(UpdateMLTransformRequest request) {
    return updateMLTransformAsync(request, null);
}

/** Not supported by this stub implementation; always throws {@code UnsupportedOperationException}. */
@Override
public java.util.concurrent.Future<UpdateMLTransformResult> updateMLTransformAsync(UpdateMLTransformRequest request,
        com.amazonaws.handlers.AsyncHandler<UpdateMLTransformRequest, UpdateMLTransformResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
/** Convenience overload: forwards to the two-argument variant with no async handler. */
@Override
public java.util.concurrent.Future<UpdatePartitionResult> updatePartitionAsync(UpdatePartitionRequest request) {
    return updatePartitionAsync(request, null);
}

/** Not supported by this stub implementation; always throws {@code UnsupportedOperationException}. */
@Override
public java.util.concurrent.Future<UpdatePartitionResult> updatePartitionAsync(UpdatePartitionRequest request,
        com.amazonaws.handlers.AsyncHandler<UpdatePartitionRequest, UpdatePartitionResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
/** Convenience overload: forwards to the two-argument variant with no async handler. */
@Override
public java.util.concurrent.Future<UpdateRegistryResult> updateRegistryAsync(UpdateRegistryRequest request) {
    return updateRegistryAsync(request, null);
}

/** Not supported by this stub implementation; always throws {@code UnsupportedOperationException}. */
@Override
public java.util.concurrent.Future<UpdateRegistryResult> updateRegistryAsync(UpdateRegistryRequest request,
        com.amazonaws.handlers.AsyncHandler<UpdateRegistryRequest, UpdateRegistryResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
/** Convenience overload: forwards to the two-argument variant with no async handler. */
@Override
public java.util.concurrent.Future<UpdateSchemaResult> updateSchemaAsync(UpdateSchemaRequest request) {
    return updateSchemaAsync(request, null);
}

/** Not supported by this stub implementation; always throws {@code UnsupportedOperationException}. */
@Override
public java.util.concurrent.Future<UpdateSchemaResult> updateSchemaAsync(UpdateSchemaRequest request,
        com.amazonaws.handlers.AsyncHandler<UpdateSchemaRequest, UpdateSchemaResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
/** Convenience overload: forwards to the two-argument variant with no async handler. */
@Override
public java.util.concurrent.Future<UpdateTableResult> updateTableAsync(UpdateTableRequest request) {
    return updateTableAsync(request, null);
}

/** Not supported by this stub implementation; always throws {@code UnsupportedOperationException}. */
@Override
public java.util.concurrent.Future<UpdateTableResult> updateTableAsync(UpdateTableRequest request,
        com.amazonaws.handlers.AsyncHandler<UpdateTableRequest, UpdateTableResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
/** Convenience overload: forwards to the two-argument variant with no async handler. */
@Override
public java.util.concurrent.Future<UpdateTriggerResult> updateTriggerAsync(UpdateTriggerRequest request) {
    return updateTriggerAsync(request, null);
}

/** Not supported by this stub implementation; always throws {@code UnsupportedOperationException}. */
@Override
public java.util.concurrent.Future<UpdateTriggerResult> updateTriggerAsync(UpdateTriggerRequest request,
        com.amazonaws.handlers.AsyncHandler<UpdateTriggerRequest, UpdateTriggerResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
/** Convenience overload: forwards to the two-argument variant with no async handler. */
@Override
public java.util.concurrent.Future<UpdateUserDefinedFunctionResult> updateUserDefinedFunctionAsync(UpdateUserDefinedFunctionRequest request) {
    return updateUserDefinedFunctionAsync(request, null);
}

/** Not supported by this stub implementation; always throws {@code UnsupportedOperationException}. */
@Override
public java.util.concurrent.Future<UpdateUserDefinedFunctionResult> updateUserDefinedFunctionAsync(UpdateUserDefinedFunctionRequest request,
        com.amazonaws.handlers.AsyncHandler<UpdateUserDefinedFunctionRequest, UpdateUserDefinedFunctionResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
/** Convenience overload: forwards to the two-argument variant with no async handler. */
@Override
public java.util.concurrent.Future<UpdateWorkflowResult> updateWorkflowAsync(UpdateWorkflowRequest request) {
    return updateWorkflowAsync(request, null);
}

/** Not supported by this stub implementation; always throws {@code UnsupportedOperationException}. */
@Override
public java.util.concurrent.Future<UpdateWorkflowResult> updateWorkflowAsync(UpdateWorkflowRequest request,
        com.amazonaws.handlers.AsyncHandler<UpdateWorkflowRequest, UpdateWorkflowResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
}
| |
/*
// Licensed to Julian Hyde under one or more contributor license
// agreements. See the NOTICE file distributed with this work for
// additional information regarding copyright ownership.
//
// Julian Hyde licenses this file to you under the Apache License,
// Version 2.0 (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
*/
package net.hydromatic.optiq.test;
import net.hydromatic.avatica.ByteString;
import net.hydromatic.optiq.runtime.*;
import org.junit.Test;
import java.util.*;
import static net.hydromatic.optiq.runtime.SqlFunctions.*;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
/**
 * Unit test for the methods in {@link SqlFunctions} that implement SQL
 * functions.
 *
 * <p>Groups of tests: string functions (length, concat, case, trim),
 * date/time conversions based on Unix day and millisecond counts, interval
 * formatting, list comparison via {@link Utilities#compare}, numeric
 * truncation/rounding, and {@link ByteString} behavior.
 */
public class SqlFunctionsTest {
  @Test public void testCharLength() {
    assertEquals(3, charLength("xyz"));
  }

  @Test public void testConcat() {
    assertEquals("a bcd", concat("a b", "cd"));
    // The code generator will ensure that nulls are never passed in. If we
    // pass in null, it is treated like the string "null", as the following
    // tests show. Not the desired behavior for SQL.
    assertEquals("anull", concat("a", null));
    assertEquals("nullnull", concat((String) null, null));
    assertEquals("nullb", concat(null, "b"));
  }

  @Test public void testLower() {
    assertEquals("a bcd", lower("A bCd"));
  }

  @Test public void testUpper() {
    assertEquals("A BCD", upper("A bCd"));
  }

  @Test public void testInitcap() {
    // A letter at the start or after a non-alphanumeric character is
    // upper-cased; letters after digits stay lower-case (see "1a" below).
    assertEquals("Aa", initcap("aA"));
    assertEquals("Zz", initcap("zz"));
    assertEquals("Az", initcap("AZ"));
    assertEquals("Try A Little ", initcap("tRy a littlE "));
    assertEquals("Won'T It?No", initcap("won't it?no"));
    assertEquals("1a", initcap("1A"));
    assertEquals(" B0123b", initcap(" b0123B"));
  }

  @Test public void testLesser() {
    // Note the asymmetry: a null SECOND argument throws NPE, while a null
    // FIRST argument yields the other operand.
    assertEquals("a", lesser("a", "bc"));
    assertEquals("ac", lesser("bc", "ac"));
    try {
      Object o = lesser("a", null);
      fail("Expected NPE, got " + o);
    } catch (NullPointerException e) {
      // ok
    }
    assertEquals("a", lesser(null, "a"));
    assertNull(lesser((String) null, null));
  }

  @Test public void testGreater() {
    // Same null asymmetry as testLesser.
    assertEquals("bc", greater("a", "bc"));
    assertEquals("bc", greater("bc", "ac"));
    try {
      Object o = greater("a", null);
      fail("Expected NPE, got " + o);
    } catch (NullPointerException e) {
      // ok
    }
    assertEquals("a", greater(null, "a"));
    assertNull(greater((String) null, null));
  }

  /** Test for {@link SqlFunctions#rtrim}. */
  @Test public void testRtrim() {
    assertEquals("", rtrim(""));
    assertEquals("", rtrim(" "));
    assertEquals(" x", rtrim(" x "));
    assertEquals(" x", rtrim(" x "));
    assertEquals(" x y", rtrim(" x y "));
    assertEquals(" x", rtrim(" x"));
    assertEquals("x", rtrim("x"));
  }

  /** Test for {@link SqlFunctions#ltrim}. */
  @Test public void testLtrim() {
    assertEquals("", ltrim(""));
    assertEquals("", ltrim(" "));
    assertEquals("x ", ltrim(" x "));
    assertEquals("x ", ltrim(" x "));
    assertEquals("x y ", ltrim("x y "));
    assertEquals("x", ltrim(" x"));
    assertEquals("x", ltrim("x"));
  }

  /** Test for {@link SqlFunctions#trim}. */
  @Test public void testTrim() {
    assertEquals("", trimSpacesBoth(""));
    assertEquals("", trimSpacesBoth(" "));
    assertEquals("x", trimSpacesBoth(" x "));
    assertEquals("x", trimSpacesBoth(" x "));
    assertEquals("x y", trimSpacesBoth(" x y "));
    assertEquals("x", trimSpacesBoth(" x"));
    assertEquals("x", trimSpacesBoth("x"));
  }

  /** Shorthand for trimming spaces from both ends: {@code trim(true, true, " ", s)}. */
  static String trimSpacesBoth(String s) {
    return trim(true, true, " ", s);
  }

  @Test public void testUnixDateToString() {
    // Verify these using the "date" command. E.g.
    // $ date -u --date="@$(expr 10957 \* 86400)"
    // Sat Jan 1 00:00:00 UTC 2000
    assertEquals("2000-01-01", unixDateToString(10957));

    assertEquals("1970-01-01", unixDateToString(0));
    assertEquals("1970-01-02", unixDateToString(1));
    assertEquals("1971-01-01", unixDateToString(365));
    assertEquals("1972-01-01", unixDateToString(730));
    // 1972 is a leap year, so Feb 29 exists.
    assertEquals("1972-02-28", unixDateToString(788));
    assertEquals("1972-02-29", unixDateToString(789));
    assertEquals("1972-03-01", unixDateToString(790));

    // Negative day counts are dates before the epoch.
    assertEquals("1969-01-01", unixDateToString(-365));
    assertEquals("2000-01-01", unixDateToString(10957));
    assertEquals("2000-02-28", unixDateToString(11015));
    assertEquals("2000-02-29", unixDateToString(11016));
    assertEquals("2000-03-01", unixDateToString(11017));
    assertEquals("1945-02-24", unixDateToString(-9077));
  }

  @Test public void testYmdToUnixDate() {
    // Inverse of unixDateToString: (year, month, day) -> days since epoch.
    assertEquals(0, ymdToUnixDate(1970, 1, 1));
    assertEquals(365, ymdToUnixDate(1971, 1, 1));
    assertEquals(-365, ymdToUnixDate(1969, 1, 1));
    assertEquals(11017, ymdToUnixDate(2000, 3, 1));
    assertEquals(-9077, ymdToUnixDate(1945, 2, 24));
  }

  @Test public void testDateToString() {
    assertEquals("1970-01-01", unixDateToString(0));
    assertEquals("1971-02-03", unixDateToString(0 + 365 + 31 + 2));
  }

  @Test public void testTimeToString() {
    // Time values are milliseconds since midnight.
    assertEquals("00:00:00", unixTimeToString(0));
    assertEquals("23:59:59", unixTimeToString(86400000 - 1));
  }

  @Test public void testTimestampToString() {
    // ISO format would be "1970-01-01T00:00:00" but SQL format is different
    assertEquals("1970-01-01 00:00:00", unixTimestampToString(0));
    assertEquals(
        "1970-02-01 23:59:59",
        unixTimestampToString(86400000L * 32L - 1L));
  }

  @Test public void testIntervalYearMonthToString() {
    // Year-month intervals are stored as a signed count of months.
    TimeUnitRange range = TimeUnitRange.YEAR_TO_MONTH;
    assertEquals("+0-00", intervalYearMonthToString(0, range));
    assertEquals("+1-00", intervalYearMonthToString(12, range));
    assertEquals("+1-01", intervalYearMonthToString(13, range));
    assertEquals("-1-01", intervalYearMonthToString(-13, range));
  }

  @Test public void testIntervalDayTimeToString() {
    // Zero-valued intervals, one per TimeUnitRange, to pin the exact
    // formatting (field separators and zero padding) of each range.
    assertEquals("+0", intervalYearMonthToString(0, TimeUnitRange.YEAR));
    assertEquals("+0-00",
        intervalYearMonthToString(0, TimeUnitRange.YEAR_TO_MONTH));
    assertEquals("+0", intervalYearMonthToString(0, TimeUnitRange.MONTH));
    assertEquals("+0", intervalDayTimeToString(0, TimeUnitRange.DAY, 0));
    assertEquals("+0 00",
        intervalDayTimeToString(0, TimeUnitRange.DAY_TO_HOUR, 0));
    assertEquals("+0 00:00",
        intervalDayTimeToString(0, TimeUnitRange.DAY_TO_MINUTE, 0));
    assertEquals("+0 00:00:00",
        intervalDayTimeToString(0, TimeUnitRange.DAY_TO_SECOND, 0));
    assertEquals("+0", intervalDayTimeToString(0, TimeUnitRange.HOUR, 0));
    assertEquals("+0:00",
        intervalDayTimeToString(0, TimeUnitRange.HOUR_TO_MINUTE, 0));
    assertEquals("+0:00:00",
        intervalDayTimeToString(0, TimeUnitRange.HOUR_TO_SECOND, 0));
    assertEquals("+0",
        intervalDayTimeToString(0, TimeUnitRange.MINUTE, 0));
    assertEquals("+0:00",
        intervalDayTimeToString(0, TimeUnitRange.MINUTE_TO_SECOND, 0));
    assertEquals("+0",
        intervalDayTimeToString(0, TimeUnitRange.SECOND, 0));
  }

  @Test public void testYmdToJulian() {
    // All checked using http://aa.usno.navy.mil/data/docs/JulianDate.php.
    // We round up - if JulianDate.php gives 2451544.5, we use 2451545.
    assertThat(ymdToJulian(2014, 4, 3), equalTo(2456751));
    assertThat(ymdToJulian(2000, 1, 1), equalTo(2451545));
    assertThat(ymdToJulian(1970, 1, 1), equalTo(2440588));
    assertThat(ymdToJulian(1970, 1, 1), equalTo(EPOCH_JULIAN));
    assertThat(ymdToJulian(1901, 1, 1), equalTo(2415386));
    assertThat(ymdToJulian(1900, 10, 17), equalTo(2415310));
    // 1900 was NOT a leap year (divisible by 100 but not 400).
    assertThat(ymdToJulian(1900, 3, 1), equalTo(2415080));
    assertThat(ymdToJulian(1900, 2, 28), equalTo(2415079));
    assertThat(ymdToJulian(1900, 2, 1), equalTo(2415052));
    assertThat(ymdToJulian(1900, 1, 1), equalTo(2415021));
    assertThat(ymdToJulian(1777, 7, 4), equalTo(2370281));
  }

  @Test public void testExtract() {
    assertThat(unixDateExtract(TimeUnitRange.YEAR, 0), equalTo(1970));
    assertThat(unixDateExtract(TimeUnitRange.YEAR, -1), equalTo(1969));
    assertThat(unixDateExtract(TimeUnitRange.YEAR, 364), equalTo(1970));
    assertThat(unixDateExtract(TimeUnitRange.YEAR, 365), equalTo(1971));

    assertThat(unixDateExtract(TimeUnitRange.MONTH, 0), equalTo(1));
    assertThat(unixDateExtract(TimeUnitRange.MONTH, -1), equalTo(12));
    assertThat(unixDateExtract(TimeUnitRange.MONTH, 364), equalTo(12));
    assertThat(unixDateExtract(TimeUnitRange.MONTH, 365), equalTo(1));

    // Round-trips across leap-year boundaries, including the century
    // exception (1900 has no Feb 29).
    thereAndBack(2000, 1, 1);
    thereAndBack(2000, 2, 28);
    thereAndBack(2000, 2, 29); // leap day
    thereAndBack(2000, 3, 1);
    thereAndBack(1964, 1, 1);
    thereAndBack(1964, 2, 28);
    thereAndBack(1964, 2, 29); // leap day
    thereAndBack(1964, 3, 1);
    thereAndBack(1864, 1, 1);
    thereAndBack(1864, 2, 28);
    thereAndBack(1864, 2, 29); // leap day
    thereAndBack(1864, 3, 1);
    thereAndBack(1900, 1, 1);
    thereAndBack(1900, 2, 28);
    thereAndBack(1900, 3, 1);
    thereAndBack(2004, 2, 28);
    thereAndBack(2004, 2, 29); // leap day
    thereAndBack(2004, 3, 1);
  }

  /** Converts (year, month, day) to a Unix date and asserts that YEAR,
   * MONTH and DAY extract back to the original components. */
  private void thereAndBack(int year, int month, int day) {
    final int unixDate = ymdToUnixDate(year, month, day);
    assertThat(unixDateExtract(TimeUnitRange.YEAR, unixDate), equalTo(year));
    assertThat(unixDateExtract(TimeUnitRange.MONTH, unixDate), equalTo(month));
    assertThat(unixDateExtract(TimeUnitRange.DAY, unixDate), equalTo(day));
  }

  /** Unit test for
   * {@link Utilities#compare(java.util.List, java.util.List)}. */
  @Test public void testCompare() {
    final List<String> ac = Arrays.asList("a", "c");
    final List<String> abc = Arrays.asList("a", "b", "c");
    final List<String> a = Arrays.asList("a");
    final List<String> empty = Collections.emptyList();
    assertEquals(0, Utilities.compare(ac, ac));
    assertEquals(0, Utilities.compare(ac, new ArrayList<String>(ac)));
    assertEquals(-1, Utilities.compare(a, ac));
    assertEquals(-1, Utilities.compare(empty, ac));
    assertEquals(1, Utilities.compare(ac, a));
    // Element-wise comparison dominates length: ["a","c"] > ["a","b","c"]
    // because "c" > "b" at index 1.
    assertEquals(1, Utilities.compare(ac, abc));
    assertEquals(1, Utilities.compare(ac, empty));
    assertEquals(0, Utilities.compare(empty, empty));
  }

  @Test public void testTruncateLong() {
    // truncate rounds toward negative infinity, not toward zero.
    assertEquals(12000L, SqlFunctions.truncate(12345L, 1000L));
    assertEquals(12000L, SqlFunctions.truncate(12000L, 1000L));
    assertEquals(12000L, SqlFunctions.truncate(12001L, 1000L));
    assertEquals(11000L, SqlFunctions.truncate(11999L, 1000L));

    assertEquals(-13000L, SqlFunctions.truncate(-12345L, 1000L));
    assertEquals(-12000L, SqlFunctions.truncate(-12000L, 1000L));
    assertEquals(-13000L, SqlFunctions.truncate(-12001L, 1000L));
    assertEquals(-12000L, SqlFunctions.truncate(-11999L, 1000L));
  }

  @Test public void testTruncateInt() {
    assertEquals(12000, SqlFunctions.truncate(12345, 1000));
    assertEquals(12000, SqlFunctions.truncate(12000, 1000));
    assertEquals(12000, SqlFunctions.truncate(12001, 1000));
    assertEquals(11000, SqlFunctions.truncate(11999, 1000));

    assertEquals(-13000, SqlFunctions.truncate(-12345, 1000));
    assertEquals(-12000, SqlFunctions.truncate(-12000, 1000));
    assertEquals(-13000, SqlFunctions.truncate(-12001, 1000));
    assertEquals(-12000, SqlFunctions.truncate(-11999, 1000));

    // round, by contrast, goes to the nearest multiple.
    assertEquals(12000, SqlFunctions.round(12345, 1000));
    assertEquals(13000, SqlFunctions.round(12845, 1000));
    assertEquals(-12000, SqlFunctions.round(-12345, 1000));
    assertEquals(-13000, SqlFunctions.round(-12845, 1000));
  }

  @Test public void testByteString() {
    final byte[] bytes = {(byte) 0xAB, (byte) 0xFF};
    final ByteString byteString = new ByteString(bytes);
    assertEquals(2, byteString.length());
    assertEquals("abff", byteString.toString());
    assertEquals("abff", byteString.toString(16));
    assertEquals("1010101111111111", byteString.toString(2));

    final ByteString emptyByteString = new ByteString(new byte[0]);
    assertEquals(0, emptyByteString.length());
    assertEquals("", emptyByteString.toString());
    assertEquals("", emptyByteString.toString(16));
    assertEquals("", emptyByteString.toString(2));

    assertEquals(emptyByteString, ByteString.EMPTY);

    assertEquals("ff", byteString.substring(1, 2).toString());
    assertEquals("abff", byteString.substring(0, 2).toString());
    assertEquals("", byteString.substring(2, 2).toString());

    // Add empty string, get original string back
    assertSame(byteString.concat(emptyByteString), byteString);
    final ByteString byteString1 = new ByteString(new byte[]{(byte) 12});
    assertEquals("abff0c", byteString.concat(byteString1).toString());

    final byte[] bytes3 = {(byte) 0xFF};
    final ByteString byteString3 = new ByteString(bytes3);
    // Empty string is found at position 0 of any string.
    assertEquals(0, byteString.indexOf(emptyByteString));
    assertEquals(-1, byteString.indexOf(byteString1));
    assertEquals(1, byteString.indexOf(byteString3));
    assertEquals(-1, byteString3.indexOf(byteString));
  }

  @Test public void testEasyLog10() {
    // digitCount(n) = number of decimal digits; 0 counts as one digit.
    assertEquals(1, SqlFunctions.digitCount(0));
    assertEquals(1, SqlFunctions.digitCount(1));
    assertEquals(1, SqlFunctions.digitCount(9));
    assertEquals(2, SqlFunctions.digitCount(10));
    assertEquals(2, SqlFunctions.digitCount(11));
    assertEquals(2, SqlFunctions.digitCount(99));
    assertEquals(3, SqlFunctions.digitCount(100));
  }
}
// End SqlFunctionsTest.java
| |
/*
* Copyright 2002-2007 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.springframework.util;
import java.util.Collection;
import java.util.Map;
/**
* Assertion utility class that assists in validating arguments.
* Useful for identifying programmer errors early and clearly at runtime.
*
* <p>For example, if the contract of a public method states it does not
* allow <code>null</code> arguments, Assert can be used to validate that
* contract. Doing this clearly indicates a contract violation when it
* occurs and protects the class's invariants.
*
* <p>Typically used to validate method arguments rather than configuration
* properties, to check for cases that are usually programmer errors rather than
* configuration errors. In contrast to config initialization code, there is
* usually no point in falling back to defaults in such methods.
*
* <p>This class is similar to JUnit's assertion library. If an argument value is
* deemed invalid, an {@link IllegalArgumentException} is thrown (typically).
* For example:
*
* <pre class="code">
* Assert.notNull(clazz, "The class must not be null");
* Assert.isTrue(i > 0, "The value must be greater than zero");</pre>
*
* Mainly for internal use within the framework; consider Jakarta's Commons Lang
* >= 2.0 for a more comprehensive suite of assertion utilities.
*
* @author Keith Donald
* @author Juergen Hoeller
* @author Colin Sampaleanu
* @author Rob Harrop
* @since 1.1.2
*/
public abstract class Assert {
/**
 * Assert a boolean expression, throwing an <code>IllegalArgumentException</code>
 * with the supplied message if the expression evaluates to <code>false</code>.
 * <pre class="code">Assert.isTrue(i > 0, "The value must be greater than zero");</pre>
 * @param expression a boolean expression
 * @param message the exception message to use if the assertion fails
 * @throws IllegalArgumentException if expression is <code>false</code>
 */
public static void isTrue(boolean expression, String message) {
    if (expression) {
        return;
    }
    throw new IllegalArgumentException(message);
}
/**
 * Assert a boolean expression, throwing <code>IllegalArgumentException</code>
 * with a default message if the test result is <code>false</code>.
 * <pre class="code">Assert.isTrue(i > 0);</pre>
 * @param expression a boolean expression
 * @throws IllegalArgumentException if expression is <code>false</code>
 */
public static void isTrue(boolean expression) {
    isTrue(expression, "[Assertion failed] - this expression must be true");
}
/**
 * Assert that an object is <code>null</code>, throwing an
 * <code>IllegalArgumentException</code> with the supplied message otherwise.
 * <pre class="code">Assert.isNull(value, "The value must be null");</pre>
 * @param object the object to check
 * @param message the exception message to use if the assertion fails
 * @throws IllegalArgumentException if the object is not <code>null</code>
 */
public static void isNull(Object object, String message) {
    if (object == null) {
        return;
    }
    throw new IllegalArgumentException(message);
}
/**
 * Assert that an object is <code>null</code>, using a default failure message.
 * <pre class="code">Assert.isNull(value);</pre>
 * @param object the object to check
 * @throws IllegalArgumentException if the object is not <code>null</code>
 */
public static void isNull(Object object) {
    isNull(object, "[Assertion failed] - the object argument must be null");
}
/**
 * Assert that an object is not <code>null</code>, throwing an
 * <code>IllegalArgumentException</code> with the supplied message otherwise.
 * <pre class="code">Assert.notNull(clazz, "The class must not be null");</pre>
 * @param object the object to check
 * @param message the exception message to use if the assertion fails
 * @throws IllegalArgumentException if the object is <code>null</code>
 */
public static void notNull(Object object, String message) {
    if (object != null) {
        return;
    }
    throw new IllegalArgumentException(message);
}
/**
 * Assert that an object is not <code>null</code>, using a default failure message.
 * <pre class="code">Assert.notNull(clazz);</pre>
 * @param object the object to check
 * @throws IllegalArgumentException if the object is <code>null</code>
 */
public static void notNull(Object object) {
    notNull(object, "[Assertion failed] - this argument is required; it must not be null");
}
/**
 * Assert that the given String is not empty; that is, it must not be
 * <code>null</code> and not the empty String. Throws an
 * <code>IllegalArgumentException</code> with the supplied message otherwise.
 * <pre class="code">Assert.hasLength(name, "Name must not be empty");</pre>
 * @param text the String to check
 * @param message the exception message to use if the assertion fails
 * @see StringUtils#hasLength
 */
public static void hasLength(String text, String message) {
    if (StringUtils.hasLength(text)) {
        return;
    }
    throw new IllegalArgumentException(message);
}
/**
 * Assert that the given String is not empty; that is,
 * it must not be <code>null</code> and not the empty String.
 * <pre class="code">Assert.hasLength(name);</pre>
 * @param text the String to check
 * @throws IllegalArgumentException if the text is <code>null</code> or empty
 * @see StringUtils#hasLength
 */
public static void hasLength(String text) {
    // Fix: the previous default message leaked the javadoc markup
    // "<code>null</code>" into the runtime exception text; use plain text.
    hasLength(text,
            "[Assertion failed] - this String argument must have length; it must not be null or empty");
}
/**
 * Assert that the given String has valid text content; that is, it must not
 * be <code>null</code> and must contain at least one non-whitespace character.
 * Throws an <code>IllegalArgumentException</code> with the supplied message otherwise.
 * <pre class="code">Assert.hasText(name, "'name' must not be empty");</pre>
 * @param text the String to check
 * @param message the exception message to use if the assertion fails
 * @see StringUtils#hasText
 */
public static void hasText(String text, String message) {
    if (StringUtils.hasText(text)) {
        return;
    }
    throw new IllegalArgumentException(message);
}
/**
 * Assert that the given String has valid text content; that is, it must not
 * be <code>null</code> and must contain at least one non-whitespace character.
 * <pre class="code">Assert.hasText(name);</pre>
 * @param text the String to check
 * @throws IllegalArgumentException if the text is <code>null</code>, empty, or blank
 * @see StringUtils#hasText
 */
public static void hasText(String text) {
    // Fix: the previous default message leaked the javadoc markup
    // "<code>null</code>" into the runtime exception text; use plain text.
    // (The javadoc example also wrongly showed the two-argument overload.)
    hasText(text,
            "[Assertion failed] - this String argument must have text; it must not be null, empty, or blank");
}
/**
 * Assert that the given text does not contain the given substring.
 * If either argument has no length, the assertion trivially passes.
 * <pre class="code">Assert.doesNotContain(name, "rod", "Name must not contain 'rod'");</pre>
 * @param textToSearch the text to search
 * @param substring the substring to find within the text
 * @param message the exception message to use if the assertion fails
 */
public static void doesNotContain(String textToSearch, String substring, String message) {
    if (!StringUtils.hasLength(textToSearch) || !StringUtils.hasLength(substring)) {
        return;
    }
    if (textToSearch.indexOf(substring) != -1) {
        throw new IllegalArgumentException(message);
    }
}
/**
 * Assert that the given text does not contain the given substring,
 * using a default failure message that names the offending substring.
 * <pre class="code">Assert.doesNotContain(name, "rod");</pre>
 * @param textToSearch the text to search
 * @param substring the substring to find within the text
 */
public static void doesNotContain(String textToSearch, String substring) {
    doesNotContain(textToSearch, substring,
            "[Assertion failed] - this String argument must not contain the substring [" + substring + "]");
}
/**
 * Assert that an array has elements; that is, it must not be
 * <code>null</code> and must have at least one element.
 * <pre class="code">Assert.notEmpty(array, "The array must have elements");</pre>
 * @param array the array to check
 * @param message the exception message to use if the assertion fails
 * @throws IllegalArgumentException if the object array is <code>null</code> or has no elements
 */
public static void notEmpty(Object[] array, String message) {
    if (!ObjectUtils.isEmpty(array)) {
        return;
    }
    throw new IllegalArgumentException(message);
}
/**
 * Assert that an array has elements; that is, it must not be
 * <code>null</code> and must have at least one element.
 * <pre class="code">Assert.notEmpty(array);</pre>
 * @param array the array to check
 * @throws IllegalArgumentException if the object array is <code>null</code> or has no elements
 */
public static void notEmpty(Object[] array) {
    notEmpty(array, "[Assertion failed] - this array must not be empty: it must contain at least 1 element");
}
/**
 * Assert that a collection has elements; that is, it must not be
 * <code>null</code> and must have at least one element.
 * <pre class="code">Assert.notEmpty(collection, "Collection must have elements");</pre>
 * @param collection the collection to check
 * @param message the exception message to use if the assertion fails
 * @throws IllegalArgumentException if the collection is <code>null</code> or has no elements
 */
public static void notEmpty(Collection collection, String message) {
    if (!CollectionUtils.isEmpty(collection)) {
        return;
    }
    throw new IllegalArgumentException(message);
}
/**
 * Assert that a collection has elements; that is, it must not be
 * <code>null</code> and must have at least one element.
 * Uses a default failure message.
 * <pre class="code">Assert.notEmpty(collection);</pre>
 * @param collection the collection to check
 * @throws IllegalArgumentException if the collection is <code>null</code> or has no elements
 */
public static void notEmpty(Collection collection) {
    notEmpty(collection,
            "[Assertion failed] - this collection must not be empty: it must contain at least 1 element");
}
/**
 * Assert that a Map has entries; that is, it must not be <code>null</code>
 * and must have at least one entry.
 * <pre class="code">Assert.notEmpty(map, "Map must have entries");</pre>
 * @param map the map to check
 * @param message the exception message to use if the assertion fails
 * @throws IllegalArgumentException if the map is <code>null</code> or has no entries
 */
public static void notEmpty(Map map, String message) {
    if (!CollectionUtils.isEmpty(map)) {
        return;
    }
    throw new IllegalArgumentException(message);
}
/**
 * Assert that a Map has entries; that is, it must not be <code>null</code>
 * and must have at least one entry. Uses a default failure message.
 * <pre class="code">Assert.notEmpty(map);</pre>
 * @param map the map to check
 * @throws IllegalArgumentException if the map is <code>null</code> or has no entries
 */
public static void notEmpty(Map map) {
    notEmpty(map, "[Assertion failed] - this map must not be empty; it must contain at least one entry");
}
/**
 * Assert that the provided object is an instance of the provided class,
 * with no additional message context.
 * <pre class="code">Assert.isInstanceOf(Foo.class, foo);</pre>
 * @param clazz the required class
 * @param obj the object to check
 * @throws IllegalArgumentException if the object is not an instance of clazz
 * @see Class#isInstance
 */
public static void isInstanceOf(Class clazz, Object obj) {
    isInstanceOf(clazz, obj, "");
}
/**
 * Assert that the provided object is an instance of the provided class.
 * <pre class="code">Assert.instanceOf(Foo.class, foo);</pre>
 * @param type the type to check against; must not be <code>null</code>
 * @param obj the object to check
 * @param message a message which will be prepended to the message produced by
 * the function itself, and which may be used to provide context. It should
 * normally end in a ": " or ". " so that the function generate message looks
 * ok when prepended to it.
 * @throws IllegalArgumentException if the object is not an instance of clazz
 * @see Class#isInstance
 */
public static void isInstanceOf(Class type, Object obj, String message) {
    notNull(type, "Type to check against must not be null");
    // Fast path: the expected case simply returns.
    if (type.isInstance(obj)) {
        return;
    }
    // Build the diagnostic lazily, only on failure; a null object reports "null".
    String actualClassName = (obj != null ? obj.getClass().getName() : "null");
    throw new IllegalArgumentException(message
        + "Object of class [" + actualClassName + "] must be an instance of " + type);
}
/**
 * Assert that <code>superType.isAssignableFrom(subType)</code> is <code>true</code>.
 * <pre class="code">Assert.isAssignable(Number.class, myClass);</pre>
 * @param superType the super type to check; must not be <code>null</code>
 * @param subType the sub type to check
 * @throws IllegalArgumentException if the classes are not assignable
 */
public static void isAssignable(Class superType, Class subType) {
    // Delegate with an empty message prefix; the three-argument overload builds
    // the full failure message.
    isAssignable(superType, subType, "");
}
/**
 * Assert that <code>superType.isAssignableFrom(subType)</code> is <code>true</code>.
 * <pre class="code">Assert.isAssignable(Number.class, myClass);</pre>
 * @param superType the super type to check against; must not be <code>null</code>
 * @param subType the sub type to check
 * @param message a message which will be prepended to the message produced by
 * the function itself, and which may be used to provide context. It should
 * normally end in a ": " or ". " so that the function generate message looks
 * ok when prepended to it.
 * @throws IllegalArgumentException if the classes are not assignable
 */
public static void isAssignable(Class superType, Class subType, String message) {
    notNull(superType, "Type to check against must not be null");
    // Accept only a non-null subtype that is actually assignable; everything else fails.
    if (subType != null && superType.isAssignableFrom(subType)) {
        return;
    }
    throw new IllegalArgumentException(message + subType + " is not assignable to " + superType);
}
/**
 * Assert a boolean expression, throwing <code>IllegalStateException</code>
 * if the test result is <code>false</code>. Call isTrue if you wish to
 * throw IllegalArgumentException on an assertion failure.
 * <pre class="code">Assert.state(id == null, "The id property must not already be initialized");</pre>
 * @param expression a boolean expression
 * @param message the exception message to use if the assertion fails
 * @throws IllegalStateException if expression is <code>false</code>
 */
public static void state(boolean expression, String message) {
    // Early return on the expected (true) case; only a false expression throws.
    if (expression) {
        return;
    }
    throw new IllegalStateException(message);
}
/**
 * Assert a boolean expression, throwing {@link IllegalStateException}
 * if the test result is <code>false</code>.
 * <p>Call {@link #isTrue(boolean)} if you wish to
 * throw {@link IllegalArgumentException} on an assertion failure.
 * <pre class="code">Assert.state(id == null);</pre>
 * @param expression a boolean expression
 * @throws IllegalStateException if the supplied expression is <code>false</code>
 */
public static void state(boolean expression) {
    // Inline form of the two-argument variant with the default failure message.
    if (!expression) {
        throw new IllegalStateException(
            "[Assertion failed] - this state invariant must be true");
    }
}
}
| |
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.worker.block.evictor;
import alluxio.Configuration;
import alluxio.PropertyKey;
import alluxio.collections.Pair;
import alluxio.worker.block.BlockMetadataManagerView;
import alluxio.worker.block.BlockStoreLocation;
import alluxio.worker.block.allocator.Allocator;
import alluxio.worker.block.meta.BlockMeta;
import alluxio.worker.block.meta.StorageDirView;
import alluxio.worker.block.meta.StorageTierView;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterators;
import io.netty.util.internal.chmv8.ConcurrentHashMapV8;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.atomic.AtomicLong;
import javax.annotation.concurrent.NotThreadSafe;
import javax.annotation.Nullable;
/**
 * This class is used to evict blocks by LRFU. LRFU evict blocks with minimum CRF, where CRF of a
 * block is the sum of F(t) = pow(1.0 / {@link #mAttenuationFactor}, t * {@link #mStepFactor}).
 * Each access to a block has a F(t) value and t is the time interval since that access to current.
 * As the formula of F(t) shows, when (1.0 / {@link #mStepFactor}) time units passed, F(t) will
 * cut to the (1.0 / {@link #mAttenuationFactor}) of the old value. So {@link #mStepFactor}
 * controls the step and {@link #mAttenuationFactor} controls the attenuation. Actually, LRFU
 * combines LRU and LFU, it evicts blocks with small frequency or large recency. When
 * {@link #mStepFactor} is close to 0, LRFU is close to LFU. Conversely, LRFU is close to LRU
 * when {@link #mStepFactor} is close to 1.
 *
 * <p>Despite the concurrent maps, this class is not thread safe as a whole; compound updates
 * are guarded by synchronizing on {@link #mBlockIdToLastUpdateTime} (the single shared monitor
 * used by every mutating method below).
 */
@NotThreadSafe
public final class LRFUEvictor extends AbstractEvictor {
  /** Map from block id to the last updated logic time count. */
  private final Map<Long, Long> mBlockIdToLastUpdateTime = new ConcurrentHashMapV8<>();
  /** Map from block id to the CRF (Combined Recency and Frequency) value of the block. */
  private final Map<Long, Double> mBlockIdToCRFValue = new ConcurrentHashMapV8<>();
  /** In the range of [0, 1]. Closer to 0, LRFU closer to LFU. Closer to 1, LRFU closer to LRU. */
  private final double mStepFactor;
  /** The attenuation factor is in the range of [2, INF]. */
  private final double mAttenuationFactor;
  /** Logic time count; advanced by one on every access, commit or removal. */
  private AtomicLong mLogicTimeCount = new AtomicLong(0L);
  /**
   * Creates a new instance of {@link LRFUEvictor}.
   *
   * @param view a view of block metadata information
   * @param allocator an allocation policy
   * @throws IllegalArgumentException if the configured step or attenuation factor is out of range
   */
  public LRFUEvictor(BlockMetadataManagerView view, Allocator allocator) {
    super(view, allocator);
    mStepFactor = Configuration.getDouble(PropertyKey.WORKER_EVICTOR_LRFU_STEP_FACTOR);
    mAttenuationFactor =
        Configuration.getDouble(PropertyKey.WORKER_EVICTOR_LRFU_ATTENUATION_FACTOR);
    Preconditions.checkArgument(mStepFactor >= 0.0 && mStepFactor <= 1.0,
        "Step factor should be in the range of [0.0, 1.0]");
    Preconditions.checkArgument(mAttenuationFactor >= 2.0,
        "Attenuation factor should be no less than 2.0");
    // Preloading blocks: seed bookkeeping for every evictable block already present so the
    // evictor has an entry (time 0, CRF 0.0) for blocks that predate this instance.
    for (StorageTierView tier : mManagerView.getTierViews()) {
      for (StorageDirView dir : tier.getDirViews()) {
        for (BlockMeta block : dir.getEvictableBlocks()) {
          mBlockIdToLastUpdateTime.put(block.getBlockId(), 0L);
          mBlockIdToCRFValue.put(block.getBlockId(), 0.0);
        }
      }
    }
  }
  /**
   * Calculates weight of an access, which is the function value of
   * F(t) = pow (1.0 / {@link #mAttenuationFactor}, t * {@link #mStepFactor}).
   *
   * @param logicTimeInterval time interval since that access to current
   * @return Function value of F(t)
   */
  private double calculateAccessWeight(long logicTimeInterval) {
    return Math.pow(1.0 / mAttenuationFactor, logicTimeInterval * mStepFactor);
  }
  @Nullable
  @Override
  public EvictionPlan freeSpaceWithView(long bytesToBeAvailable, BlockStoreLocation location,
      BlockMetadataManagerView view) {
    synchronized (mBlockIdToLastUpdateTime) {
      // Bring every block's CRF up to the current logic time first, since eviction
      // candidates are chosen in ascending CRF order (see getBlockIterator()).
      updateCRFValue();
      mManagerView = view;
      List<BlockTransferInfo> toMove = new ArrayList<>();
      List<Pair<Long, BlockStoreLocation>> toEvict = new ArrayList<>();
      EvictionPlan plan = new EvictionPlan(toMove, toEvict);
      StorageDirView candidateDir = cascadingEvict(bytesToBeAvailable, location, plan);
      mManagerView.clearBlockMarks();
      // No candidate directory means the requested space cannot be freed at this location.
      if (candidateDir == null) {
        return null;
      }
      return plan;
    }
  }
  @Override
  protected Iterator<Long> getBlockIterator() {
    // Iterate block ids in ascending CRF order, i.e. coldest blocks first.
    return Iterators.transform(getSortedCRF().iterator(),
        new Function<Map.Entry<Long, Double>, Long>() {
          @Override
          public Long apply(Entry<Long, Double> input) {
            return input.getKey();
          }
        });
  }
  /**
   * Sorts all blocks in ascending order of CRF.
   *
   * @return the sorted CRF of all blocks
   */
  private List<Map.Entry<Long, Double>> getSortedCRF() {
    List<Map.Entry<Long, Double>> sortedCRF = new ArrayList<>(mBlockIdToCRFValue.entrySet());
    Collections.sort(sortedCRF, new Comparator<Map.Entry<Long, Double>>() {
      @Override
      public int compare(Entry<Long, Double> o1, Entry<Long, Double> o2) {
        return Double.compare(o1.getValue(), o2.getValue());
      }
    });
    return sortedCRF;
  }
  @Override
  public void onAccessBlock(long userId, long blockId) {
    updateOnAccessAndCommit(blockId);
  }
  @Override
  public void onCommitBlock(long userId, long blockId, BlockStoreLocation location) {
    // A commit counts as an access for CRF purposes.
    updateOnAccessAndCommit(blockId);
  }
  @Override
  public void onRemoveBlockByClient(long userId, long blockId) {
    updateOnRemoveBlock(blockId);
  }
  @Override
  public void onRemoveBlockByWorker(long userId, long blockId) {
    updateOnRemoveBlock(blockId);
  }
  @Override
  protected void onRemoveBlockFromIterator(long blockId) {
    // Called while iterating eviction candidates; drop bookkeeping for the removed block.
    // NOTE(review): unlike updateOnRemoveBlock, this path neither takes the shared monitor
    // nor advances the logic clock — presumably the caller already holds the lock; confirm.
    mBlockIdToLastUpdateTime.remove(blockId);
    mBlockIdToCRFValue.remove(blockId);
  }
  /**
   * This function is used to update CRF of all the blocks according to current logic time. When
   * some block is accessed in some time, only CRF of that block itself will be updated to current
   * time, other blocks who are not accessed recently will only be updated until
   * {@link #freeSpaceWithView(long, BlockStoreLocation, BlockMetadataManagerView)} is called
   * because blocks need to be sorted in the increasing order of CRF. When this function is called,
   * {@link #mBlockIdToLastUpdateTime} and {@link #mBlockIdToCRFValue} need to be locked in case
   * of the changing of values.
   */
  private void updateCRFValue() {
    long currentLogicTime = mLogicTimeCount.get();
    // Decay each block's CRF by F(elapsed logic time) and stamp it as current.
    // Writing back during iteration is safe here because the map is a concurrent map.
    for (Entry<Long, Double> entry : mBlockIdToCRFValue.entrySet()) {
      long blockId = entry.getKey();
      double crfValue = entry.getValue();
      mBlockIdToCRFValue.put(blockId, crfValue
          * calculateAccessWeight(currentLogicTime - mBlockIdToLastUpdateTime.get(blockId)));
      mBlockIdToLastUpdateTime.put(blockId, currentLogicTime);
    }
  }
  /**
   * Updates {@link #mBlockIdToLastUpdateTime} and {@link #mBlockIdToCRFValue} when block is
   * accessed or committed. Only CRF of the accessed or committed block will be updated, CRF
   * of other blocks will be lazily updated (only when {@link #updateCRFValue()} is called).
   * If the block is updated at the first time, CRF of the block will be set to 1.0, otherwise
   * the CRF of the block will be set to {1.0 + old CRF * F(current time - last update time)}.
   *
   * @param blockId id of the block to be accessed or committed
   */
  private void updateOnAccessAndCommit(long blockId) {
    synchronized (mBlockIdToLastUpdateTime) {
      // Every access advances the logic clock by exactly one tick.
      long currentLogicTime = mLogicTimeCount.incrementAndGet();
      // update CRF value
      // CRF(currentLogicTime)=CRF(lastUpdateTime)*F(currentLogicTime-lastUpdateTime)+F(0)
      if (mBlockIdToCRFValue.containsKey(blockId)) {
        mBlockIdToCRFValue.put(blockId, mBlockIdToCRFValue.get(blockId)
            * calculateAccessWeight(currentLogicTime - mBlockIdToLastUpdateTime.get(blockId))
            + 1.0);
      } else {
        // First sighting of this block: F(0) == 1.0.
        mBlockIdToCRFValue.put(blockId, 1.0);
      }
      // update currentLogicTime to lastUpdateTime
      mBlockIdToLastUpdateTime.put(blockId, currentLogicTime);
    }
  }
  /**
   * Updates {@link #mBlockIdToLastUpdateTime} and {@link #mBlockIdToCRFValue} when block is
   * removed.
   *
   * @param blockId id of the block to be removed
   */
  private void updateOnRemoveBlock(long blockId) {
    synchronized (mBlockIdToLastUpdateTime) {
      // Removal still consumes a logic-time tick, then all bookkeeping is dropped.
      mLogicTimeCount.incrementAndGet();
      mBlockIdToCRFValue.remove(blockId);
      mBlockIdToLastUpdateTime.remove(blockId);
    }
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms;
import org.elasticsearch.Version;
import org.elasticsearch.client.common.TimeUtil;
import org.elasticsearch.client.transform.transforms.latest.LatestConfig;
import org.elasticsearch.client.transform.transforms.pivot.PivotConfig;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import java.io.IOException;
import java.time.Instant;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
 * Client-side representation of a transform configuration, serializable to and from
 * X-Content. Instances are immutable; build them via {@link Builder} or the
 * {@code forPreview} factories, or parse them via {@link #fromXContent}.
 *
 * <p>NOTE: the {@code args[0]..args[10]} indices in {@link #PARSER} are positionally
 * coupled to the declaration order in the static initializer below — keep both in sync
 * when adding or reordering fields.
 */
public class TransformConfig implements ToXContentObject {
    public static final ParseField ID = new ParseField("id");
    public static final ParseField SOURCE = new ParseField("source");
    public static final ParseField DEST = new ParseField("dest");
    public static final ParseField FREQUENCY = new ParseField("frequency");
    public static final ParseField DESCRIPTION = new ParseField("description");
    public static final ParseField SYNC = new ParseField("sync");
    public static final ParseField SETTINGS = new ParseField("settings");
    public static final ParseField VERSION = new ParseField("version");
    public static final ParseField CREATE_TIME = new ParseField("create_time");
    // types of transforms
    public static final ParseField PIVOT_TRANSFORM = new ParseField("pivot");
    public static final ParseField LATEST_TRANSFORM = new ParseField("latest");
    private final String id;
    private final SourceConfig source;
    private final DestConfig dest;
    private final TimeValue frequency;
    private final SyncConfig syncConfig;
    private final SettingsConfig settings;
    private final PivotConfig pivotConfig;
    private final LatestConfig latestConfig;
    private final String description;
    // Server-assigned metadata; null for client-built configurations (see Builder#build).
    private final Version transformVersion;
    private final Instant createTime;
    // Lenient parser ("true" below ignores unknown fields) so newer server responses
    // still parse on older clients.
    public static final ConstructingObjectParser<TransformConfig, Void> PARSER = new ConstructingObjectParser<>(
        "transform",
        true,
        (args) -> {
            String id = (String) args[0];
            SourceConfig source = (SourceConfig) args[1];
            DestConfig dest = (DestConfig) args[2];
            TimeValue frequency = (TimeValue) args[3];
            SyncConfig syncConfig = (SyncConfig) args[4];
            PivotConfig pivotConfig = (PivotConfig) args[5];
            LatestConfig latestConfig = (LatestConfig) args[6];
            String description = (String) args[7];
            SettingsConfig settings = (SettingsConfig) args[8];
            Instant createTime = (Instant) args[9];
            String transformVersion = (String) args[10];
            return new TransformConfig(
                id,
                source,
                dest,
                frequency,
                syncConfig,
                pivotConfig,
                latestConfig,
                description,
                settings,
                createTime,
                transformVersion
            );
        }
    );
    static {
        // Declaration order here fixes the args[] indices consumed in the PARSER lambda above.
        PARSER.declareString(constructorArg(), ID);
        PARSER.declareObject(constructorArg(), (p, c) -> SourceConfig.PARSER.apply(p, null), SOURCE);
        PARSER.declareObject(constructorArg(), (p, c) -> DestConfig.PARSER.apply(p, null), DEST);
        PARSER.declareField(
            optionalConstructorArg(),
            p -> TimeValue.parseTimeValue(p.text(), FREQUENCY.getPreferredName()),
            FREQUENCY,
            ObjectParser.ValueType.STRING
        );
        PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseSyncConfig(p), SYNC);
        PARSER.declareObject(optionalConstructorArg(), (p, c) -> PivotConfig.fromXContent(p), PIVOT_TRANSFORM);
        PARSER.declareObject(optionalConstructorArg(), (p, c) -> LatestConfig.fromXContent(p), LATEST_TRANSFORM);
        PARSER.declareString(optionalConstructorArg(), DESCRIPTION);
        PARSER.declareObject(optionalConstructorArg(), (p, c) -> SettingsConfig.fromXContent(p), SETTINGS);
        PARSER.declareField(
            optionalConstructorArg(),
            p -> TimeUtil.parseTimeFieldToInstant(p, CREATE_TIME.getPreferredName()),
            CREATE_TIME,
            ObjectParser.ValueType.VALUE
        );
        PARSER.declareString(optionalConstructorArg(), VERSION);
    }
    /**
     * Parses the single named {@link SyncConfig} nested under the "sync" object,
     * e.g. {@code {"sync": {"time": {...}}}}; the inner field name selects the
     * registered SyncConfig implementation.
     *
     * @param parser positioned at the START_OBJECT of the "sync" value
     * @return the parsed sync configuration
     * @throws IOException if the token stream does not match the expected shape
     */
    private static SyncConfig parseSyncConfig(XContentParser parser) throws IOException {
        XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
        XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser);
        SyncConfig syncConfig = parser.namedObject(SyncConfig.class, parser.currentName(), true);
        XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser);
        return syncConfig;
    }
    /**
     * Parses a {@link TransformConfig} from X-Content.
     *
     * @param parser the parser positioned at the config object
     * @return the parsed configuration
     */
    public static TransformConfig fromXContent(final XContentParser parser) {
        return PARSER.apply(parser, null);
    }
    /**
     * Helper method for previewing a transform configuration
     *
     * The TransformConfig returned from this method should only be used for previewing the resulting data.
     *
     * A new, valid, TransformConfig with an appropriate destination and ID will have to be constructed to create
     * the transform.
     * @param source Source configuration for gathering the data
     * @param pivotConfig Config to preview
     * @return A TransformConfig to preview, NOTE it will have a {@code null} id, destination and index.
     */
    public static TransformConfig forPreview(final SourceConfig source, final PivotConfig pivotConfig) {
        return new TransformConfig(null, source, null, null, null, pivotConfig, null, null, null, null, null);
    }
    /**
     * Helper method for previewing a transform configuration
     *
     * The TransformConfig returned from this method should only be used for previewing the resulting data.
     *
     * A new, valid, TransformConfig with an appropriate destination and ID will have to be constructed to create
     * the transform.
     * @param source Source configuration for gathering the data
     * @param latestConfig Config to preview
     * @return A TransformConfig to preview, NOTE it will have a {@code null} id, destination and index.
     */
    public static TransformConfig forPreview(final SourceConfig source, final LatestConfig latestConfig) {
        return new TransformConfig(null, source, null, null, null, null, latestConfig, null, null, null, null);
    }
    /**
     * Package-private canonical constructor; every field may be {@code null}.
     * Used by {@link #PARSER}, {@link Builder#build()} and the preview factories.
     */
    TransformConfig(
        final String id,
        final SourceConfig source,
        final DestConfig dest,
        final TimeValue frequency,
        final SyncConfig syncConfig,
        final PivotConfig pivotConfig,
        final LatestConfig latestConfig,
        final String description,
        final SettingsConfig settings,
        final Instant createTime,
        final String version
    ) {
        this.id = id;
        this.source = source;
        this.dest = dest;
        this.frequency = frequency;
        this.syncConfig = syncConfig;
        this.pivotConfig = pivotConfig;
        this.latestConfig = latestConfig;
        this.description = description;
        this.settings = settings;
        // Normalize to millisecond precision, matching the serialized epoch-millis form.
        this.createTime = createTime == null ? null : Instant.ofEpochMilli(createTime.toEpochMilli());
        this.transformVersion = version == null ? null : Version.fromString(version);
    }
    public String getId() {
        return id;
    }
    public SourceConfig getSource() {
        return source;
    }
    public DestConfig getDestination() {
        return dest;
    }
    public TimeValue getFrequency() {
        return frequency;
    }
    public SyncConfig getSyncConfig() {
        return syncConfig;
    }
    public PivotConfig getPivotConfig() {
        return pivotConfig;
    }
    public LatestConfig getLatestConfig() {
        return latestConfig;
    }
    public Version getVersion() {
        return transformVersion;
    }
    public Instant getCreateTime() {
        return createTime;
    }
    @Nullable
    public String getDescription() {
        return description;
    }
    @Nullable
    public SettingsConfig getSettings() {
        return settings;
    }
    @Override
    public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
        // Only non-null fields are emitted, so absent fields survive a round trip unchanged.
        builder.startObject();
        if (id != null) {
            builder.field(ID.getPreferredName(), id);
        }
        if (source != null) {
            builder.field(SOURCE.getPreferredName(), source);
        }
        if (dest != null) {
            builder.field(DEST.getPreferredName(), dest);
        }
        if (frequency != null) {
            builder.field(FREQUENCY.getPreferredName(), frequency.getStringRep());
        }
        if (syncConfig != null) {
            // Sync is wrapped in an object keyed by the concrete implementation's name,
            // mirroring the shape expected by parseSyncConfig above.
            builder.startObject(SYNC.getPreferredName());
            builder.field(syncConfig.getName(), syncConfig);
            builder.endObject();
        }
        if (pivotConfig != null) {
            builder.field(PIVOT_TRANSFORM.getPreferredName(), pivotConfig);
        }
        if (latestConfig != null) {
            builder.field(LATEST_TRANSFORM.getPreferredName(), latestConfig);
        }
        if (description != null) {
            builder.field(DESCRIPTION.getPreferredName(), description);
        }
        if (settings != null) {
            builder.field(SETTINGS.getPreferredName(), settings);
        }
        if (createTime != null) {
            builder.timeField(CREATE_TIME.getPreferredName(), CREATE_TIME.getPreferredName() + "_string", createTime.toEpochMilli());
        }
        if (transformVersion != null) {
            builder.field(VERSION.getPreferredName(), transformVersion);
        }
        builder.endObject();
        return builder;
    }
    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }
        if (other == null || getClass() != other.getClass()) {
            return false;
        }
        final TransformConfig that = (TransformConfig) other;
        return Objects.equals(this.id, that.id)
            && Objects.equals(this.source, that.source)
            && Objects.equals(this.dest, that.dest)
            && Objects.equals(this.frequency, that.frequency)
            && Objects.equals(this.description, that.description)
            && Objects.equals(this.syncConfig, that.syncConfig)
            && Objects.equals(this.transformVersion, that.transformVersion)
            && Objects.equals(this.settings, that.settings)
            && Objects.equals(this.createTime, that.createTime)
            && Objects.equals(this.pivotConfig, that.pivotConfig)
            && Objects.equals(this.latestConfig, that.latestConfig);
    }
    @Override
    public int hashCode() {
        // Hashes the same field set compared in equals, keeping the two consistent.
        return Objects.hash(
            id, source, dest, frequency, syncConfig, settings, createTime, transformVersion, pivotConfig, latestConfig, description);
    }
    @Override
    public String toString() {
        return Strings.toString(this, true, true);
    }
    public static Builder builder() {
        return new Builder();
    }
    /**
     * Mutable builder for {@link TransformConfig}. {@code createTime} and {@code version}
     * are intentionally absent: they are not client-settable (see {@link #build()}).
     */
    public static class Builder {
        private String id;
        private SourceConfig source;
        private DestConfig dest;
        private TimeValue frequency;
        private SyncConfig syncConfig;
        private PivotConfig pivotConfig;
        private LatestConfig latestConfig;
        private SettingsConfig settings;
        private String description;
        public Builder setId(String id) {
            this.id = id;
            return this;
        }
        public Builder setSource(SourceConfig source) {
            this.source = source;
            return this;
        }
        public Builder setDest(DestConfig dest) {
            this.dest = dest;
            return this;
        }
        public Builder setFrequency(TimeValue frequency) {
            this.frequency = frequency;
            return this;
        }
        public Builder setSyncConfig(SyncConfig syncConfig) {
            this.syncConfig = syncConfig;
            return this;
        }
        public Builder setPivotConfig(PivotConfig pivotConfig) {
            this.pivotConfig = pivotConfig;
            return this;
        }
        public Builder setLatestConfig(LatestConfig latestConfig) {
            this.latestConfig = latestConfig;
            return this;
        }
        public Builder setDescription(String description) {
            this.description = description;
            return this;
        }
        public Builder setSettings(SettingsConfig settings) {
            this.settings = settings;
            return this;
        }
        public TransformConfig build() {
            // createTime and version are passed as null: client-built configs never carry them.
            return new TransformConfig(
                id, source, dest, frequency, syncConfig, pivotConfig, latestConfig, description, settings, null, null);
        }
    }
}
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.inspections;
import com.google.common.base.Function;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.intellij.codeInspection.LocalInspectionToolSession;
import com.intellij.codeInspection.ProblemsHolder;
import com.intellij.codeInspection.SuppressIntentionAction;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiElementVisitor;
import com.intellij.psi.util.PsiTreeUtil;
import com.jetbrains.python.PyBundle;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.documentation.DocStringUtil;
import com.jetbrains.python.inspections.quickfix.DocstringQuickFix;
import com.jetbrains.python.inspections.quickfix.PySuppressInspectionFix;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.testing.PythonUnitTestUtil;
import com.jetbrains.python.toolbox.Substring;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
 * Inspection that reports Python declarations (files, classes and functions) that are
 * missing a docstring, have an empty docstring, or whose documented parameters do not
 * match the function's real signature. Unit-test classes/functions and names starting
 * with an underscore are exempt.
 *
 * @author Alexey.Ivanov
 */
public class PyDocstringInspection extends PyInspection {
  @Nls
  @NotNull
  @Override
  public String getDisplayName() {
    return PyBundle.message("INSP.NAME.docstring");
  }
  @Override
  public boolean isEnabledByDefault() {
    // Opt-in: the inspection is off until the user enables it.
    return false;
  }
  @NotNull
  @Override
  public PsiElementVisitor buildVisitor(@NotNull ProblemsHolder holder,
                                        boolean isOnTheFly,
                                        @NotNull LocalInspectionToolSession session) {
    return new Visitor(holder, session);
  }
  /** PSI visitor that performs the actual docstring checks per file/function/class. */
  public static class Visitor extends PyInspectionVisitor {
    public Visitor(@Nullable ProblemsHolder holder, @NotNull LocalInspectionToolSession session) {
      super(holder, session);
    }
    @Override
    public void visitPyFile(PyFile node) {
      checkDocString(node);
    }
    @Override
    public void visitPyFunction(PyFunction node) {
      // Skip unit-test functions, methods of unit-test classes, and "_"-prefixed names.
      if (PythonUnitTestUtil.isUnitTestCaseFunction(node)) return;
      final PyClass containingClass = node.getContainingClass();
      if (containingClass != null && PythonUnitTestUtil.isUnitTestCaseClass(containingClass)) return;
      final String name = node.getName();
      if (name != null && !name.startsWith("_")) checkDocString(node);
    }
    @Override
    public void visitPyClass(PyClass node) {
      // Skip unit-test classes, "_"-prefixed names, and anything an extension opts out.
      if (PythonUnitTestUtil.isUnitTestCaseClass(node)) return;
      final String name = node.getName();
      if (name == null || name.startsWith("_")) {
        return;
      }
      for (PyInspectionExtension extension : Extensions.getExtensions(PyInspectionExtension.EP_NAME)) {
        if (extension.ignoreMissingDocstring(node)) {
          return;
        }
      }
      checkDocString(node);
    }
    /**
     * Reports a missing or empty docstring on {@code node}, or delegates to
     * {@link #checkParameters} when a docstring is present.
     */
    private void checkDocString(PyDocStringOwner node) {
      final PyStringLiteralExpression docStringExpression = node.getDocStringExpression();
      if (docStringExpression == null) {
        // Anchor the problem on the declaration's name node when one exists;
        // fall back to the whole element otherwise.
        PsiElement marker = null;
        if (node instanceof PyClass) {
          final ASTNode n = ((PyClass)node).getNameNode();
          if (n != null) marker = n.getPsi();
        }
        else if (node instanceof PyFunction) {
          final ASTNode n = ((PyFunction)node).getNameNode();
          if (n != null) marker = n.getPsi();
        }
        else if (node instanceof PyFile) {
          // Files have no name node; report at the very start of the file.
          TextRange tr = new TextRange(0, 0);
          ProblemsHolder holder = getHolder();
          if (holder != null) {
            holder.registerProblem(node, tr, PyBundle.message("INSP.no.docstring"));
          }
          return;
        }
        if (marker == null) marker = node;
        // Offer the generate-docstring quick fix only where one can be generated:
        // functions, and classes that define __init__ or __new__.
        if (node instanceof PyFunction || (node instanceof PyClass && ((PyClass)node).findInitOrNew(false) != null)) {
          registerProblem(marker, PyBundle.message("INSP.no.docstring"), new DocstringQuickFix(null, null));
        }
        else {
          registerProblem(marker, PyBundle.message("INSP.no.docstring"));
        }
      }
      else {
        boolean registered = checkParameters(node, docStringExpression);
        // Only flag emptiness when no parameter mismatch was already reported.
        if (!registered && StringUtil.isEmptyOrSpaces(docStringExpression.getStringValue())) {
          registerProblem(docStringExpression, PyBundle.message("INSP.empty.docstring"));
        }
      }
    }
    /**
     * Compares the docstring's documented parameters against the function's real
     * parameter list, reporting missing and unexpected entries.
     *
     * @return true if at least one problem was registered
     */
    private boolean checkParameters(PyDocStringOwner pyDocStringOwner, PyStringLiteralExpression node) {
      final String text = node.getText();
      if (text == null) {
        return false;
      }
      StructuredDocString docString = DocStringUtil.parse(text);
      if (docString == null) {
        return false;
      }
      List<Substring> docstringParams = docString.getParameterSubstrings();
      if (docstringParams == null) {
        return false;
      }
      if (pyDocStringOwner instanceof PyFunction) {
        PyDecoratorList decoratorList = ((PyFunction)pyDocStringOwner).getDecoratorList();
        // Class methods implicitly take "cls", which should not be documented.
        boolean isClassMethod = false;
        if (decoratorList != null) {
          isClassMethod = decoratorList.findDecorator(PyNames.CLASSMETHOD) != null;
        }
        PyParameter[] realParams = ((PyFunction)pyDocStringOwner).getParameterList().getParameters();
        List<PyParameter> missingParams = getMissingParams(realParams, docstringParams, isClassMethod);
        boolean registered = false;
        if (!missingParams.isEmpty()) {
          for (PyParameter param : missingParams) {
            registerProblem(param, "Missing parameter " + param.getName() + " in docstring",
                            new DocstringQuickFix(param.getName(), null));
          }
          registered = true;
        }
        List<Substring> unexpectedParams = getUnexpectedParams(docstringParams, realParams, node);
        if (!unexpectedParams.isEmpty()) {
          for (Substring param : unexpectedParams) {
            ProblemsHolder holder = getHolder();
            if (holder != null) {
              holder.registerProblem(node, param.getTextRange(),
                                     "Unexpected parameter " + param + " in docstring",
                                     new DocstringQuickFix(null, param.getValue()));
            }
          }
          registered = true;
        }
        return registered;
      }
      return false;
    }
    /** Documented parameters that do not correspond to any real parameter. */
    private static List<Substring> getUnexpectedParams(List<Substring> docstringParams,
                                                       PyParameter[] realParams,
                                                       PyStringLiteralExpression node) {
      Map<String, Substring> unexpected = Maps.newHashMap();
      for (Substring s : docstringParams) {
        unexpected.put(s.getValue(), s);
      }
      for (PyParameter p : realParams) {
        if (unexpected.containsKey(p.getName())) {
          unexpected.remove(p.getName());
        }
      }
      return Lists.newArrayList(unexpected.values());
    }
    /**
     * Real parameters absent from the docstring, excluding the implicit
     * "self"/"cls" receiver and the bare "*" separator.
     */
    private static List<PyParameter> getMissingParams(PyParameter[] realParams, List<Substring> docstringParams, boolean isClassMethod) {
      List<PyParameter> missing = new ArrayList<PyParameter>();
      Set<String> params = Sets.newHashSet(Lists.transform(docstringParams, new Function<Substring, String>() {
        @Override
        public String apply(Substring input) {
          return input.getValue();
        }
      }));
      boolean hasMissing = false;
      for (PyParameter p : realParams) {
        String paramText = p.getText();
        // NOTE(review): due to &&/|| precedence, the "*" exclusion applies only to the
        // non-classmethod branch — a bare "*" in a classmethod would not be excluded here.
        // Confirm whether that asymmetry is intended.
        if ((!isClassMethod && !paramText.equals(PyNames.CANONICAL_SELF)) && !paramText.equals("*") ||
            (isClassMethod && !paramText.equals("cls"))) {
          if (!params.contains(p.getName())) {
            hasMissing = true;
            missing.add(p);
          }
        }
      }
      return hasMissing ? missing : Collections.<PyParameter>emptyList();
    }
  }
  @Override
  public SuppressIntentionAction[] getSuppressActions(@Nullable PsiElement element) {
    // Offer suppression at the enclosing function and/or class, whichever exist.
    List<SuppressIntentionAction> result = new ArrayList<SuppressIntentionAction>();
    if (element != null) {
      if (PsiTreeUtil.getParentOfType(element, PyFunction.class) != null) {
        result.add(new PySuppressInspectionFix(getShortName().replace("Inspection", ""), "Suppress for function", PyFunction.class));
      }
      if (PsiTreeUtil.getParentOfType(element, PyClass.class) != null) {
        result.add(new PySuppressInspectionFix(getShortName().replace("Inspection", ""), "Suppress for class", PyClass.class));
      }
    }
    return result.toArray(new SuppressIntentionAction[result.size()]);
  }
}
| |
package io.vertx.sqlclient;
import junit.framework.AssertionFailedError;
import java.io.Serializable;
import java.lang.invoke.SerializedLambda;
import java.lang.reflect.Array;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Supplier;
import static org.junit.Assert.*;
/**
 * Fluent assertion helper for checking how a single result-set column behaves
 * across every registered accessor: {@link Tuple} accessors addressed by index
 * and {@link Row} accessors addressed by name.
 * <p>
 * Accessors registered via {@link #load} that are not explicitly covered by one
 * of the {@code returns}/{@code fails}/{@code skip} expectations are required to
 * throw {@link ClassCastException} when invoked for the column (see {@link #forRow}).
 */
public class ColumnChecker {

  // Accessors probed for every column; populated once via load().
  private static List<SerializableBiFunction<Tuple, Integer, ?>> tupleMethods = new ArrayList<>();
  private static List<SerializableBiFunction<Row, String, ?>> rowMethods = new ArrayList<>();

  /** Accessor reading the column by index as the given type. */
  public static SerializableBiFunction<Tuple, Integer, Object> getByIndex(Class<?> type) {
    return (tuple, index) -> tuple.get(type, index);
  }

  /** Accessor reading the column by name as the given type; yields null for unknown names. */
  public static SerializableBiFunction<Row, String, Object> getByName(Class<?> type) {
    return (row, index) -> {
      int idx = row.getColumnIndex(index);
      return idx == -1 ? null : row.get(type, idx);
    };
  }

  /** Accessor reading the column by index as an array of the given component type. */
  public static SerializableBiFunction<Tuple, Integer, Object> getValuesByIndex(Class<?> type) {
    return (tuple, index) -> tuple.get(Array.newInstance(type, 0).getClass(), index);
  }

  /** Accessor reading the column by name as an array of the given component type. */
  public static SerializableBiFunction<Row, String, Object> getValuesByName(Class<?> type) {
    return (row, index) -> {
      int idx = row.getColumnIndex(index);
      return idx == -1 ? null : row.get(Array.newInstance(type, 0).getClass(), idx);
    };
  }

  /**
   * Registers the full sets of tuple (by-index) and row (by-name) accessors that
   * {@link #forRow} will probe.
   */
  public static void load(Supplier<List<SerializableBiFunction<Tuple, Integer, ?>>> tupleMethodsFactory,
                          Supplier<List<SerializableBiFunction<Row, String, ?>>> rowMethodsFactory) {
    tupleMethods = tupleMethodsFactory.get();
    rowMethods = rowMethodsFactory.get();
  }

  /** Starts a check for the column at the given index / name. */
  public static ColumnChecker checkColumn(int index, String name) {
    return new ColumnChecker(index, name);
  }

  // Accessors excluded from the "must throw ClassCastException" probe.
  private final List<Method> blackList = new ArrayList<>();
  // Expectations to evaluate against each row.
  private final List<Consumer<? super Row>> expects = new ArrayList<>();
  private final int index;
  private final String name;

  private ColumnChecker(int index, String name) {
    this.index = index;
    this.name = name;
  }

  /** Expects every registered accessor to return {@code null} for this column. */
  public ColumnChecker returnsNull() {
    tupleMethods
      .forEach(m -> {
        blackList.add(m.method());
        expects.add(row -> {
          Object v = m.apply(row, index);
          assertNull(v);
        });
      });
    rowMethods
      .forEach(m -> {
        blackList.add(m.method());
        expects.add(row -> {
          Object v = m.apply(row, name);
          assertNull(v);
        });
      });
    return this;
  }

  /** Expects the typed get (by index and by name) to return {@code expected}. */
  public <R> ColumnChecker returns(Class<R> type, R expected) {
    return returns(getByIndex(type), getByName(type), expected);
  }

  /** Expects the typed array get (by index and by name) to return {@code expected}. */
  public <R> ColumnChecker returns(Class<R> type, R[] expected) {
    return returns(getValuesByIndex(type), getValuesByName(type), expected);
  }

  /**
   * Expects both getters to yield a value accepted by {@code check}; assertion
   * failures inside {@code check} are rewrapped with the offending accessor's name.
   */
  @SuppressWarnings("unchecked")
  public <R> ColumnChecker returns(SerializableBiFunction<Tuple, Integer, R> byIndexGetter,
                                   SerializableBiFunction<Row, String, R> byNameGetter,
                                   Consumer<R> check) {
    Method byIndexMeth = byIndexGetter.method();
    blackList.add(byIndexMeth);
    Method byNameMeth = byNameGetter.method();
    blackList.add(byNameMeth);
    expects.add(row -> {
      Object actual = byIndexGetter.apply(row, index);
      try {
        check.accept((R) actual);
      } catch (AssertionError cause) {
        AssertionFailedError failure = new AssertionFailedError("Expected that " + byIndexMeth + " would not fail: " + cause.getMessage());
        // Carry the cause's stack trace so the report points at the failing check,
        // not at this wrapper (the original code self-assigned its own trace, a no-op).
        failure.setStackTrace(cause.getStackTrace());
        throw failure;
      }
      actual = byNameGetter.apply(row, name);
      try {
        check.accept((R) actual);
      } catch (AssertionError cause) {
        AssertionFailedError failure = new AssertionFailedError("Expected that " + byNameMeth + " would not fail: " + cause.getMessage());
        failure.setStackTrace(cause.getStackTrace());
        throw failure;
      }
    });
    return this;
  }

  /** Expects both getters to return a value equal to {@code expected}. */
  public <R> ColumnChecker returns(SerializableBiFunction<Tuple, Integer, R> byIndexGetter,
                                   SerializableBiFunction<Row, String, R> byNameGetter,
                                   R expected) {
    return this.<R>returns(byIndexGetter, byNameGetter, actual -> assertEquals(expected, actual));
  }

  /** Expects both getters to return an array equal to {@code expected}. */
  public ColumnChecker returns(SerializableBiFunction<Tuple, Integer, Object> byIndexGetter,
                               SerializableBiFunction<Row, String, Object> byNameGetter,
                               Object[] expected) {
    Method byIndexMeth = byIndexGetter.method();
    blackList.add(byIndexMeth);
    Method byNameMeth = byNameGetter.method();
    blackList.add(byNameMeth);
    expects.add(row -> {
      Object[] actual = toObjectArray(byIndexGetter.apply(row, index));
      assertArrayEquals("Expected that " + byIndexMeth + " returns " + Arrays.toString(expected) + " instead of " + Arrays.toString(actual), expected, actual);
      actual = toObjectArray(byNameGetter.apply(row, name));
      assertArrayEquals("Expected that " + byNameMeth + " returns " + Arrays.toString(expected) + " instead of " + Arrays.toString(actual), expected, actual);
    });
    return this;
  }

  /** Excludes both getters from the ClassCastException probe without asserting anything. */
  public ColumnChecker skip(SerializableBiFunction<Tuple, Integer, Object> byIndexGetter,
                            SerializableBiFunction<Row, String, Object> byNameGetter) {
    Method byIndexMeth = byIndexGetter.method();
    blackList.add(byIndexMeth);
    Method byNameMeth = byNameGetter.method();
    blackList.add(byNameMeth);
    return this;
  }

  /** Expects both getters to return {@code expected} within {@code delta} (double flavor). */
  public ColumnChecker returns(SerializableBiFunction<Tuple, Integer, Double> byIndexGetter,
                               SerializableBiFunction<Row, String, Double> byNameGetter,
                               double expected, double delta) {
    blackList.add(byIndexGetter.method());
    blackList.add(byNameGetter.method());
    expects.add(row -> {
      Object actual = byIndexGetter.apply(row, index);
      assertEquals("Expected that " + byIndexGetter.method() + " returns " + expected + " instead of " + actual, expected, (double)actual, delta);
      actual = byNameGetter.apply(row, name);
      assertEquals("Expected that " + byNameGetter.method() + " returns " + expected + " instead of " + actual, expected, (double)actual, delta);
    });
    return this;
  }

  /** Expects both getters to return {@code expected} within {@code delta} (float flavor). */
  public ColumnChecker returns(SerializableBiFunction<Tuple, Integer, Float> byIndexGetter,
                               SerializableBiFunction<Row, String, Float> byNameGetter,
                               float expected, float delta) {
    blackList.add(byIndexGetter.method());
    blackList.add(byNameGetter.method());
    expects.add(row -> {
      Object actual = byIndexGetter.apply(row, index);
      assertEquals("Expected that " + byIndexGetter.method() + " returns " + expected + " instead of " + actual, expected, (float)actual, delta);
      actual = byNameGetter.apply(row, name);
      assertEquals("Expected that " + byNameGetter.method() + " returns " + expected + " instead of " + actual, expected, (float)actual, delta);
    });
    return this;
  }

  /** Expects both getters to throw (any exception) for this column. */
  public <R> ColumnChecker fails(SerializableBiFunction<Tuple, Integer, R> byIndexGetter,
                                 SerializableBiFunction<Row, String, R> byNameGetter) {
    blackList.add(byIndexGetter.method());
    blackList.add(byNameGetter.method());
    expects.add(row -> {
      try {
        byIndexGetter.apply(row, index);
        fail("Expected that " + byIndexGetter.method() + " would throw an exception");
      } catch (Exception ignore) {
      }
      try {
        byNameGetter.apply(row, name);
        fail("Expected that " + byNameGetter.method() + " would throw an exception");
      } catch (Exception ignore) {
      }
    });
    return this;
  }

  /**
   * Runs the check against a row: every non-blacklisted accessor must throw
   * {@link ClassCastException}, then each recorded expectation is evaluated.
   */
  public void forRow(Row row) {
    for (SerializableBiFunction<Tuple, Integer, ?> m : tupleMethods) {
      if (!blackList.contains(m.method())) {
        try {
          // Single probe per accessor (the original invoked it twice by mistake).
          Object v = m.apply(row, index);
          fail("Was expecting " + m.method() + " to throw ClassCastException instead of returning " + v);
        } catch (ClassCastException ignore) {
        }
      }
    }
    for (SerializableBiFunction<Row, String, ?> m : rowMethods) {
      if (!blackList.contains(m.method())) {
        try {
          Object v = m.apply(row, name);
          fail("Was expecting " + m.method() + " to throw ClassCastException instead of returning " + v);
        } catch (ClassCastException ignore) {
        }
      }
    }
    for (Consumer<? super Row> e : expects) {
      e.accept(row);
    }
  }

  /**
   * Recovers the {@link Method} behind a serializable lambda / method reference.
   */
  interface MethodReferenceReflection {
    //inspired by: http://benjiweber.co.uk/blog/2015/08/17/lambda-parameter-names-with-reflection/

    /** Extracts the synthetic {@code writeReplace} form of this serializable lambda. */
    default SerializedLambda serialized() {
      try {
        Method replaceMethod = getClass().getDeclaredMethod("writeReplace");
        replaceMethod.setAccessible(true);
        return (SerializedLambda) replaceMethod.invoke(this);
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    }

    /** Loads the class declaring the lambda's implementation method. */
    default Class<?> getContainingClass() {
      try {
        String className = serialized().getImplClass().replace('/', '.');
        return Class.forName(className);
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    }

    /** Finds the first declared method matching the lambda's implementation-method name. */
    default Method method() {
      SerializedLambda lambda = serialized();
      Class<?> containingClass = getContainingClass();
      return Arrays.stream(containingClass.getDeclaredMethods())
        .filter(method -> Objects.equals(method.getName(), lambda.getImplMethodName()))
        .findFirst()
        .orElseThrow(MethodReferenceReflection.UnableToGuessMethodException::new);
    }

    class UnableToGuessMethodException extends RuntimeException {}
  }

  /** A {@link BiFunction} that is serializable so its backing {@link Method} can be recovered. */
  public interface SerializableBiFunction<O, T, R> extends BiFunction<O, T, R>, Serializable, MethodReferenceReflection {}

  /**
   * Converts an arbitrary array (including primitive arrays) to an {@code Object[]}.
   * Returns an empty array for {@code null}; rejects non-array input.
   */
  public static Object[] toObjectArray(Object source) {
    if (source instanceof Object[]) {
      return (Object[]) source;
    }
    if (source == null) {
      return new Object[0];
    }
    if (!source.getClass().isArray()) {
      throw new IllegalArgumentException("Source is not an array: " + source);
    }
    int length = Array.getLength(source);
    if (length == 0) {
      return new Object[0];
    }
    // Use the wrapper type of the first element so primitives are boxed.
    Class<?> wrapperType = Array.get(source, 0).getClass();
    Object[] newArray = (Object[]) Array.newInstance(wrapperType, length);
    for (int i = 0; i < length; i++) {
      newArray[i] = Array.get(source, i);
    }
    return newArray;
  }
}
| |
/*
* Copyright (c) 2001-2008 Caucho Technology, Inc. All rights reserved.
*
* The Apache Software License, Version 1.1
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution, if
* any, must include the following acknowlegement:
* "This product includes software developed by the
* Caucho Technology (http://www.caucho.com/)."
* Alternately, this acknowlegement may appear in the software itself,
* if and wherever such third-party acknowlegements normally appear.
*
* 4. The names "Burlap", "Resin", and "Caucho" must not be used to
* endorse or promote products derived from this software without prior
* written permission. For written permission, please contact
* info@caucho.com.
*
* 5. Products derived from this software may not be called "Resin"
* nor may "Resin" appear in their names without prior written
* permission of Caucho Technology.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL CAUCHO TECHNOLOGY OR ITS CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
* OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
* OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* @author Scott Ferguson
*/
package com.caucho.hessian.io;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.HashMap;
import java.util.logging.*;
/**
* Serializing an object for known object types.
*/
/**
 * Deserializes an object of a known concrete type: picks the cheapest declared
 * constructor, fills the instance field-by-field from the Hessian stream, and
 * honors {@code readResolve()} if the class declares one.
 */
public class JavaDeserializer extends AbstractMapDeserializer {
  private static final Logger log
    = Logger.getLogger(JavaDeserializer.class.getName());

  private Class _type;
  // field name -> FieldDeserializer, including inherited fields (subclass wins).
  private HashMap _fieldMap;
  private Method _readResolve;
  private Constructor _constructor;
  // default arguments (zeros / nulls) matching _constructor's parameter types.
  private Object []_constructorArgs;

  public JavaDeserializer(Class cl)
  {
    _type = cl;
    _fieldMap = getFieldMap(cl);

    _readResolve = getReadResolve(cl);

    if (_readResolve != null) {
      _readResolve.setAccessible(true);
    }

    // Choose the constructor with the lowest "cost": fewer parameters are
    // strongly preferred (length is folded in above bit 48), and among equal
    // arities, easily-defaultable parameter types (Object, String, primitives)
    // are preferred.
    Constructor []constructors = cl.getDeclaredConstructors();
    long bestCost = Long.MAX_VALUE;

    for (int i = 0; i < constructors.length; i++) {
      Class []param = constructors[i].getParameterTypes();
      long cost = 0;

      for (int j = 0; j < param.length; j++) {
        cost = 4 * cost;

        if (Object.class.equals(param[j]))
          cost += 1;
        else if (String.class.equals(param[j]))
          cost += 2;
        else if (int.class.equals(param[j]))
          cost += 3;
        else if (long.class.equals(param[j]))
          cost += 4;
        else if (param[j].isPrimitive())
          cost += 5;
        else
          cost += 6;
      }

      // Clamp before adding the arity term. NOTE: this must be a long shift;
      // the original "1 << 48" was an int shift that silently wrapped to 1 << 16,
      // making the clamp kick in far too early.
      if (cost < 0 || cost > (1L << 48))
        cost = 1L << 48;

      cost += (long) param.length << 48;

      if (cost < bestCost) {
        _constructor = constructors[i];
        bestCost = cost;
      }
    }

    if (_constructor != null) {
      _constructor.setAccessible(true);
      Class []params = _constructor.getParameterTypes();
      _constructorArgs = new Object[params.length];
      for (int i = 0; i < params.length; i++) {
        _constructorArgs[i] = getParamArg(params[i]);
      }
    }
  }

  /** Returns the concrete type this deserializer instantiates. */
  public Class getType()
  {
    return _type;
  }

  /** Instantiates the target type and reads it as a Hessian map. */
  public Object readMap(AbstractHessianInput in)
    throws IOException
  {
    try {
      Object obj = instantiate();

      return readMap(in, obj);
    } catch (IOException e) {
      throw e;
    } catch (RuntimeException e) {
      throw e;
    } catch (Exception e) {
      throw new IOExceptionWrapper(_type.getName() + ":" + e.getMessage(), e);
    }
  }

  /** Instantiates the target type and reads it using a fixed field-name list. */
  public Object readObject(AbstractHessianInput in, String []fieldNames)
    throws IOException
  {
    try {
      Object obj = instantiate();

      return readObject(in, obj, fieldNames);
    } catch (IOException e) {
      throw e;
    } catch (RuntimeException e) {
      throw e;
    } catch (Exception e) {
      throw new IOExceptionWrapper(_type.getName() + ":" + e.getMessage(), e);
    }
  }

  /**
   * Returns the readResolve method
   */
  protected Method getReadResolve(Class cl)
  {
    // Walk up the hierarchy; the first no-arg readResolve() wins.
    for (; cl != null; cl = cl.getSuperclass()) {
      Method []methods = cl.getDeclaredMethods();

      for (int i = 0; i < methods.length; i++) {
        Method method = methods[i];

        if (method.getName().equals("readResolve") &&
            method.getParameterTypes().length == 0)
          return method;
      }
    }

    return null;
  }

  /**
   * Populates {@code obj} from a Hessian map: known keys go through their
   * field deserializer, unknown keys are skipped, then readResolve is applied.
   */
  public Object readMap(AbstractHessianInput in, Object obj)
    throws IOException
  {
    try {
      int ref = in.addRef(obj);

      while (! in.isEnd()) {
        Object key = in.readObject();

        FieldDeserializer deser = (FieldDeserializer) _fieldMap.get(key);

        if (deser != null)
          deser.deserialize(in, obj);
        else
          in.readObject();  // skip the value of an unknown field
      }

      in.readMapEnd();

      Object resolve = resolve(obj);

      // If readResolve substituted a different object, fix the back-reference.
      if (obj != resolve)
        in.setRef(ref, resolve);

      return resolve;
    } catch (IOException e) {
      throw e;
    } catch (Exception e) {
      throw new IOExceptionWrapper(e);
    }
  }

  /**
   * Populates {@code obj} from a compact object encoding whose field order is
   * given by {@code fieldNames}; unknown names are skipped.
   */
  public Object readObject(AbstractHessianInput in,
                           Object obj,
                           String []fieldNames)
    throws IOException
  {
    try {
      int ref = in.addRef(obj);

      for (int i = 0; i < fieldNames.length; i++) {
        String name = fieldNames[i];

        FieldDeserializer deser = (FieldDeserializer) _fieldMap.get(name);

        if (deser != null)
          deser.deserialize(in, obj);
        else
          in.readObject();  // skip the value of an unknown field
      }

      Object resolve = resolve(obj);

      if (obj != resolve)
        in.setRef(ref, resolve);

      return resolve;
    } catch (IOException e) {
      throw e;
    } catch (Exception e) {
      throw new IOExceptionWrapper(obj.getClass().getName() + ":" + e, e);
    }
  }

  private Object resolve(Object obj)
    throws Exception
  {
    // if there's a readResolve method, call it
    try {
      if (_readResolve != null)
        return _readResolve.invoke(obj, new Object[0]);
    } catch (InvocationTargetException e) {
      // NOTE: an InvocationTargetException with a null target is deliberately
      // swallowed here (preserved historical behavior) and obj is returned as-is.
      if (e.getTargetException() != null)
        throw e;
    }

    return obj;
  }

  /** Creates a raw instance via the chosen constructor (or the no-arg path). */
  protected Object instantiate()
    throws Exception
  {
    try {
      if (_constructor != null)
        return _constructor.newInstance(_constructorArgs);
      else
        return _type.newInstance();
    } catch (Exception e) {
      throw new HessianProtocolException("'" + _type.getName() + "' could not be instantiated", e);
    }
  }

  /**
   * Creates a map of the classes fields.
   */
  protected HashMap getFieldMap(Class cl)
  {
    HashMap fieldMap = new HashMap();

    for (; cl != null; cl = cl.getSuperclass()) {
      Field []fields = cl.getDeclaredFields();
      for (int i = 0; i < fields.length; i++) {
        Field field = fields[i];

        // transient/static fields are not serialized; a field already present
        // was declared in a subclass and shadows this one.
        if (Modifier.isTransient(field.getModifiers())
            || Modifier.isStatic(field.getModifiers()))
          continue;
        else if (fieldMap.get(field.getName()) != null)
          continue;

        // XXX: could parameterize the handler to only deal with public
        try {
          field.setAccessible(true);
        } catch (Throwable e) {
          e.printStackTrace();
        }

        // Pick a type-specific deserializer so primitives avoid boxing and
        // java.sql date types are rebuilt from java.util.Date.
        Class type = field.getType();
        FieldDeserializer deser;

        if (String.class.equals(type))
          deser = new StringFieldDeserializer(field);
        else if (byte.class.equals(type)) {
          deser = new ByteFieldDeserializer(field);
        }
        else if (short.class.equals(type)) {
          deser = new ShortFieldDeserializer(field);
        }
        else if (int.class.equals(type)) {
          deser = new IntFieldDeserializer(field);
        }
        else if (long.class.equals(type)) {
          deser = new LongFieldDeserializer(field);
        }
        else if (float.class.equals(type)) {
          deser = new FloatFieldDeserializer(field);
        }
        else if (double.class.equals(type)) {
          deser = new DoubleFieldDeserializer(field);
        }
        else if (boolean.class.equals(type)) {
          deser = new BooleanFieldDeserializer(field);
        }
        else if (java.sql.Date.class.equals(type)) {
          deser = new SqlDateFieldDeserializer(field);
        }
        else if (java.sql.Timestamp.class.equals(type)) {
          deser = new SqlTimestampFieldDeserializer(field);
        }
        else if (java.sql.Time.class.equals(type)) {
          deser = new SqlTimeFieldDeserializer(field);
        }
        else {
          deser = new ObjectFieldDeserializer(field);
        }

        fieldMap.put(field.getName(), deser);
      }
    }

    return fieldMap;
  }

  /**
   * Returns a default argument value for a constructor parameter type:
   * null for references, zero/false for primitives.
   */
  protected static Object getParamArg(Class cl)
  {
    if (! cl.isPrimitive())
      return null;
    else if (boolean.class.equals(cl))
      return Boolean.FALSE;
    else if (byte.class.equals(cl))
      return Byte.valueOf((byte) 0);
    else if (short.class.equals(cl))
      return Short.valueOf((short) 0);
    else if (char.class.equals(cl))
      return Character.valueOf((char) 0);
    else if (int.class.equals(cl))
      return Integer.valueOf(0);
    else if (long.class.equals(cl))
      return Long.valueOf(0);
    else if (float.class.equals(cl))
      return Float.valueOf(0);
    else if (double.class.equals(cl))
      return Double.valueOf(0);
    else
      throw new UnsupportedOperationException();
  }

  /** Reads one field's value from the stream and stores it into the target object. */
  abstract static class FieldDeserializer {
    abstract void deserialize(AbstractHessianInput in, Object obj)
      throws IOException;
  }

  static class ObjectFieldDeserializer extends FieldDeserializer {
    private final Field _field;

    ObjectFieldDeserializer(Field field)
    {
      _field = field;
    }

    void deserialize(AbstractHessianInput in, Object obj)
      throws IOException
    {
      Object value = null;

      try {
        value = in.readObject(_field.getType());

        _field.set(obj, value);
      } catch (Exception e) {
        logDeserializeError(_field, obj, value, e);
      }
    }
  }

  static class BooleanFieldDeserializer extends FieldDeserializer {
    private final Field _field;

    BooleanFieldDeserializer(Field field)
    {
      _field = field;
    }

    void deserialize(AbstractHessianInput in, Object obj)
      throws IOException
    {
      boolean value = false;

      try {
        value = in.readBoolean();

        _field.setBoolean(obj, value);
      } catch (Exception e) {
        logDeserializeError(_field, obj, value, e);
      }
    }
  }

  static class ByteFieldDeserializer extends FieldDeserializer {
    private final Field _field;

    ByteFieldDeserializer(Field field)
    {
      _field = field;
    }

    void deserialize(AbstractHessianInput in, Object obj)
      throws IOException
    {
      int value = 0;

      try {
        value = in.readInt();

        _field.setByte(obj, (byte) value);
      } catch (Exception e) {
        logDeserializeError(_field, obj, value, e);
      }
    }
  }

  static class ShortFieldDeserializer extends FieldDeserializer {
    private final Field _field;

    ShortFieldDeserializer(Field field)
    {
      _field = field;
    }

    void deserialize(AbstractHessianInput in, Object obj)
      throws IOException
    {
      int value = 0;

      try {
        value = in.readInt();

        _field.setShort(obj, (short) value);
      } catch (Exception e) {
        logDeserializeError(_field, obj, value, e);
      }
    }
  }

  static class IntFieldDeserializer extends FieldDeserializer {
    private final Field _field;

    IntFieldDeserializer(Field field)
    {
      _field = field;
    }

    void deserialize(AbstractHessianInput in, Object obj)
      throws IOException
    {
      int value = 0;

      try {
        value = in.readInt();

        _field.setInt(obj, value);
      } catch (Exception e) {
        logDeserializeError(_field, obj, value, e);
      }
    }
  }

  static class LongFieldDeserializer extends FieldDeserializer {
    private final Field _field;

    LongFieldDeserializer(Field field)
    {
      _field = field;
    }

    void deserialize(AbstractHessianInput in, Object obj)
      throws IOException
    {
      long value = 0;

      try {
        value = in.readLong();

        _field.setLong(obj, value);
      } catch (Exception e) {
        logDeserializeError(_field, obj, value, e);
      }
    }
  }

  static class FloatFieldDeserializer extends FieldDeserializer {
    private final Field _field;

    FloatFieldDeserializer(Field field)
    {
      _field = field;
    }

    void deserialize(AbstractHessianInput in, Object obj)
      throws IOException
    {
      double value = 0;

      try {
        value = in.readDouble();

        _field.setFloat(obj, (float) value);
      } catch (Exception e) {
        logDeserializeError(_field, obj, value, e);
      }
    }
  }

  static class DoubleFieldDeserializer extends FieldDeserializer {
    private final Field _field;

    DoubleFieldDeserializer(Field field)
    {
      _field = field;
    }

    void deserialize(AbstractHessianInput in, Object obj)
      throws IOException
    {
      double value = 0;

      try {
        value = in.readDouble();

        _field.setDouble(obj, value);
      } catch (Exception e) {
        logDeserializeError(_field, obj, value, e);
      }
    }
  }

  static class StringFieldDeserializer extends FieldDeserializer {
    private final Field _field;

    StringFieldDeserializer(Field field)
    {
      _field = field;
    }

    void deserialize(AbstractHessianInput in, Object obj)
      throws IOException
    {
      String value = null;

      try {
        value = in.readString();

        _field.set(obj, value);
      } catch (Exception e) {
        logDeserializeError(_field, obj, value, e);
      }
    }
  }

  static class SqlDateFieldDeserializer extends FieldDeserializer {
    private final Field _field;

    SqlDateFieldDeserializer(Field field)
    {
      _field = field;
    }

    void deserialize(AbstractHessianInput in, Object obj)
      throws IOException
    {
      java.sql.Date value = null;

      try {
        // Wire format carries a java.util.Date; rebuild the sql subtype from it.
        java.util.Date date = (java.util.Date) in.readObject();
        value = new java.sql.Date(date.getTime());

        _field.set(obj, value);
      } catch (Exception e) {
        logDeserializeError(_field, obj, value, e);
      }
    }
  }

  static class SqlTimestampFieldDeserializer extends FieldDeserializer {
    private final Field _field;

    SqlTimestampFieldDeserializer(Field field)
    {
      _field = field;
    }

    void deserialize(AbstractHessianInput in, Object obj)
      throws IOException
    {
      java.sql.Timestamp value = null;

      try {
        java.util.Date date = (java.util.Date) in.readObject();
        value = new java.sql.Timestamp(date.getTime());

        _field.set(obj, value);
      } catch (Exception e) {
        logDeserializeError(_field, obj, value, e);
      }
    }
  }

  static class SqlTimeFieldDeserializer extends FieldDeserializer {
    private final Field _field;

    SqlTimeFieldDeserializer(Field field)
    {
      _field = field;
    }

    void deserialize(AbstractHessianInput in, Object obj)
      throws IOException
    {
      java.sql.Time value = null;

      try {
        java.util.Date date = (java.util.Date) in.readObject();
        value = new java.sql.Time(date.getTime());

        _field.set(obj, value);
      } catch (Exception e) {
        logDeserializeError(_field, obj, value, e);
      }
    }
  }

  /**
   * Rewraps a per-field failure as a HessianFieldException naming the field;
   * despite the name, this always throws.
   */
  static void logDeserializeError(Field field, Object obj, Object value,
                                  Throwable e)
    throws IOException
  {
    String fieldName = (field.getDeclaringClass().getName()
                        + "." + field.getName());

    if (e instanceof HessianFieldException)
      throw (HessianFieldException) e;
    else if (e instanceof IOException)
      throw new HessianFieldException(fieldName + ": " + e.getMessage(), e);

    if (value != null)
      throw new HessianFieldException(fieldName + ": " + value.getClass().getName() + " (" + value + ")"
                                      + " cannot be assigned to '" + field.getType().getName() + "'", e);
    else
      throw new HessianFieldException(fieldName + ": " + field.getType().getName() + " cannot be assigned from null", e);
  }
}
| |
/**
* Copyright 2005-2016 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.agent;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Dictionary;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import io.fabric8.agent.download.DownloadCallback;
import io.fabric8.agent.download.DownloadManager;
import io.fabric8.agent.download.DownloadManagers;
import io.fabric8.agent.download.Downloader;
import io.fabric8.agent.download.StreamProvider;
import io.fabric8.agent.internal.Macro;
import io.fabric8.agent.service.Agent;
import io.fabric8.agent.service.Constants;
import io.fabric8.agent.service.FeatureConfigInstaller;
import io.fabric8.agent.service.State;
import io.fabric8.api.Container;
import io.fabric8.api.CuratorComplete;
import io.fabric8.api.FabricService;
import io.fabric8.api.Profile;
import io.fabric8.common.util.ChecksumUtils;
import io.fabric8.common.util.Files;
import io.fabric8.maven.MavenResolver;
import io.fabric8.maven.MavenResolvers;
import io.fabric8.patch.FabricPatchService;
import io.fabric8.patch.management.PatchManagement;
import io.fabric8.utils.NamedThreadFactory;
import io.fabric8.zookeeper.ZkPath;
import io.fabric8.zookeeper.utils.ZooKeeperUtils;
import org.apache.curator.framework.CuratorFramework;
import org.apache.felix.utils.properties.Properties;
import org.apache.felix.utils.version.VersionRange;
import org.apache.zookeeper.data.Stat;
import org.eclipse.jgit.api.errors.TransportException;
import org.eclipse.jgit.errors.RemoteRepositoryException;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.osgi.framework.BundleException;
import org.osgi.framework.ServiceReference;
import org.osgi.framework.wiring.BundleRevision;
import org.osgi.resource.Resource;
import org.osgi.service.cm.ConfigurationAdmin;
import org.osgi.service.cm.ConfigurationException;
import org.osgi.service.cm.ManagedService;
import org.osgi.util.tracker.ServiceTracker;
import org.osgi.util.tracker.ServiceTrackerCustomizer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static io.fabric8.agent.resolver.ResourceUtils.getUri;
import static io.fabric8.agent.service.Constants.DEFAULT_BUNDLE_UPDATE_RANGE;
import static io.fabric8.agent.service.Constants.DEFAULT_FEATURE_RESOLUTION_RANGE;
import static io.fabric8.agent.service.Constants.DEFAULT_UPDATE_SNAPSHOTS;
import static io.fabric8.agent.utils.AgentUtils.addMavenProxies;
import static io.fabric8.zookeeper.utils.ZooKeeperUtils.exists;
/**
 * Fabric8 agent entry point that reacts to Config Admin updates (it implements
 * {@link ManagedService}) to provision the container.
 * <p>
 * NOTE(review): the class body continues beyond this section; only the fields
 * are documented here.
 */
public class DeploymentAgent implements ManagedService {
private static final Logger LOGGER = LoggerFactory.getLogger(DeploymentAgent.class);
// Fallback for the download thread-pool size when the property below is absent.
private static final String DEFAULT_DOWNLOAD_THREADS = "4";
private static final String DOWNLOAD_THREADS = "io.fabric8.agent.download.threads";
// Monotonic suffix used to build a unique deploymentAgentId per agent instance.
private static long agentCounter = 1;
// Standard Karaf directory layout, resolved from system properties at class load.
private static final String KARAF_HOME = System.getProperty("karaf.home");
private static final String KARAF_BASE = System.getProperty("karaf.base");
private static final String KARAF_DATA = System.getProperty("karaf.data");
private static final String KARAF_ETC = System.getProperty("karaf.etc");
private static final String SYSTEM_PATH = KARAF_HOME + File.separator + "system";
private static final String LIB_PATH = KARAF_BASE + File.separator + "lib";
private static final String LIB_EXT_PATH = LIB_PATH + File.separator + "ext";
private static final String LIB_ENDORSED_PATH = LIB_PATH + File.separator + "endorsed";
private static final String STATE_FILE = "state.json";
// Trackers for services that may come and go in the OSGi registry.
private ServiceTracker<FabricService, FabricService> fabricService;
private ServiceTracker<CuratorComplete, CuratorComplete> curatorCompleteService;
// Single-threaded executor for agent tasks; separate scheduled pool for downloads.
private final ExecutorService executor;
private final ScheduledExecutorService downloadExecutor;
private final BundleContext bundleContext;
private final BundleContext systemBundleContext;
// Checksums of managed libraries/config files, persisted in the bundle data area.
private final Properties libChecksums;
private final Properties endorsedChecksums;
private final Properties extensionChecksums;
private final Properties etcChecksums;
private final Properties managedLibs;
private final Properties managedEndorsedLibs;
private final Properties managedExtensionLibs;
private final Properties managedSysProps;
private final Properties managedConfigProps;
private final Properties managedEtcs;
// Provisioning progress/result, written by worker threads and read elsewhere.
private volatile String provisioningStatus;
private volatile Throwable provisioningError;
private volatile Collection<Resource> provisionList;
private volatile boolean requiresRestart = false;
private volatile boolean fabricNotAvailableLogged;
// Maven proxy endpoints, refreshed from the FabricService under the lock below.
private volatile String httpUrl;
private volatile List<URI> mavenRepoURIs = new ArrayList<URI>();
// lock to operate on fabricService.getCurrentContainer().getHttpUrl()
// and service.getMavenRepoURIs()
// see ENTESB-2370: OSE Maven artifacts uploaded to fabric proxy cannot be resolved by containers
private Lock fabricServiceOperations = new ReentrantLock();
private final State state = new State();
// Unique id of this agent instance, e.g. "fabric-agent-<revision>.<counter>".
private final String deploymentAgentId;
/**
 * Creates the agent for the given bundle context: loads the persisted
 * checksum/config properties from the bundle data area, builds the executors,
 * and opens service trackers for {@code FabricService} and
 * {@code CuratorComplete}.
 *
 * @param bundleContext this bundle's context; the system bundle's context is
 *                      derived from it for service tracking
 * @throws IOException if any of the backing properties files cannot be read
 */
public DeploymentAgent(BundleContext bundleContext) throws IOException {
    this.bundleContext = bundleContext;
    // Track services through the system bundle so trackers survive this bundle's updates.
    this.systemBundleContext = bundleContext.getBundle(0).getBundleContext();
    // Persisted state lives in this bundle's private data area.
    this.libChecksums = new Properties(bundleContext.getDataFile("lib-checksums.properties"));
    this.endorsedChecksums = new Properties(bundleContext.getDataFile("endorsed-checksums.properties"));
    this.extensionChecksums = new Properties(bundleContext.getDataFile("extension-checksums.properties"));
    this.etcChecksums = new Properties(bundleContext.getDataFile("etc-checksums.properties"));
    this.managedSysProps = new Properties(bundleContext.getDataFile("system.properties"));
    this.managedConfigProps = new Properties(bundleContext.getDataFile("config.properties"));
    this.managedLibs = new Properties(bundleContext.getDataFile("libs.properties"));
    this.managedEndorsedLibs = new Properties(bundleContext.getDataFile("endorsed.properties"));
    this.managedExtensionLibs = new Properties(bundleContext.getDataFile("extension.properties"));
    this.managedEtcs = new Properties(bundleContext.getDataFile("etc.properties"));
    // Unique id: bundle revision + per-class counter (counter increment is not
    // synchronized — presumably agents are constructed from a single thread).
    String revision = bundleContext.getBundle().adapt(BundleRevision.class).toString();
    deploymentAgentId = String.format("fabric-agent-%s.%s", revision, agentCounter++);
    this.executor = Executors.newSingleThreadExecutor(new NamedThreadFactory(deploymentAgentId));
    this.downloadExecutor = createDownloadExecutor();
    // Push the current provisioning status to the FabricService whenever it (re)appears.
    fabricService = new ServiceTracker<>(systemBundleContext, FabricService.class, new ServiceTrackerCustomizer<FabricService, FabricService>() {
        @Override
        public FabricService addingService(ServiceReference<FabricService> reference) {
            FabricService service = systemBundleContext.getService(reference);
            if (provisioningStatus != null) {
                updateStatus(service, provisioningStatus, provisioningError, true);
            }
            return service;
        }
        @Override
        public void modifiedService(ServiceReference<FabricService> reference, FabricService service) {
            if (provisioningStatus != null) {
                updateStatus(service, provisioningStatus, provisioningError, true);
            }
        }
        @Override
        public void removedService(ServiceReference<FabricService> reference, FabricService service) {
            // TODO: what if Config Admin causes invocation of doUpdate()? should we keep old httpUrl and mavenRepoURIs?
        }
    });
    fabricService.open();
    curatorCompleteService = new ServiceTracker<CuratorComplete, CuratorComplete>(systemBundleContext, CuratorComplete.class, null);
    curatorCompleteService.open();
}
/**
 * Refreshes the cached {@code httpUrl} and {@code mavenRepoURIs} fields from the
 * given {@link FabricService} under {@code fabricServiceOperations} so readers in
 * {@link #doUpdate} see a consistent pair. Failures are logged and swallowed:
 * provisioning continues with the previously cached values.
 *
 * @param service the fabric service to read the current container's repo config from
 */
private void updateMavenRepositoryConfiguration(FabricService service) {
    LOGGER.info("Updating Maven Repository Configuration");
    // Acquire the lock BEFORE the try block: if lock() were inside the try and threw,
    // the finally would call unlock() on a lock we never held (IllegalMonitorStateException).
    fabricServiceOperations.lock();
    try {
        httpUrl = service.getCurrentContainer().getHttpUrl();
        mavenRepoURIs = service.getMavenRepoURIs();
        LOGGER.info("Maven repository configuration correctly updated: httpUrl=[{}], mavenRepoURIs=[{}]", httpUrl, mavenRepoURIs);
    } catch (RuntimeException e) {
        // Best-effort: keep the old configuration if the fabric service misbehaves.
        LOGGER.info("It's been impossible to correctly update maven repositories configuration");
        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("Detailed Exception", e);
        }
    } finally {
        fabricServiceOperations.unlock();
    }
}
/**
 * Creates the scheduled thread pool used for artifact downloads. The pool size is
 * read from {@code etc/custom.properties} (key {@code DOWNLOAD_THREADS}), falling
 * back to {@code DEFAULT_DOWNLOAD_THREADS} when the file or the key is absent
 * or the value is not a valid integer.
 *
 * @return a new scheduled executor for the fabric-agent-download pool
 */
protected ScheduledExecutorService createDownloadExecutor() {
    // TODO: this should not be loaded from a static file
    // TODO: or at least from the bundle context, but preferably from the config
    String size = DEFAULT_DOWNLOAD_THREADS;
    try {
        Properties customProps = new Properties(new File(KARAF_BASE + File.separator + "etc" + File.separator + "custom.properties"));
        size = customProps.getProperty(DOWNLOAD_THREADS, size);
    } catch (Exception e) {
        // ignore: a missing/unreadable custom.properties just means "use the default size"
    }
    int num;
    try {
        num = Integer.parseInt(size);
    } catch (NumberFormatException e) {
        // Previously parseInt ran outside the try, so a malformed property value
        // aborted agent construction. Fall back to the default pool size instead.
        LOGGER.warn("Invalid value for {}: [{}], falling back to default: {}", DOWNLOAD_THREADS, size, DEFAULT_DOWNLOAD_THREADS);
        num = Integer.parseInt(DEFAULT_DOWNLOAD_THREADS);
    }
    LOGGER.info("Creating fabric-agent-download thread pool with size: {}", num);
    return Executors.newScheduledThreadPool(num, new NamedThreadFactory("fabric-agent-download"));
}
/**
 * Starts the agent: seeds the in-memory checksum tables from the libraries already
 * on disk (so {@link #doUpdate} only copies artifacts whose content changed) and
 * wakes the single-threaded executor.
 *
 * @throws IOException if a lib directory cannot be created or listed
 */
public void start() throws IOException {
    // Parameterized logging for consistency with the rest of this class
    // (string concatenation was the only remaining concat-style call site).
    LOGGER.info("Starting DeploymentAgent {}", deploymentAgentId);
    loadLibChecksums(LIB_PATH, libChecksums);
    loadLibChecksums(LIB_ENDORSED_PATH, endorsedChecksums);
    loadLibChecksums(LIB_EXT_PATH, extensionChecksums);
    loadLibChecksums(KARAF_ETC, etcChecksums);
    executor.execute(new Runnable() {
        @Override
        public void run() {
            LOGGER.info("DeploymentAgent ready to accept configadmin tasks");
        }
    });
}
/**
 * Stops the agent: marks the update executor as shut down (no new configadmin
 * tasks accepted), shuts down the download pool and closes the service trackers.
 *
 * @throws InterruptedException declared for API compatibility; nothing here blocks
 */
public void stop() throws InterruptedException {
    // Parameterized logging for consistency with the rest of this class.
    LOGGER.info("Stopping DeploymentAgent {}", deploymentAgentId);
    // We can't wait for the threads to finish because the agent needs to be able to
    // update itself and this would cause a deadlock
    synchronized (executor) {
        // The synchronized block pairs with updated(): shutdown happens atomically
        // with respect to new task submission.
        executor.execute(new Runnable() {
            @Override
            public void run() {
                LOGGER.info("DeploymentAgent won't accept new configadmin tasks");
            }
        });
        executor.shutdown();
    }
    downloadExecutor.shutdown();
    fabricService.close();
    curatorCompleteService.close();
}
/**
 * Computes a checksum for every regular file under {@code path} and stores it in
 * {@code props} (file name -> checksum string), then persists the table.
 * The directory is created if missing.
 *
 * @param path  directory to scan (created when absent)
 * @param props checksum table to fill and save
 * @throws IOException if the directory cannot be created or listed
 */
private void loadLibChecksums(String path, Properties props) throws IOException {
    File dir = new File(path);
    if (!dir.exists() && !dir.mkdirs()) {
        throw new IOException("Failed to create fabric lib directory at:" + dir.getAbsolutePath());
    }
    String[] entries = dir.list();
    if (entries == null) {
        // File.list() returns null when the path exists but is not a directory
        // (or on an I/O error); the old code would NPE in the for-loop here.
        throw new IOException("Failed to list fabric lib directory at:" + dir.getAbsolutePath());
    }
    for (String lib : entries) {
        File f = new File(path, lib);
        if (f.exists() && f.isFile()) {
            // Close the stream ourselves instead of relying on ChecksumUtils to do it.
            FileInputStream fis = new FileInputStream(f);
            try {
                props.put(lib, Long.toString(ChecksumUtils.checksum(fis)));
            } finally {
                fis.close();
            }
        }
    }
    props.save();
}
/**
 * ConfigurationAdmin callback: schedules an asynchronous {@link #doUpdate} run with
 * the new properties. Runs nothing when the executor is already shut down or the
 * properties are null. Status is only pushed to the container when the update
 * succeeded or failed with an exception (doUpdate returning false without an
 * exception means "restarting / nothing to report").
 *
 * @param props the new agent configuration, may be null
 * @throws ConfigurationException declared by the ManagedService contract; not thrown here
 */
public void updated(final Dictionary<String, ?> props) throws ConfigurationException {
    LOGGER.info("DeploymentAgent {} updated with {}", deploymentAgentId, props);
    // Synchronized with stop(): shutdown and task submission cannot interleave.
    synchronized (executor) {
        if (executor.isShutdown() || props == null) {
            return;
        }
        executor.submit(new Runnable() {
            public void run() {
                Throwable result = null;
                boolean success = false;
                try {
                    success = doUpdate(props);
                } catch (Throwable e) {
                    result = e;
                    LOGGER.error("Unable to update agent", e);
                }
                // This update is critical, so
                // force the status write (bypasses the UPDATE_INTERVAL throttle).
                if (success || result != null) {
                    updateStatus(success ? Container.PROVISION_SUCCESS : Container.PROVISION_ERROR, result, true);
                }
            }
        });
    }
}
/** Convenience overload: non-forced (throttled) status update. */
private void updateStatus(String status, Throwable result) {
    updateStatus(status, result, false);
}
/**
 * Resolves a {@link FabricService} from the tracker and delegates to the four-arg
 * {@code updateStatus}. When {@code force} is true this blocks until the service
 * is available ({@code waitForService(0)} waits indefinitely); otherwise it uses
 * whatever service is currently tracked (possibly null).
 *
 * @param status provisioning status string to publish
 * @param result provisioning error, or null on success
 * @param force  whether to block for the service and bypass the update throttle
 */
private void updateStatus(String status, Throwable result, boolean force) {
    try {
        FabricService fs;
        if (force) {
            fs = fabricService.waitForService(0);
        } else {
            fs = fabricService.getService();
        }
        updateStatus(fs, status, result, force);
    } catch (Throwable e) {
        // Include the cause: the previous bare message made these failures
        // impossible to diagnose from the logs.
        LOGGER.warn("Unable to set provisioning result", e);
    }
}
// Timestamp (epoch millis) of the last status update pushed to ZooKeeper.
// Synchronization is not that important here: a racy read only means one
// extra (or one delayed) update, which is harmless.
private long lastStatusUpdate = 0L;
// ENTESB-3361: we'll be updating status (in ZK) not faster than every UPDATE_INTERVAL ms
private static final long UPDATE_INTERVAL = 2000L;
/**
 * Publishes the provisioning status (and optional error / provision list /
 * bundle checksums) to the current container via the given FabricService.
 * Non-forced calls are throttled to one update per {@code UPDATE_INTERVAL} ms
 * (ENTESB-3361). All failures are logged and swallowed.
 *
 * @param fs     fabric service to publish through; when null, only logs once
 * @param status provisioning status string
 * @param result provisioning error, or null
 * @param force  bypass the throttle when true
 */
private void updateStatus(FabricService fs, String status, Throwable result, boolean force/*=false*/) {
    if (!force && System.currentTimeMillis() < lastStatusUpdate + UPDATE_INTERVAL) {
        return;
    }
    lastStatusUpdate = System.currentTimeMillis();
    try {
        // Cache the latest status so the FabricService tracker can replay it
        // when the service (re)appears.
        provisioningStatus = status;
        provisioningError = result;
        if (fs != null) {
            fabricNotAvailableLogged = false;
            Container container = fs.getCurrentContainer();
            // Render the provisioning exception (if any) as a stack-trace string.
            String e;
            if (result == null) {
                e = null;
            } else {
                StringWriter sw = new StringWriter();
                result.printStackTrace(new PrintWriter(sw));
                e = sw.toString();
            }
            if (provisionList != null) {
                // Sorted, de-duplicated resource URIs for display.
                Set<String> uris = new TreeSet<>();
                for (Resource res : provisionList) {
                    uris.add(getUri(res));
                }
                container.setProvisionList(new ArrayList<>(uris));
            }
            container.setProvisionResult(status);
            container.setProvisionException(e);
            java.util.Properties provisionChecksums = new java.util.Properties();
            for (Map.Entry<Long, Long> entry : state.bundleChecksums.entrySet()) {
                Bundle bundle = systemBundleContext.getBundle(entry.getKey());
                if (bundle == null) {
                    // The bundle may have been uninstalled since the state snapshot
                    // was taken; the old code NPE'd on getLocation() here.
                    continue;
                }
                String location = bundle.getLocation();
                provisionChecksums.put(location, entry.getValue().toString());
            }
            /*
            putAllProperties(provisionChecksums, libChecksums);
            putAllProperties(provisionChecksums, endorsedChecksums);
            putAllProperties(provisionChecksums, extensionChecksums);
            */
            container.setProvisionChecksums(provisionChecksums);
        } else {
            if (!fabricNotAvailableLogged) {
                // Log the unavailability only once until the service comes back.
                fabricNotAvailableLogged = true;
                LOGGER.info("Unable to set provisioning status as FabricService is not available");
            }
        }
    } catch (Throwable e) {
        // Include the cause instead of silently dropping it.
        LOGGER.warn("Unable to set provisioning result", e);
    }
}
/**
 * Copies every key/value pair from a fabric {@code Properties} table into a
 * plain {@code java.util.Properties}.
 *
 * @param answer     destination properties (mutated)
 * @param properties source table to copy from
 */
protected static void putAllProperties(java.util.Properties answer, Properties properties) {
    for (Map.Entry<String, String> source : properties.entrySet()) {
        String name = source.getKey();
        String value = source.getValue();
        answer.put(name, value);
    }
}
/**
 * Performs one full provisioning pass from the given configadmin properties:
 * copies the config into a plain map, refreshes Maven repo configuration from ZK,
 * downloads and installs framework/libs/system/config/etc artifacts, restarts the
 * JVM when a boot-time artifact changed, and finally delegates bundle/feature
 * provisioning to an {@link Agent}.
 *
 * @param props agent configuration; null or "disabled=true" short-circuits to false
 * @return true when provisioning completed and status may be set to success;
 *         false when nothing was done or a restart was scheduled
 * @throws Exception on any unrecoverable provisioning error
 */
public boolean doUpdate(Dictionary<String, ?> props) throws Exception {
    if (props == null || Boolean.parseBoolean((String) props.get("disabled"))) {
        return false;
    }

    // Copy the dictionary into a plain map, dropping configadmin bookkeeping keys.
    final Hashtable<String, String> properties = new Hashtable<>();
    for (Enumeration e = props.keys(); e.hasMoreElements();) {
        Object key = e.nextElement();
        Object val = props.get(key);
        if (!"service.pid".equals(key) && !FeatureConfigInstaller.FABRIC_ZOOKEEPER_PID.equals(key)) {
            properties.put(key.toString(), val.toString());
        }
    }

    updateStatus("analyzing", null);

    // Building configuration
    curatorCompleteService.waitForService(TimeUnit.SECONDS.toMillis(30));

    String httpUrl;
    List<URI> mavenRepoURIs;

    // Force reading of updated information from ZK.
    if (!fabricService.isEmpty()) {
        updateMavenRepositoryConfiguration(fabricService.getService());
    }

    // Snapshot the repo configuration under the lock so both fields are consistent.
    try {
        fabricServiceOperations.lock();
        // no one will change the members now
        httpUrl = this.httpUrl;
        mavenRepoURIs = this.mavenRepoURIs;
    } finally {
        fabricServiceOperations.unlock();
    }
    addMavenProxies(properties, httpUrl, mavenRepoURIs);

    final MavenResolver resolver = MavenResolvers.createMavenResolver(properties, "org.ops4j.pax.url.mvn");
    final DownloadManager manager = DownloadManagers.createDownloadManager(resolver, getDownloadExecutor());
    // Surface download progress in the provisioning status (throttled).
    manager.addListener(new DownloadCallback() {
        @Override
        public void downloaded(StreamProvider provider) throws Exception {
            int pending = manager.pending();
            updateStatus(pending > 0 ? "downloading (" + pending + " pending)" : "downloading", null);
        }
    });

    // Update framework, libs, system and config props
    final Object lock = new Object();
    final AtomicBoolean restart = new AtomicBoolean();
    // Start by assuming every currently-managed artifact should be removed;
    // each key seen in the new configuration is taken off its removal set below.
    final Set<String> libsToRemove = new HashSet<>(managedLibs.keySet());
    final Set<String> endorsedLibsToRemove = new HashSet<>(managedEndorsedLibs.keySet());
    final Set<String> extensionLibsToRemove = new HashSet<>(managedExtensionLibs.keySet());
    final Set<String> sysPropsToRemove = new HashSet<>(managedSysProps.keySet());
    final Set<String> configPropsToRemove = new HashSet<>(managedConfigProps.keySet());
    final Set<String> etcsToRemove = new HashSet<>(managedEtcs.keySet());
    final Properties configProps = new Properties(new File(KARAF_BASE + File.separator + "etc" + File.separator + "config.properties"));
    final Properties systemProps = new Properties(new File(KARAF_BASE + File.separator + "etc" + File.separator + "system.properties"));

    Downloader downloader = manager.createDownloader();
    for (String key : properties.keySet()) {
        if (key.equals("framework")) {
            // Download the OSGi framework itself; only Maven URLs are supported.
            String url = properties.get(key);
            if (!url.startsWith("mvn:")) {
                throw new IllegalArgumentException("Framework url must use the mvn: protocol");
            }
            downloader.download(url, new DownloadCallback() {
                @Override
                public void downloaded(StreamProvider provider) throws Exception {
                    File file = provider.getFile();
                    String path = file.getPath();
                    if (path.startsWith(KARAF_HOME)) {
                        path = path.substring(KARAF_HOME.length() + 1);
                    }
                    synchronized (lock) {
                        // Switching the framework jar requires a container restart.
                        if (!path.equals(configProps.get("karaf.framework.felix"))) {
                            configProps.put("karaf.framework", "felix");
                            configProps.put("karaf.framework.felix", path);
                            restart.set(true);
                        }
                    }
                }
            });
        } else if (key.startsWith("config.")) {
            // etc/config.properties entries; any change requires a restart.
            String k = key.substring("config.".length());
            String v = properties.get(key);
            synchronized (lock) {
                managedConfigProps.put(k, v);
                configPropsToRemove.remove(k);
                if (!v.equals(configProps.get(k))) {
                    configProps.put(k, v);
                    restart.set(true);
                }
            }
        } else if (key.startsWith("system.")) {
            // etc/system.properties entries; any change requires a restart.
            String k = key.substring("system.".length());
            synchronized (lock) {
                String v = properties.get(key);
                managedSysProps.put(k, v);
                sysPropsToRemove.remove(k);
                if (!v.equals(systemProps.get(k))) {
                    systemProps.put(k, v);
                    restart.set(true);
                }
            }
        } else if (key.startsWith("lib.")) {
            // lib/ jars are only picked up at JVM boot, so a changed checksum
            // forces a restart.
            String value = properties.get(key);
            downloader.download(value, new DownloadCallback() {
                @Override
                public void downloaded(StreamProvider provider) throws Exception {
                    File libFile = provider.getFile();
                    String libName = libFile.getName();
                    Long checksum = ChecksumUtils.checksum(libFile);
                    boolean update;
                    synchronized (lock) {
                        managedLibs.put(libName, "true");
                        libsToRemove.remove(libName);
                        update = !Long.toString(checksum).equals(libChecksums.getProperty(libName));
                    }
                    if (update) {
                        Files.copy(libFile, new File(LIB_PATH, libName));
                        restart.set(true);
                    }
                }
            });
        } else if (key.startsWith("endorsed.")) {
            // lib/endorsed jars, same restart semantics as lib.*.
            String value = properties.get(key);
            downloader.download(value, new DownloadCallback() {
                @Override
                public void downloaded(StreamProvider provider) throws Exception {
                    File libFile = provider.getFile();
                    String libName = libFile.getName();
                    Long checksum = ChecksumUtils.checksum(new FileInputStream(libFile));
                    boolean update;
                    synchronized (lock) {
                        managedEndorsedLibs.put(libName, "true");
                        endorsedLibsToRemove.remove(libName);
                        update = !Long.toString(checksum).equals(endorsedChecksums.getProperty(libName));
                    }
                    if (update) {
                        Files.copy(libFile, new File(LIB_ENDORSED_PATH, libName));
                        restart.set(true);
                    }
                }
            });
        } else if (key.startsWith("extension.")) {
            // lib/ext jars, same restart semantics as lib.*.
            String value = properties.get(key);
            downloader.download(value, new DownloadCallback() {
                @Override
                public void downloaded(StreamProvider provider) throws Exception {
                    File libFile = provider.getFile();
                    String libName = libFile.getName();
                    Long checksum = ChecksumUtils.checksum(libFile);
                    boolean update;
                    synchronized (lock) {
                        managedExtensionLibs.put(libName, "true");
                        extensionLibsToRemove.remove(libName);
                        update = !Long.toString(checksum).equals(extensionChecksums.getProperty(libName));
                    }
                    if (update) {
                        Files.copy(libFile, new File(LIB_EXT_PATH, libName));
                        restart.set(true);
                    }
                }
            });
        } else if (key.startsWith("etc.")) {
            // etc/ files are re-read at runtime; note: no restart is triggered here.
            String value = properties.get(key);
            downloader.download(value, new DownloadCallback() {
                @Override
                public void downloaded(StreamProvider provider) throws Exception {
                    File etcFile = provider.getFile();
                    String etcName = etcFile.getName();
                    Long checksum = ChecksumUtils.checksum(new FileInputStream(etcFile));
                    boolean update;
                    synchronized (lock) {
                        managedEtcs.put(etcName, "true");
                        etcsToRemove.remove(etcName);
                        update = !Long.toString(checksum).equals(etcChecksums.getProperty(etcName));
                    }
                    if (update) {
                        Files.copy(etcFile, new File(KARAF_ETC, etcName));
                    }
                }
            });
        }
    }
    // Block until all queued downloads (and their callbacks) completed.
    downloader.await();

    // Remove unused libs, system & config properties
    for (String sysProp : sysPropsToRemove) {
        systemProps.remove(sysProp);
        managedSysProps.remove(sysProp);
        System.clearProperty(sysProp);
        restart.set(true);
    }
    for (String configProp : configPropsToRemove) {
        configProps.remove(configProp);
        managedConfigProps.remove(configProp);
        restart.set(true);
    }
    for (String lib : libsToRemove) {
        File libFile = new File(LIB_PATH, lib);
        libFile.delete();
        libChecksums.remove(lib);
        managedLibs.remove(lib);
        restart.set(true);
    }
    for (String lib : endorsedLibsToRemove) {
        File libFile = new File(LIB_ENDORSED_PATH, lib);
        libFile.delete();
        endorsedChecksums.remove(lib);
        managedEndorsedLibs.remove(lib);
        restart.set(true);
    }
    for (String lib : extensionLibsToRemove) {
        File libFile = new File(LIB_EXT_PATH, lib);
        libFile.delete();
        extensionChecksums.remove(lib);
        managedExtensionLibs.remove(lib);
        restart.set(true);
    }
    for (String etc : etcsToRemove) {
        File etcFile = new File(KARAF_ETC, etc);
        etcFile.delete();
        etcChecksums.remove(etc);
        managedEtcs.remove(etc);
    }

    // Persist all bookkeeping tables so the next run sees the current state.
    libChecksums.save();
    endorsedChecksums.save();
    extensionChecksums.save();
    etcChecksums.save();

    managedLibs.save();
    managedEndorsedLibs.save();
    managedExtensionLibs.save();
    managedConfigProps.save();
    managedSysProps.save();
    managedEtcs.save();

    if (restart.get()) {
        // A boot-time artifact changed: persist etc/ property files and restart
        // the whole container. Returning false prevents a "success" status write.
        updateStatus("restarting", null);
        configProps.save();
        systemProps.save();
        System.setProperty("karaf.restart", "true");
        bundleContext.getBundle(0).stop();
        return false;
    }

    // Wire up feature-config installation when ConfigurationAdmin is available.
    FeatureConfigInstaller configInstaller = null;
    ServiceReference configAdminServiceReference = bundleContext.getServiceReference(ConfigurationAdmin.class.getName());
    if (configAdminServiceReference != null) {
        ConfigurationAdmin configAdmin = (ConfigurationAdmin) bundleContext.getService(configAdminServiceReference);
        configInstaller = new FeatureConfigInstaller(bundleContext, configAdmin, manager);
    }

    // Bundle start timeout, optionally overridden via configuration.
    int bundleStartTimeout = Constants.BUNDLE_START_TIMEOUT;
    String overriddenTimeout = properties.get(Constants.BUNDLE_START_TIMEOUT_PID_KEY);
    try {
        if (overriddenTimeout != null)
            bundleStartTimeout = Integer.parseInt(overriddenTimeout);
    } catch (Exception e) {
        LOGGER.warn("Failed to set {} value: [{}], applying default value: {}", Constants.BUNDLE_START_TIMEOUT_PID_KEY, overriddenTimeout, Constants.BUNDLE_START_TIMEOUT);
    }

    // The anonymous subclass routes Agent status/state callbacks back into this
    // DeploymentAgent and hooks patch-management after provisioning finishes.
    Agent agent = new Agent(
            bundleContext.getBundle(),
            systemBundleContext,
            manager,
            configInstaller,
            null,
            DEFAULT_FEATURE_RESOLUTION_RANGE,
            DEFAULT_BUNDLE_UPDATE_RANGE,
            DEFAULT_UPDATE_SNAPSHOTS,
            bundleContext.getDataFile(STATE_FILE),
            bundleStartTimeout
    ) {
        @Override
        public void updateStatus(String status) {
            DeploymentAgent.this.updateStatus(status, null, false);
        }

        @Override
        public void updateStatus(String status, boolean force) {
            DeploymentAgent.this.updateStatus(status, null, force);
        }

        @Override
        protected void saveState(State newState) throws IOException {
            super.saveState(newState);
            DeploymentAgent.this.state.replace(newState);
        }

        @Override
        protected void provisionList(Set<Resource> resources) {
            DeploymentAgent.this.provisionList = resources;
        }

        @Override
        protected boolean done(boolean agentStarted, List<String> urls) {
            if (agentStarted) {
                // let's do patch-management "last touch" only if new agent wasn't started.
                return true;
            }
            // agent finished provisioning, we can call back to low level patch management
            ServiceReference<PatchManagement> srPm = systemBundleContext.getServiceReference(PatchManagement.class);
            ServiceReference<FabricService> srFs = systemBundleContext.getServiceReference(FabricService.class);
            if (srPm != null && srFs != null) {
                PatchManagement pm = systemBundleContext.getService(srPm);
                FabricService fs = systemBundleContext.getService(srFs);
                if (pm != null && fs != null) {
                    LOGGER.info("Validating baseline information");
                    this.updateStatus("validating baseline information", true);
                    Profile profile = fs.getCurrentContainer().getOverlayProfile();
                    Map<String, String> versions = profile.getConfiguration("io.fabric8.version");
                    File localRepository = resolver.getLocalRepository();
                    if (pm.alignTo(versions, urls, localRepository, new Runnable() {
                        @Override
                        public void run() {
                            // Callback invoked by patch management once alignment is done:
                            // synchronize the fabric patch state (best effort).
                            ServiceReference<FabricPatchService> srFps = systemBundleContext.getServiceReference(FabricPatchService.class);
                            if (srFps != null) {
                                FabricPatchService fps = systemBundleContext.getService(srFps);
                                if (fps != null) {
                                    try {
                                        fps.synchronize(false);
                                    } catch (Exception e) {
                                        LOGGER.error(e.getMessage(), e);
                                    }
                                }
                            }
                        }
                    })) {
                        this.updateStatus("requires full restart", true);
                        // let's reuse the same flag
                        restart.set(true);
                        return false;
                    }

                    if (handleRestartJvmFlag(fs, profile, restart)) {
                        return false;
                    }
                }
            }
            return true;
        }
    };
    agent.setDeploymentAgentId(deploymentAgentId);
    agent.provision(
            getPrefixedProperties(properties, "repository."),
            getPrefixedProperties(properties, "feature."),
            getPrefixedProperties(properties, "bundle."),
            getPrefixedProperties(properties, "req."),
            getPrefixedProperties(properties, "override."),
            getPrefixedProperties(properties, "optional."),
            getMetadata(properties, "metadata#")
    );
    if (restart.get()) {
        // prevent updating status to "success"
        return false;
    }
    return true;
}
/**
 * Adds support for a directive to force a restart upon the first assignment of a specific profile to a container.
 * It creates an entry in zk so that a subsequent modification to the same profile, will not trigger a jvm restart.
 * The behavior is useful for situation when a profile provision .jars in lib/ folder, that are picked up only at
 * jvm boot time.
 *
 * @param fs      fabric service used to resolve the current container and its profiles
 * @param profile overlay profile whose "io.fabric8.agent" configuration is scanned for
 *                {@code io.fabric8.agent.forceOneTimeJVMRestart.*} keys
 * @param restart flag set to true (and the container bundle stopped) when a restart is required
 * @return true when at least one profile requested a first-time JVM restart
 */
protected boolean handleRestartJvmFlag(FabricService fs, Profile profile, AtomicBoolean restart) {
    boolean result = false;
    List<String> profilesRequiringRestart = new ArrayList<>();
    ServiceReference<CuratorFramework> curatorServiceReference = systemBundleContext.getServiceReference(CuratorFramework.class);
    if (curatorServiceReference != null) {
        CuratorFramework curator = systemBundleContext.getService(curatorServiceReference);

        // check for jvm restart requests
        Map<String, String> agentProperties = profile.getConfiguration("io.fabric8.agent");
        Map<String, String> jvmRestartEntries = new HashMap<>();
        for (String key : agentProperties.keySet()) {
            if (key.startsWith("io.fabric8.agent.forceOneTimeJVMRestart")) {
                jvmRestartEntries.put(key, agentProperties.get(key));
                LOGGER.info("Found a profile carrying a one-time JVM restart request: {}", key);
            }
        }

        // clean old entries: drop ZK restart markers for profiles no longer assigned
        // to this container, so a future re-assignment triggers the restart again.
        String basePath = ZkPath.CONTAINER_PROVISION_RESTART.getPath(fs.getCurrentContainerName());
        try {
            if (ZooKeeperUtils.exists(curator, basePath) != null) {
                List<String> zkPaths = ZooKeeperUtils.getAllChildren(curator, ZkPath.CONTAINER_PROVISION_RESTART.getPath(fs.getCurrentContainerName()));
                List<String> activeProfiles = fs.getCurrentContainer().getProfileIds();
                for (String zkPath : zkPaths) {
                    String[] split = zkPath.split("/");
                    String prof = split[split.length - 1];
                    if (!activeProfiles.contains(prof)) {
                        LOGGER.info("Deleting old JVM restart request status: {}", zkPath);
                        ZooKeeperUtils.delete(curator, zkPath);
                    }
                }
            }
        } catch (Exception e) {
            LOGGER.error("Unable to check ZK connection", e);
        }

        for (String key : jvmRestartEntries.keySet()) {
            // NOTE(review): the profile name is taken as the last dot-separated segment
            // of the key — profile names containing dots would be truncated; confirm.
            String[] split = key.split("\\.");
            String profileForcingRestart = split[split.length - 1];

            // check container-profile-znode
            // if it was already in zk for the current container, do nothing
            try {
                String zkPath = ZkPath.CONTAINER_PROVISION_RESTART_PROFILES.getPath(fs.getCurrentContainerName(), profileForcingRestart);
                Stat exists = exists(curator, zkPath);
                if (exists == null) {
                    // First time this profile is seen on this container: record the
                    // marker node and schedule the one-time restart.
                    ZooKeeperUtils.create(curator, zkPath);
                    profilesRequiringRestart.add(profileForcingRestart);
                    result = true;
                }
            } catch (Exception e) {
                LOGGER.error("Unable to check ZK connection", e);
            }
        }
    }

    if (result) {
        System.setProperty("karaf.restart.jvm", "true");
        restart.set(true);
        LOGGER.warn("Profiles {} scheduled a JVM restart request. Automated JVM restart support is not universally available. If your jvm doesn't support it you are required to manually restart the container that has just been assigned the profile.", profilesRequiringRestart);
        try {
            bundleContext.getBundle(0).stop();
        } catch (BundleException e) {
            LOGGER.error("Error when forcing a JVM restart", e);
        }
    }
    return result;
}
/**
 * Collects the values of all entries whose key starts with {@code prefix}.
 * When an entry's value is null or empty, the key itself (minus the prefix)
 * is used as the value. Empty results are skipped.
 *
 * @param properties source map
 * @param prefix     key prefix to select, e.g. {@code "bundle."}
 * @return set of non-empty values (or key remainders) for the matching entries
 */
public static Set<String> getPrefixedProperties(Map<String, String> properties, String prefix) {
    Set<String> result = new HashSet<>();
    // Iterate entries instead of keySet()+get(key): one lookup per entry.
    for (Map.Entry<String, String> entry : properties.entrySet()) {
        String key = entry.getKey();
        if (key.startsWith(prefix)) {
            String url = entry.getValue();
            if (url == null || url.length() == 0) {
                // Empty value: the URL is encoded in the key itself.
                url = key.substring(prefix.length());
            }
            if (url.length() > 0) {
                result.add(url);
            }
        }
    }
    return result;
}
/**
 * Parses keys of the form {@code <prefix><id>#<version>#<header>} into a nested
 * map: id -> version range -> (header -> value). Keys without exactly three
 * {@code #}-separated parts after the prefix are ignored. A bare version (not
 * starting with '[' or '(') is expanded to the default "[==,=+)" range.
 *
 * @param properties source map
 * @param prefix     key prefix to select, e.g. {@code "metadata#"}
 * @return the parsed metadata structure
 */
public static Map<String, Map<VersionRange, Map<String, String>>> getMetadata(Map<String, String> properties, String prefix) {
    Map<String, Map<VersionRange, Map<String, String>>> metadata = new HashMap<>();
    for (Map.Entry<String, String> entry : properties.entrySet()) {
        String fullKey = entry.getKey();
        if (!fullKey.startsWith(prefix)) {
            continue;
        }
        String[] parts = fullKey.substring(prefix.length()).split("#");
        if (parts.length != 3) {
            continue;
        }
        String id = parts[0];
        Map<VersionRange, Map<String, String>> ranges = metadata.get(id);
        if (ranges == null) {
            ranges = new HashMap<>();
            metadata.put(id, ranges);
        }
        String version = parts[1];
        if (!version.startsWith("[") && !version.startsWith("(")) {
            version = Macro.transform("${range;[==,=+)}", version);
        }
        VersionRange range = new VersionRange(version);
        Map<String, String> headers = ranges.get(range);
        if (headers == null) {
            headers = new HashMap<>();
            ranges.put(range, headers);
        }
        headers.put(parts[2], entry.getValue());
    }
    return metadata;
}
/** @return the shared scheduled executor used for artifact downloads */
protected ScheduledExecutorService getDownloadExecutor() {
    return downloadExecutor;
}
}
| |
/*
* Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* @test
* @bug 4495742
* @ignore JSSE supported cipher suites are changed with CR 6916074,
* need to update this test case in JDK 7 soon
* @run main/timeout=180 TestAllSuites
* @summary Add non-blocking SSL/TLS functionality, usable with any
* I/O abstraction
*
* Iterate through all the suites using both TLS and SSLv3, and turn
* SSLv2Hello off and on. Exchange some bytes and shutdown.
*
* @author Brad Wetmore
*/
import javax.net.ssl.*;
import javax.net.ssl.SSLEngineResult.*;
import java.io.*;
import java.security.*;
import java.nio.*;
import java.util.*;
/**
 * JDK regression test: for every supported cipher suite and each protocol
 * combination (SSLv3/TLSv1, with and without SSLv2Hello), wraps two in-memory
 * SSLEngines (client + server) around direct ByteBuffers, exchanges a short
 * message in each direction, then shuts both engines down and verifies the
 * closed state. No sockets are used; the two engines are connected by the
 * oneToTwo/twoToOne buffers acting as a "reliable transport".
 */
public class TestAllSuites {

    private static boolean debug = false;

    private SSLContext sslc;
    private SSLEngine ssle1;        // client
    private SSLEngine ssle2;        // server

    // Key/trust material lives next to the test sources (jtreg layout).
    private static String pathToStores = "../../../../../etc";
    private static String keyStoreFile = "keystore";
    private static String trustStoreFile = "truststore";
    private static String passwd = "passphrase";

    private static String keyFilename =
        System.getProperty("test.src", "./") + "/" + pathToStores +
        "/" + keyStoreFile;
    private static String trustFilename =
        System.getProperty("test.src", "./") + "/" + pathToStores +
        "/" + trustStoreFile;

    private ByteBuffer appOut1;     // write side of ssle1
    private ByteBuffer appIn1;      // read side of ssle1
    private ByteBuffer appOut2;     // write side of ssle2
    private ByteBuffer appIn2;      // read side of ssle2

    private ByteBuffer oneToTwo;    // "reliable" transport ssle1->ssle2
    private ByteBuffer twoToOne;    // "reliable" transport ssle2->ssle1

    // Protocol combinations exercised for every cipher suite.
    String [][] protocols = new String [][] {
        { "SSLv3" },
        { "TLSv1" },
        { "SSLv3", "SSLv2Hello"},
        { "TLSv1", "SSLv2Hello"}
    };

    /*
     * Majority of the test case is here, setup is done below.
     */

    // Creates a fresh client/server engine pair from the shared context.
    private void createSSLEngines() throws Exception {
        ssle1 = sslc.createSSLEngine("client", 1);
        ssle1.setUseClientMode(true);

        ssle2 = sslc.createSSLEngine("server", 2);
        ssle2.setUseClientMode(false);
    }

    // Iterates every supported suite x every protocol combination, with fresh
    // engines per run so state from one handshake cannot leak into the next.
    private void test() throws Exception {
        createSSLEngines();
        String [] suites = ssle1.getSupportedCipherSuites();
        for (int i = 0; i < suites.length; i++) {
            for (int j = 0; j < protocols.length; j++) {
                createSSLEngines();
                runTest(suites[i], protocols[j]);
            }
        }
    }

    // One full handshake + data exchange + orderly shutdown for a single
    // suite/protocol combination. Kerberized and SCSV pseudo-suites are skipped.
    private void runTest(String suite, String [] protocols) throws Exception {
        boolean dataDone = false;

        System.out.println("======================================");
        System.out.println("Testing: " + suite);
        for (int i = 0; i < protocols.length; i++) {
            System.out.print(protocols[i] + " ");
        }

        /*
         * Don't run the Kerberized suites for now.
         */
        if (suite.startsWith("TLS_KRB5")) {
            System.out.println("Ignoring Kerberized suite");
            return;
        }

        /*
         * Don't run the SCSV suite
         */
        if (suite.equals("TLS_EMPTY_RENEGOTIATION_INFO_SCSV")) {
            System.out.println("Ignoring SCSV suite");
            return;
        }

        // Anonymous DH suites have no server cert, so client auth is impossible.
        if (!suite.contains("DH_anon")) {
            ssle2.setNeedClientAuth(true);
        }

        String [] suites = new String [] { suite };

        ssle1.setEnabledCipherSuites(suites);
        ssle2.setEnabledCipherSuites(suites);

        ssle1.setEnabledProtocols(protocols);
        ssle2.setEnabledProtocols(protocols);

        createBuffers();

        SSLEngineResult result1;        // ssle1's results from last operation
        SSLEngineResult result2;        // ssle2's results from last operation

        Date start = new Date();
        // Pump wrap/unwrap in lock-step until both engines report closed.
        while (!isEngineClosed(ssle1) || !isEngineClosed(ssle2)) {

            log("----------------");

            result1 = ssle1.wrap(appOut1, oneToTwo);
            result2 = ssle2.wrap(appOut2, twoToOne);

            log("wrap1:  " + result1);
            log("oneToTwo  = " + oneToTwo);
            log("");

            log("wrap2:  " + result2);
            log("twoToOne  = " + twoToOne);

            runDelegatedTasks(result1, ssle1);
            runDelegatedTasks(result2, ssle2);

            oneToTwo.flip();
            twoToOne.flip();

            log("----");

            result1 = ssle1.unwrap(twoToOne, appIn1);
            result2 = ssle2.unwrap(oneToTwo, appIn2);

            log("unwrap1: " + result1);
            log("twoToOne  = " + twoToOne);
            log("");

            log("unwrap2: " + result2);
            log("oneToTwo  = " + oneToTwo);

            runDelegatedTasks(result1, ssle1);
            runDelegatedTasks(result2, ssle2);

            oneToTwo.compact();
            twoToOne.compact();

            /*
             * If we've transferred all the data between app1 and app2,
             * we try to close and see what that gets us.
             */
            if (!dataDone && (appOut1.limit() == appIn2.position()) &&
                    (appOut2.limit() == appIn1.position())) {

                checkTransfer(appOut1, appIn2);
                checkTransfer(appOut2, appIn1);

                log("Closing ssle1's *OUTBOUND*...");
                ssle1.closeOutbound();
                dataDone = true;
            }
        }

        /*
         * Just for grins, try closing again, make sure nothing
         * strange is happening after we're closed.
         */
        ssle1.closeInbound();
        ssle1.closeOutbound();

        ssle2.closeInbound();
        ssle2.closeOutbound();

        // wrap()/unwrap() on a closed engine must consume/produce nothing.
        appOut1.rewind();
        appIn1.clear();
        oneToTwo.clear();

        result1 = ssle1.wrap(appOut1, oneToTwo);
        checkResult(result1);

        result1 = ssle1.unwrap(oneToTwo, appIn1);
        checkResult(result1);

        System.out.println("Test Passed.");
        System.out.println("\n======================================");

        Date end = new Date();
        elapsed += end.getTime() - start.getTime();

    }

    // Accumulated wall-clock time across all runTest() invocations (millis).
    static long elapsed = 0;

    /*
     * Check that a post-close operation reports CLOSED / NOT_HANDSHAKING
     * and moved no bytes.
     */
    private static void checkResult(SSLEngineResult result) throws Exception {
        if ((result.getStatus() != Status.CLOSED) ||
                (result.getHandshakeStatus() !=
                    HandshakeStatus.NOT_HANDSHAKING) ||
                (result.bytesConsumed() != 0) ||
                (result.bytesProduced() != 0)) {
            throw new Exception("Unexpected close status");
        }
    }

    public static void main(String args[]) throws Exception {

        TestAllSuites tas;

        tas = new TestAllSuites();

        tas.createSSLEngines();

        tas.test();

        System.out.println("All Tests Passed.");
        System.out.println("Elapsed time: " + elapsed / 1000.0);
    }

    /*
     * **********************************************************
     * Majority of the test case is above, below is just setup stuff
     * **********************************************************
     */

    public TestAllSuites() throws Exception {
        sslc = getSSLContext(keyFilename, trustFilename);
    }

    /*
     * Create an initialized SSLContext to use for this test.
     */
    private SSLContext getSSLContext(String keyFile, String trustFile)
            throws Exception {

        KeyStore ks = KeyStore.getInstance("JKS");
        KeyStore ts = KeyStore.getInstance("JKS");

        char[] passphrase = "passphrase".toCharArray();

        ks.load(new FileInputStream(keyFile), passphrase);
        ts.load(new FileInputStream(trustFile), passphrase);

        KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
        kmf.init(ks, passphrase);

        TrustManagerFactory tmf = TrustManagerFactory.getInstance("SunX509");
        tmf.init(ts);

        SSLContext sslCtx = SSLContext.getInstance("TLS");

        sslCtx.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null);

        return sslCtx;
    }

    private void createBuffers() {
        // Size the buffers as appropriate.

        SSLSession session = ssle1.getSession();
        int appBufferMax = session.getApplicationBufferSize();
        int netBufferMax = session.getPacketBufferSize();

        // +50 gives the unwrap side a little headroom over the advertised max.
        appIn1 = ByteBuffer.allocateDirect(appBufferMax + 50);
        appIn2 = ByteBuffer.allocateDirect(appBufferMax + 50);

        oneToTwo = ByteBuffer.allocateDirect(netBufferMax);
        twoToOne = ByteBuffer.allocateDirect(netBufferMax);

        appOut1 = ByteBuffer.wrap("Hi Engine2, I'm SSLEngine1".getBytes());
        appOut2 = ByteBuffer.wrap("Hello Engine1, I'm SSLEngine2".getBytes());

        log("AppOut1 = " + appOut1);
        log("AppOut2 = " + appOut2);
        log("");
    }

    // Runs any delegated (blocking) handshake tasks inline.
    private static void runDelegatedTasks(SSLEngineResult result,
            SSLEngine engine) throws Exception {

        if (result.getHandshakeStatus() == HandshakeStatus.NEED_TASK) {
            Runnable runnable;
            while ((runnable = engine.getDelegatedTask()) != null) {
                log("running delegated task...");
                runnable.run();
            }
        }
    }

    private static boolean isEngineClosed(SSLEngine engine) {
        return (engine.isOutboundDone() && engine.isInboundDone());
    }

    // Verifies that buffer b received exactly the bytes written from a,
    // then resets both buffers for further use.
    private static void checkTransfer(ByteBuffer a, ByteBuffer b)
            throws Exception {
        a.flip();
        b.flip();

        if (!a.equals(b)) {
            throw new Exception("Data didn't transfer cleanly");
        } else {
            log("Data transferred cleanly");
        }

        a.position(a.limit());
        b.position(b.limit());
        a.limit(a.capacity());
        b.limit(b.capacity());
    }

    private static void log(String str) {
        if (debug) {
            System.out.println(str);
        }
    }
}
| |
package org.firstinspires.ftc.teamcode;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import org.firstinspires.ftc.robotcore.external.navigation.RelicRecoveryVuMark;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener;
import org.firstinspires.ftc.robotcore.external.matrices.VectorF;
import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable;
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import com.qualcomm.robotcore.hardware.SwitchableLight;
import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
import org.firstinspires.ftc.robotcore.external.navigation.AxesReference;
import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import com.qualcomm.hardware.bosch.BNO055IMU;
import com.qualcomm.robotcore.hardware.GyroSensor;
import com.qualcomm.robotcore.hardware.Servo;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.util.ElapsedTime;
@Autonomous
public class AutoBackLeft180 extends LinearOpMode {
/* Declare OpMode members. */
// HardwarePushbot robot = new HardwarePushbot(); // Use a Pushbot's hardware
// Timer shared by all movement helpers below; each helper resets it.
private ElapsedTime runtime = new ElapsedTime();
// Gripper servos: Right/Left x Back/Front.
Servo servoRB;
Servo servoLB;
Servo servoRF;
Servo servoLF;
GyroSensor gs;
// Lift for the block gripper.
DcMotor liftermotor;
// Drive train: two front and two back motors.
DcMotor drivefrontone;
DcMotor drivefronttwo;
DcMotor drivebackone;
DcMotor drivebacktwo;
DcMotor relicthrower;
// Last IMU orientation reading.
Orientation angles;

static final double FORWARD_SPEED = 0.6;
static final double TURN_SPEED = 0.5;
public static final String TAG = "Vuforia VuMark Sample";

// Last known robot location computed from a Vuforia trackable, if any.
OpenGLMatrix lastLocation = null;

/**
 * {@link #vuforia} is the variable we will use to store our instance of the Vuforia
 * localization engine.
 */
VuforiaLocalizer vuforia;
public void driveForward(float speed) {
drivefrontone.setPower(-speed);
drivefronttwo.setPower(speed);
drivebackone.setPower(-speed);
drivebacktwo.setPower(speed);
}
public void driveBack(float speed) {
drivefrontone.setPower(speed);
drivefronttwo.setPower(-speed);
drivebackone.setPower(speed);
drivebacktwo.setPower(-speed);
}
public void waitFor(float times) {
runtime.reset();
while (opModeIsActive() && (runtime.seconds() < times/3)) {
telemetry.addData("Path", "Leg 3: %2.5f S Elapsed", runtime.seconds());
telemetry.update();
}
}
public void liftBlock() {
liftermotor.setPower(0.2);
runtime.reset();
while (opModeIsActive() && (runtime.seconds() < 0.0001)) {
telemetry.addData("Path", "Lifting: %2.5f S Elapsed", runtime.seconds());
telemetry.update();
}
}
public void grabBlock() {
servoRB.setPosition(1.45);
servoRF.setPosition(1.45);
servoLB.setPosition(1.45);
servoLF.setPosition(1.45);
}
public void dropBlock() {
servoRB.setPosition(0.0077);
servoRF.setPosition(0.25);
servoLB.setPosition(0.01);
servoLF.setPosition(0.4);
telemetry.addData("Grabbed", "Block");
}
public void bringDown() {
liftermotor.setPower(-0.1);
runtime.reset();
while (opModeIsActive() && (runtime.seconds() < 0.0001)) {
telemetry.addData("Path", "Dropping Block: %2.5f S Elapsed", runtime.seconds());
telemetry.update();
}
}
public void turnRight() {
BNO055IMU.Parameters parameters = new BNO055IMU.Parameters();
BNO055IMU imu = hardwareMap.get(BNO055IMU.class, "gs");
imu.initialize(parameters);
Orientation angles = imu.getAngularOrientation(AxesReference.INTRINSIC, AxesOrder.ZYX, AngleUnit.DEGREES);
// telemetry.addD
while(true)
{
angles = imu.getAngularOrientation(AxesReference.INTRINSIC, AxesOrder.ZYX, AngleUnit.DEGREES);
if (angles.firstAngle < 84) {
telemetry.addData("Right", "yes: " + angles.firstAngle);
telemetry.update();
drivefrontone.setPower(0.35);
drivefronttwo.setPower(0.35);
drivebackone.setPower(0.35);
drivebacktwo.setPower(0.35);
}
else {
telemetry.addData("Right","no" + angles.firstAngle);
drivebackone.setPower(0.0);
drivebacktwo.setPower(0.0);
drivefrontone.setPower(0.0);
drivefronttwo.setPower(0.0);
break;
}
}
}
public void turnLeft() {
BNO055IMU.Parameters parameters = new BNO055IMU.Parameters();
BNO055IMU imu = hardwareMap.get(BNO055IMU.class, "gs");
imu.initialize(parameters);
Orientation angles = imu.getAngularOrientation(AxesReference.INTRINSIC, AxesOrder.ZYX, AngleUnit.DEGREES);
while(true)
{
angles = imu.getAngularOrientation(AxesReference.INTRINSIC, AxesOrder.ZYX, AngleUnit.DEGREES);
if (angles.firstAngle < 84) {
telemetry.addData("Left", "yes: " + angles.firstAngle);
telemetry.update();
drivefrontone.setPower(-0.35);
drivefronttwo.setPower(-0.35);
drivebackone.setPower(-0.35);
drivebacktwo.setPower(-0.35);
}
else {
telemetry.addData("Left","no" + angles.firstAngle);
drivebackone.setPower(0.0);
drivebacktwo.setPower(0.0);
drivefrontone.setPower(0.0);
drivefronttwo.setPower(0.0);
break;
}
}
}
@Override
public void runOpMode() {
/*
* Initialize the drive system variables.
* The init() method of the hardware class does all the work here
*/
// robot.init(hardwareMap);
// Send telemetry message to signify robot waiting;
// telemetry.addData("Status", "Ready to run"); //
telemetry.update();
servoRB = hardwareMap.get(Servo.class, "rb");
servoRF = hardwareMap.get(Servo.class, "rt");
servoLB = hardwareMap.get(Servo.class, "lb");
servoLF = hardwareMap.get(Servo.class, "lt");
servoRB.setDirection(Servo.Direction.REVERSE);
servoRF.setDirection(Servo.Direction.REVERSE);
BNO055IMU.Parameters parameters = new BNO055IMU.Parameters();
BNO055IMU imu = hardwareMap.get(BNO055IMU.class, "gs");
imu.initialize(parameters);
Orientation angles = imu.getAngularOrientation(AxesReference.INTRINSIC, AxesOrder.ZYX, AngleUnit.DEGREES);
liftermotor = hardwareMap.dcMotor.get("liftermotor");
liftermotor.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
relicthrower = hardwareMap.dcMotor.get("rrc");
relicthrower.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
drivefrontone = hardwareMap.dcMotor.get("rf");
drivefronttwo = hardwareMap.dcMotor.get("lf");
drivebackone = hardwareMap.dcMotor.get("rba");
drivebacktwo = hardwareMap.dcMotor.get("lba");
drivefrontone.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
drivefronttwo.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
drivebackone.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
drivebacktwo.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
VuforiaLocalizer.Parameters parameters2 = new VuforiaLocalizer.Parameters(cameraMonitorViewId);
parameters2.vuforiaLicenseKey = "AUbpCEX/////AAAAGTLACtHX+0EJm3l+5ZOENEOBdEOdMJ7zgnhbLJN82nG6pys+khC3Y0l2odX+drpSwnRRzYYNQouYsqEwxCQo+vmM8qQuSR++lngbVq/7tZ+35AAyjKe+HO1NTcB1V9PbjyHtOUYAoPjfhW4/ErAxZ3BC+enW5VnBxmZMPeWVsVuMyDMiBFapkva3CxTZ7dN0mjBCp7AUOCYVSXPKNEjIyahN7pTsJV+zahoF5Gi2n0tM5DK2jRUD4P6HO95DL+G5cNECyC0BZVxdtkUz3upFnH+oYvI3b+QR/1s2o8RnPfE/k/BxirirkT4ADQl5Ct3+b0InnG9CyWydvvr7l/fkfWV79DjbDgKTnocKk250Jgba";
//gs = hardwareMap.get(GyroSensor.class, "gs");
// jt = hardwareMap.get(Servo.class, "jt");
// Wait for the game to start (driver presses PLAY)
//testGyro();
// telemetry.addData("THis", "this: " + angles);
telemetry.update();
parameters2.cameraDirection = VuforiaLocalizer.CameraDirection.FRONT;
this.vuforia = ClassFactory.createVuforiaLocalizer(parameters2);
VuforiaTrackables relicTrackables = this.vuforia.loadTrackablesFromAsset("RelicVuMark");
VuforiaTrackable relicTemplate = relicTrackables.get(0);
relicTemplate.setName("relicVuMarkTemplate"); // can help in debugging; otherwise not necessary
telemetry.addData(">", "Press Play to start");
telemetry.update();
waitForStart();
telemetry.addData("Waiting", "for three seconds");
telemetry.update();
sleep(3000);
relicTrackables.activate();
// int thigstuff = 0;
// while (thigstuff <= 20) {
// angles = imu.getAngularOrientation(AxesReference.INTRINSIC, AxesOrder.ZYX, AngleUnit.DEGREES);
// AngleUnit ds = AngleUnit.DEGREES;
// telemetry.addData("THis", "this: " + angles.firstAngle);
// telemetry.update();
// thigstuff = thigstuff++;
// };
// Step through each leg of the path, ensuring that the Auto mode has not been stopped along the way
// Step 1: Drive forward for 3 seconds
// robot.leftDrive.setPower(FORWARD_SPEED);
// robot.rightDrive.setPower(FORWARD_SPEED);
// Deive off platform
// driveForward((float) 0.9);
// waitFor((float) 1.75);
// Turn left and go Forward
// turnLeft();
// driveForward((float) 0.9);
// waitFor((float) 0.5);
// grabBlock();
RelicRecoveryVuMark vuMark = RelicRecoveryVuMark.from(relicTemplate);
if (vuMark != RelicRecoveryVuMark.UNKNOWN) {
/* Found an instance of the template. In the actual game, you will probably
* loop until this condition occurs, then move on to act accordingly depending
* on which VuMark was visible. */
telemetry.addData("VuMark", "%s visible", vuMark);
telemetry.update();
if(vuMark == RelicRecoveryVuMark.LEFT) {
telemetry.addData("LEFT(RUNNING) --> ","%s", vuMark);
telemetry.update();
grabBlock();
// waitFor((float)0.5);
sleep(1000);
// Lift grabbed block
liftBlock();
sleep(4000);
liftermotor.setPower(0.0);
driveBack((float)0.5);
waitFor((float) 1.85);
sleep(1000);
while(true)
{
angles = imu.getAngularOrientation(AxesReference.INTRINSIC, AxesOrder.ZYX, AngleUnit.DEGREES);
if (angles.firstAngle < 88) {
telemetry.addData("Left", "yes: " + angles.firstAngle);
telemetry.update();
drivefrontone.setPower(-0.35);
drivefronttwo.setPower(-0.35);
drivebackone.setPower(-0.35);
drivebacktwo.setPower(-0.35);
}
else {
telemetry.addData("Left","no" + angles.firstAngle);
drivebackone.setPower(0.0);
drivebacktwo.setPower(0.0);
drivefrontone.setPower(0.0);
drivefronttwo.setPower(0.0);
break;
}
}
// bringDown();
// driveBack((float) 0.5);
// waitFor((float) 1.0);
driveForward((float)0.4225);
//waitFor((float) 0.3);
// Drop block
sleep(1000);
dropBlock();
sleep(1000);
liftBlock();
sleep(3000);
driveBack((float)0.3);
//driveBack((float)0.1);
waitFor((float) 0.0025);
bringDown();
telemetry.addData("Program", "Complete");
telemetry.update();
sleep(2000);
}
else if(vuMark == RelicRecoveryVuMark.RIGHT) {
telemetry.addData("RIGHT(RUNNING) --> ","%s", vuMark);
grabBlock();
// waitFor((float)0.5);
sleep(1000);
// Lift grabbed block
liftBlock();
sleep(4000);
liftermotor.setPower(0.0);
driveBack((float)0.25);
waitFor((float) 1.80);
sleep(1000);
while(true)
{
angles = imu.getAngularOrientation(AxesReference.INTRINSIC, AxesOrder.ZYX, AngleUnit.DEGREES);
if (angles.firstAngle < 88) {
telemetry.addData("Left", "yes: " + angles.firstAngle);
telemetry.update();
drivefrontone.setPower(-0.35);
drivefronttwo.setPower(-0.35);
drivebackone.setPower(-0.35);
drivebacktwo.setPower(-0.35);
}
else {
telemetry.addData("Left","no" + angles.firstAngle);
drivebackone.setPower(0.0);
drivebacktwo.setPower(0.0);
drivefrontone.setPower(0.0);
drivefronttwo.setPower(0.0);
break;
}
}
// bringDown();
// driveBack((float) 0.5);
// waitFor((float) 1.0);
driveForward((float)0.4225);
//waitFor((float) 0.3);
// Drop block
sleep(1000);
dropBlock();
sleep(1000);
liftBlock();
sleep(3000);
driveBack((float)0.3);
//driveBack((float)0.1);
waitFor((float) 0.0025);
bringDown();
telemetry.addData("Program", "Complete");
telemetry.update();
sleep(2000);
}
else if(vuMark == RelicRecoveryVuMark.LEFT) {
telemetry.addData("CENTER(RUNNING) --> ","%s", vuMark);
grabBlock();
// waitFor((float)0.5);
sleep(1000);
// Lift grabbed block
liftBlock();
sleep(4000);
liftermotor.setPower(0.0);
driveBack((float)0.374);
waitFor((float) 1.80);
sleep(1000);
while(true)
{
angles = imu.getAngularOrientation(AxesReference.INTRINSIC, AxesOrder.ZYX, AngleUnit.DEGREES);
if (angles.firstAngle < 88) {
telemetry.addData("Left", "yes: " + angles.firstAngle);
telemetry.update();
drivefrontone.setPower(-0.35);
drivefronttwo.setPower(-0.35);
drivebackone.setPower(-0.35);
drivebacktwo.setPower(-0.35);
}
else {
telemetry.addData("Left","no" + angles.firstAngle);
drivebackone.setPower(0.0);
drivebacktwo.setPower(0.0);
drivefrontone.setPower(0.0);
drivefronttwo.setPower(0.0);
break;
}
}
// bringDown();
// driveBack((float) 0.5);
// waitFor((float) 1.0);
driveForward((float)0.4225);
//waitFor((float) 0.3);
// Drop block
sleep(1000);
dropBlock();
sleep(1000);
liftBlock();
sleep(3000);
driveBack((float)0.3);
//driveBack((float)0.1);
waitFor((float) 0.0025);
bringDown();
telemetry.addData("Program", "Complete");
telemetry.update();
sleep(2000);
}
/* For fun, we also exhibit the navigational pose. In the Relic Recovery game,
* it is perhaps unlikely that you will actually need to act on this pose information, but
* we illustrate it nevertheless, for completeness. */
// OpenGLMatrix pose = ((VuforiaTrackableDefaultListener)relicTemplate.getListener()).getPose();
// telemetry.addData("Pose", format(pose));
/* We further illustrate how to decompose the pose into useful rotational and
* translational components */
// if (pose != null) {
// VectorF trans = pose.getTranslation();
// Orientation rot = Orientation.getOrientation(pose, AxesReference.EXTRINSIC, AxesOrder.XYZ, AngleUnit.DEGREES);
// // Extract the X, Y, and Z components of the offset of the target relative to the robot
// double tX = trans.get(0);
// double tY = trans.get(1);
// double tZ = trans.get(2);
// // Extract the rotational components of the target relative to the robot
// double rX = rot.firstAngle;
// double rY = rot.secondAngle;
// double rZ = rot.thirdAngle;
// }
}
else {
telemetry.addData("VuMark", "not visible --> ASSUMING LEFT");
telemetry.update();
grabBlock();
// waitFor((float)0.5);
sleep(1000);
// Lift grabbed block
liftBlock();
sleep(4000);
liftermotor.setPower(0.0);
driveBack((float)0.4);
waitFor((float) 0.46);
sleep(1000);
while(true)
{
angles = imu.getAngularOrientation(AxesReference.INTRINSIC, AxesOrder.ZYX, AngleUnit.DEGREES);
if (angles.firstAngle < 88) {
telemetry.addData("Left", "yes: " + angles.firstAngle);
telemetry.update();
drivefrontone.setPower(-0.35);
drivefronttwo.setPower(-0.35);
drivebackone.setPower(-0.35);
drivebacktwo.setPower(-0.35);
}
else {
telemetry.addData("Left","no" + angles.firstAngle);
drivebackone.setPower(0.0);
drivebacktwo.setPower(0.0);
drivefrontone.setPower(0.0);
drivefronttwo.setPower(0.0);
break;
}
}
// bringDown();
// driveBack((float) 0.5);
// waitFor((float) 1.0);
driveForward((float)0.2);
waitFor((float) 0.2);
// Drop block
sleep(1000);
dropBlock();
sleep(1000);
liftBlock();
sleep(3000);
driveBack((float)0.3);
//driveBack((float)0.1);
waitFor((float) 0.25);
bringDown();
telemetry.addData("Program", "Complete");
telemetry.update();
sleep(2000);
}
telemetry.update();
}
String format(OpenGLMatrix transformationMatrix) {
return (transformationMatrix != null) ? transformationMatrix.formatAsTransform() : "null";
}
}
| |
/*
* Copyright 2017-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.incubator.store.virtual.impl;
import com.google.common.collect.Collections2;
import com.google.common.collect.Maps;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.apache.felix.scr.annotations.Service;
import org.onosproject.cluster.ClusterService;
import org.onosproject.cluster.NodeId;
import org.onosproject.incubator.net.virtual.NetworkId;
import org.onosproject.incubator.net.virtual.VirtualNetworkMeterStore;
import org.onosproject.incubator.store.meter.impl.MeterData;
import org.onosproject.net.DeviceId;
import org.onosproject.net.meter.DefaultMeter;
import org.onosproject.net.meter.Meter;
import org.onosproject.net.meter.MeterEvent;
import org.onosproject.net.meter.MeterFailReason;
import org.onosproject.net.meter.MeterFeatures;
import org.onosproject.net.meter.MeterFeaturesKey;
import org.onosproject.net.meter.MeterKey;
import org.onosproject.net.meter.MeterOperation;
import org.onosproject.net.meter.MeterStoreDelegate;
import org.onosproject.net.meter.MeterStoreResult;
import org.onosproject.store.service.StorageException;
import org.slf4j.Logger;
import java.util.Collection;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import static org.onosproject.net.meter.MeterFailReason.TIMEOUT;
import static org.slf4j.LoggerFactory.getLogger;
/**
 * Implementation of the virtual meter store for a single instance.
 * <p>
 * All state (meters, meter features, and the futures of in-flight
 * operations) is held in in-memory maps keyed by virtual network id;
 * nothing is replicated across controller instances.
 */
//TODO: support distributed meter store for virtual networks
@Component(immediate = true)
@Service
public class SimpleVirtualMeterStore
        extends AbstractVirtualStore<MeterEvent, MeterStoreDelegate>
        implements VirtualNetworkMeterStore {

    private Logger log = getLogger(getClass());

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected ClusterService clusterService;

    // Per-network meter state, keyed by (device id, meter id).
    private ConcurrentMap<NetworkId, ConcurrentMap<MeterKey, MeterData>> meterMap =
            Maps.newConcurrentMap();

    // Identity of this controller node, recorded as the origin of stored meters.
    private NodeId local;

    // Per-network meter features, keyed by device id.
    private ConcurrentMap<NetworkId, ConcurrentMap<MeterFeaturesKey, MeterFeatures>>
            meterFeatureMap = Maps.newConcurrentMap();

    // Per-network futures for operations awaiting a dataplane response.
    private ConcurrentMap<NetworkId,
            ConcurrentMap<MeterKey, CompletableFuture<MeterStoreResult>>> futuresMap =
            Maps.newConcurrentMap();

    @Activate
    public void activate() {
        log.info("Started");
        local = clusterService.getLocalNode().id();
    }

    @Deactivate
    public void deactivate() {
        log.info("Stopped");
    }

    /**
     * Returns the meter map for the given network, creating it on first use.
     * computeIfAbsent both creates and returns the value in a single atomic
     * step, replacing the previous computeIfAbsent-then-get double lookup.
     */
    private ConcurrentMap<MeterKey, MeterData> getMetersByNetwork(NetworkId networkId) {
        return meterMap.computeIfAbsent(networkId, m -> new ConcurrentHashMap<>());
    }

    /** Returns the meter-features map for the given network, creating it on first use. */
    private ConcurrentMap<MeterFeaturesKey, MeterFeatures>
            getMeterFeaturesByNetwork(NetworkId networkId) {
        return meterFeatureMap.computeIfAbsent(networkId, f -> new ConcurrentHashMap<>());
    }

    /** Returns the pending-futures map for the given network, creating it on first use. */
    private ConcurrentMap<MeterKey, CompletableFuture<MeterStoreResult>>
            getFuturesByNetwork(NetworkId networkId) {
        return futuresMap.computeIfAbsent(networkId, f -> new ConcurrentHashMap<>());
    }

    @Override
    public CompletableFuture<MeterStoreResult> storeMeter(NetworkId networkId, Meter meter) {
        ConcurrentMap<MeterKey, MeterData> meters = getMetersByNetwork(networkId);
        ConcurrentMap<MeterKey, CompletableFuture<MeterStoreResult>> futures =
                getFuturesByNetwork(networkId);

        // The future is parked until the dataplane confirms the addition.
        CompletableFuture<MeterStoreResult> future = new CompletableFuture<>();
        MeterKey key = MeterKey.key(meter.deviceId(), meter.id());
        futures.put(key, future);
        MeterData data = new MeterData(meter, null, local);
        try {
            meters.put(key, data);
        } catch (StorageException e) {
            future.completeExceptionally(e);
        }
        return future;
    }

    @Override
    public CompletableFuture<MeterStoreResult> deleteMeter(NetworkId networkId, Meter meter) {
        ConcurrentMap<MeterKey, MeterData> meters = getMetersByNetwork(networkId);
        ConcurrentMap<MeterKey, CompletableFuture<MeterStoreResult>> futures =
                getFuturesByNetwork(networkId);

        CompletableFuture<MeterStoreResult> future = new CompletableFuture<>();
        MeterKey key = MeterKey.key(meter.deviceId(), meter.id());
        futures.put(key, future);
        MeterData data = new MeterData(meter, null, local);

        // Update the state of the meter. It will be pruned by observing
        // that it has been removed from the dataplane. If the meter was
        // never stored, complete immediately with success.
        try {
            if (meters.computeIfPresent(key, (k, v) -> data) == null) {
                future.complete(MeterStoreResult.success());
            }
        } catch (StorageException e) {
            future.completeExceptionally(e);
        }
        return future;
    }

    @Override
    public MeterStoreResult storeMeterFeatures(NetworkId networkId, MeterFeatures meterfeatures) {
        ConcurrentMap<MeterFeaturesKey, MeterFeatures> meterFeatures
                = getMeterFeaturesByNetwork(networkId);
        MeterStoreResult result = MeterStoreResult.success();
        MeterFeaturesKey key = MeterFeaturesKey.key(meterfeatures.deviceId());
        try {
            // putIfAbsent: features already recorded for a device are kept.
            meterFeatures.putIfAbsent(key, meterfeatures);
        } catch (StorageException e) {
            result = MeterStoreResult.fail(TIMEOUT);
        }
        return result;
    }

    @Override
    public MeterStoreResult deleteMeterFeatures(NetworkId networkId, DeviceId deviceId) {
        ConcurrentMap<MeterFeaturesKey, MeterFeatures> meterFeatures
                = getMeterFeaturesByNetwork(networkId);
        MeterStoreResult result = MeterStoreResult.success();
        MeterFeaturesKey key = MeterFeaturesKey.key(deviceId);
        try {
            meterFeatures.remove(key);
        } catch (StorageException e) {
            result = MeterStoreResult.fail(TIMEOUT);
        }
        return result;
    }

    @Override
    public CompletableFuture<MeterStoreResult> updateMeter(NetworkId networkId, Meter meter) {
        ConcurrentMap<MeterKey, MeterData> meters = getMetersByNetwork(networkId);
        ConcurrentMap<MeterKey, CompletableFuture<MeterStoreResult>> futures =
                getFuturesByNetwork(networkId);

        CompletableFuture<MeterStoreResult> future = new CompletableFuture<>();
        MeterKey key = MeterKey.key(meter.deviceId(), meter.id());
        futures.put(key, future);
        MeterData data = new MeterData(meter, null, local);
        try {
            // Updating a meter that was never stored is an invalid request.
            if (meters.computeIfPresent(key, (k, v) -> data) == null) {
                future.complete(MeterStoreResult.fail(MeterFailReason.INVALID_METER));
            }
        } catch (StorageException e) {
            future.completeExceptionally(e);
        }
        return future;
    }

    @Override
    public void updateMeterState(NetworkId networkId, Meter meter) {
        ConcurrentMap<MeterKey, MeterData> meters = getMetersByNetwork(networkId);
        MeterKey key = MeterKey.key(meter.deviceId(), meter.id());
        // Copy the dataplane-reported statistics onto the stored meter.
        meters.computeIfPresent(key, (k, v) -> {
            DefaultMeter m = (DefaultMeter) v.meter();
            m.setState(meter.state());
            m.setProcessedPackets(meter.packetsSeen());
            m.setProcessedBytes(meter.bytesSeen());
            m.setLife(meter.life());
            // TODO: Prune if drops to zero.
            m.setReferenceCount(meter.referenceCount());
            return new MeterData(m, null, v.origin());
        });
    }

    @Override
    public Meter getMeter(NetworkId networkId, MeterKey key) {
        ConcurrentMap<MeterKey, MeterData> meters = getMetersByNetwork(networkId);
        MeterData data = meters.get(key);
        return data == null ? null : data.meter();
    }

    @Override
    public Collection<Meter> getAllMeters(NetworkId networkId) {
        ConcurrentMap<MeterKey, MeterData> meters = getMetersByNetwork(networkId);
        // Live view over the stored MeterData values.
        return Collections2.transform(meters.values(), MeterData::meter);
    }

    @Override
    public void failedMeter(NetworkId networkId, MeterOperation op, MeterFailReason reason) {
        ConcurrentMap<MeterKey, MeterData> meters = getMetersByNetwork(networkId);
        MeterKey key = MeterKey.key(op.meter().deviceId(), op.meter().id());
        // Record the failure reason against the stored meter, if present.
        meters.computeIfPresent(key, (k, v) ->
                new MeterData(v.meter(), reason, v.origin()));
    }

    @Override
    public void deleteMeterNow(NetworkId networkId, Meter m) {
        ConcurrentMap<MeterKey, MeterData> meters = getMetersByNetwork(networkId);
        ConcurrentMap<MeterKey, CompletableFuture<MeterStoreResult>> futures =
                getFuturesByNetwork(networkId);
        MeterKey key = MeterKey.key(m.deviceId(), m.id());
        // Drop both the pending future and the stored meter immediately.
        futures.remove(key);
        meters.remove(key);
    }

    @Override
    public long getMaxMeters(NetworkId networkId, MeterFeaturesKey key) {
        ConcurrentMap<MeterFeaturesKey, MeterFeatures> meterFeatures
                = getMeterFeaturesByNetwork(networkId);
        MeterFeatures features = meterFeatures.get(key);
        return features == null ? 0L : features.maxMeter();
    }
}
| |
package org.nd4j.linalg.api.blas.impl;
import org.nd4j.linalg.api.blas.Level3;
import org.nd4j.linalg.api.blas.params.GemmParams;
import org.nd4j.linalg.api.buffer.DataBuffer;
import org.nd4j.linalg.api.complex.IComplexDouble;
import org.nd4j.linalg.api.complex.IComplexFloat;
import org.nd4j.linalg.api.complex.IComplexNDArray;
import org.nd4j.linalg.api.complex.IComplexNumber;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.NDArrayFactory;
/**
* Base class for level 3 functions, abstract headers pulled from:
* http://www.netlib.org/blas/blast-forum/cblas.h
*
* @author Adam Gibson
*/
public abstract class BaseLevel3 extends BaseLevel implements Level3 {
/**
 * gemm performs a matrix-matrix operation
 * c := alpha*op(a)*op(b) + beta*c,
 * where c is an m-by-n matrix,
 * op(a) is an m-by-k matrix,
 * op(b) is a k-by-n matrix.
 *
 * BUG FIX: the underlying dgemm/sgemm calls previously hard-coded
 * alpha = 1.0 and beta = 0, silently discarding the caller's scaling
 * factors (contradicting this method's own contract above and the
 * complex-number overload, which forwards them). They are now passed
 * through.
 *
 * @param Order  matrix storage order
 * @param TransA transposition flag for a
 * @param TransB transposition flag for b
 * @param alpha  scalar applied to op(a)*op(b)
 * @param A      left operand
 * @param B      right operand
 * @param beta   scalar applied to the existing contents of c
 * @param C      result matrix, updated in place
 */
@Override
public void gemm(char Order, char TransA, char TransB, double alpha, INDArray A, INDArray B, double beta, INDArray C) {
    GemmParams params = new GemmParams(A, B, C);
    // For C-ordered arrays the operands are swapped — presumably so the
    // column-major BLAS routine sees the intended layout; TODO confirm
    // against GemmParams' conventions.
    if (A.data().dataType() == DataBuffer.Type.DOUBLE) {
        dgemm(Order
                , TransA
                , TransB
                , params.getM()
                , params.getN()
                , params.getK()
                , alpha
                , A.ordering() == NDArrayFactory.C ? B : A
                , params.getLda()
                , B.ordering() == NDArrayFactory.C ? A : B
                , params.getLdb()
                , beta
                , C
                , params.getLdc());
    } else {
        sgemm(Order
                , TransA
                , TransB
                , params.getM()
                , params.getN()
                , params.getK()
                , (float) alpha
                , A.ordering() == NDArrayFactory.C ? B : A
                , params.getLda()
                , B.ordering() == NDArrayFactory.C ? A : B
                , params.getLdb()
                , (float) beta
                , C
                , params.getLdc());
    }
}
/**
 * symm performs one of the following matrix-matrix operations:
 * c := alpha*a*b + beta*c for side = 'L'or'l'
 * c := alpha*b*a + beta*c for side = 'R'or'r',
 * where a is a symmetric matrix,
 * b and c are m-by-n matrices.
 * (NOTE(review): the original javadoc here described her2k, not symm;
 * corrected to match the ?symm dispatch below.)
 * @param Order matrix storage order
 * @param Side  which side the symmetric matrix a appears on
 * @param Uplo  whether the upper or lower triangle of a is referenced
 * @param alpha scalar applied to the product
 * @param A     symmetric operand
 * @param B     general operand
 * @param beta  scalar applied to the existing contents of c
 * @param C     result matrix, updated in place; its dimensions supply m and n
 */
@Override
public void symm(char Order, char Side, char Uplo, double alpha, INDArray A, INDArray B, double beta, INDArray C) {
    if(A.data().dataType() == DataBuffer.Type.DOUBLE)
        dsymm(Order,Side,Uplo,C.rows(),C.columns(),alpha,A,A.size(0),B,B.size(0),beta,C,C.size(0));
    else
        ssymm(Order, Side, Uplo, C.rows(), C.columns(), (float) alpha, A, A.size(0), B, B.size(0), (float) beta, C, C.size(0));
}
/**
 * syrk performs a rank-n update of an n-by-n symmetric matrix c, that is, one of the following operations:
 * c := alpha*a*a' + beta*c for trans = 'N'or'n'
 * c := alpha*a'*a + beta*c for trans = 'T'or't','C'or'c',
 * where c is an n-by-n symmetric matrix;
 * a is an n-by-k matrix, if trans = 'N'or'n',
 * a is a k-by-n matrix, if trans = 'T'or't','C'or'c'.
 * @param Order matrix storage order
 * @param Uplo  whether the upper or lower triangle of c is referenced
 * @param Trans transposition flag for a
 * @param alpha scalar applied to the product
 * @param A     operand matrix
 * @param beta  scalar applied to the existing contents of c
 * @param C     result matrix, updated in place
 */
@Override
public void syrk(char Order, char Uplo, char Trans, double alpha, INDArray A, double beta, INDArray C) {
    // NOTE(review): the K argument is hard-coded to 1 below, whereas the
    // complex-number overload passes A's column count. Confirm against the
    // cblas_?syrk contract before relying on this for k != 1.
    if(A.data().dataType() == DataBuffer.Type.DOUBLE)
        dsyrk(Order,Uplo,Trans,C.rows(),1,alpha,A,A.size(0),beta,C,C.size(0));
    else
        ssyrk(Order,Uplo,Trans,C.rows(),1,(float) alpha,A,A.size(0),(float) beta,C,C.size(0));
}
/**
 * syr2k performs a rank-2k update of an n-by-n symmetric matrix c:
 * c := alpha*a*b' + alpha*b*a' + beta*c for trans = 'N'or'n'
 * c := alpha*a'*b + alpha*b'*a + beta*c for trans = 'T'or't',
 * where c is an n-by-n symmetric matrix;
 * a and b are n-by-k matrices, if trans = 'N'or'n',
 * a and b are k-by-n matrices, if trans = 'T'or't'.
 *
 * @param Order matrix storage order
 * @param Uplo  whether the upper or lower triangle of c is referenced
 * @param Trans transposition flag
 * @param alpha scalar applied to the products
 * @param A     first operand
 * @param B     second operand
 * @param beta  scalar applied to the existing contents of c
 * @param C     result matrix, updated in place
 */
@Override
public void syr2k(char Order, char Uplo, char Trans, double alpha, INDArray A, INDArray B, double beta, INDArray C) {
    // Dimension and leading-dimension arguments shared by both precisions.
    int n = A.rows();
    int k = A.columns();
    int lda = A.size(0);
    int ldb = B.size(0);
    int ldc = C.size(0);
    if (A.data().dataType() == DataBuffer.Type.DOUBLE) {
        dsyr2k(Order, Uplo, Trans, n, k, alpha, A, lda, B, ldb, beta, C, ldc);
    } else {
        ssyr2k(Order, Uplo, Trans, n, k, (float) alpha, A, lda, B, ldb, (float) beta, C, ldc);
    }
}
/**
 * trmm performs one of the matrix-matrix operations
 * b := alpha*op(a)*b for side = 'L'or'l'
 * b := alpha*b*op(a) for side = 'R'or'r',
 * where b is an m-by-n matrix, a is a unit or non-unit, upper or lower
 * triangular matrix, and op(a) is a or a'.
 * (NOTE(review): the original javadoc here described syr2k; corrected to
 * match the ?trmm dispatch below.)
 * @param Order  matrix storage order
 * @param Side   which side the triangular matrix a appears on
 * @param Uplo   whether a is upper or lower triangular
 * @param TransA transposition flag for a
 * @param Diag   whether a is unit triangular
 * @param alpha  scalar applied to the product
 * @param A      triangular operand
 * @param B      general operand, overwritten with the result
 * @param C      never referenced below — the ?trmm routines take no c
 *               argument; confirm callers do not expect C to be written
 */
@Override
public void trmm(char Order, char Side, char Uplo, char TransA, char Diag, double alpha, INDArray A, INDArray B, INDArray C) {
    if(A.data().dataType() == DataBuffer.Type.DOUBLE) {
        dtrmm(Order,Side,Uplo,TransA,Diag,A.rows(),A.columns(),alpha,A,A.size(0),B,B.size(0));
    }
    else
        strmm(Order, Side, Uplo, TransA, Diag, A.rows(), A.columns(), (float) alpha, A, A.size(0), B, B.size(0));
}
/**
 * trsm solves one of the following matrix equations:
 * op(a)*x = alpha*b or x*op(a) = alpha*b,
 * where x and b are m-by-n general matrices, and a is triangular;
 * op(a) must be an m-by-m matrix, if side = 'L'or'l'
 * op(a) must be an n-by-n matrix, if side = 'R'or'r'.
 * The routine overwrites x on b.
 *
 * @param Order  matrix storage order
 * @param Side   which side op(a) appears on
 * @param Uplo   whether a is upper or lower triangular
 * @param TransA transposition flag for a
 * @param Diag   whether a is unit triangular
 * @param alpha  scalar applied to b
 * @param A      triangular operand
 * @param B      right-hand side, overwritten with the solution x
 */
@Override
public void trsm(char Order, char Side, char Uplo, char TransA, char Diag, double alpha, INDArray A, INDArray B) {
    // Dimension and leading-dimension arguments shared by both precisions.
    int m = A.rows();
    int n = A.columns();
    int lda = A.size(0);
    int ldb = B.size(0);
    if (A.data().dataType() == DataBuffer.Type.DOUBLE) {
        dtrsm(Order, Side, Uplo, TransA, Diag, m, n, alpha, A, lda, B, ldb);
    } else {
        strsm(Order, Side, Uplo, TransA, Diag, m, n, (float) alpha, A, lda, B, ldb);
    }
}
/**
 * gemm performs a matrix-matrix operation
 * c := alpha*op(a)*op(b) + beta*c,
 * where c is an m-by-n matrix,
 * op(a) is an m-by-k matrix,
 * op(b) is a k-by-n matrix.
 *
 * @param Order  matrix storage order
 * @param TransA transposition flag for a
 * @param TransB transposition flag for b
 * @param alpha  scalar applied to op(a)*op(b)
 * @param A      left operand
 * @param B      right operand
 * @param beta   scalar applied to the existing contents of c
 * @param C      result matrix, updated in place
 */
@Override
public void gemm(char Order, char TransA, char TransB, IComplexNumber alpha, IComplexNDArray A, IComplexNDArray B, IComplexNumber beta, IComplexNDArray C) {
    GemmParams params = new GemmParams(A, B, C);
    // For C-ordered arrays the operands are swapped — presumably so the
    // column-major BLAS routine sees the intended layout; TODO confirm
    // against GemmParams' conventions.
    IComplexNDArray first = A.ordering() == NDArrayFactory.C ? B : A;
    IComplexNDArray second = B.ordering() == NDArrayFactory.C ? A : B;
    if (A.data().dataType() == DataBuffer.Type.DOUBLE) {
        zgemm(Order, TransA, TransB,
                params.getM(), params.getN(), params.getK(),
                alpha.asDouble(),
                first, params.getLda(),
                second, params.getLdb(),
                beta.asDouble(),
                C, params.getLdc());
    } else {
        cgemm(Order, TransA, TransB,
                params.getM(), params.getN(), params.getK(),
                alpha.asFloat(),
                first, params.getLda(),
                second, params.getLdb(),
                beta.asFloat(),
                C, params.getLdc());
    }
}
/**
 * hemm performs one of the following matrix-matrix operations:
 * c := alpha*a*b + beta*c for side = 'L'or'l'
 * c := alpha*b*a + beta*c for side = 'R'or'r',
 * where a is a Hermitian matrix,
 * b and c are m-by-n matrices.
 *
 * @param Order matrix storage order
 * @param Side  which side the Hermitian matrix a appears on
 * @param Uplo  whether the upper or lower triangle of a is referenced
 * @param alpha scalar applied to the product
 * @param A     Hermitian operand
 * @param B     general operand; its dimensions supply m and n
 * @param beta  scalar applied to the existing contents of c
 * @param C     result matrix, updated in place
 */
@Override
public void hemm(char Order, char Side, char Uplo, IComplexNumber alpha, IComplexNDArray A, IComplexNDArray B, IComplexNumber beta, IComplexNDArray C) {
    // Dimension and leading-dimension arguments shared by both precisions.
    int m = B.rows();
    int n = B.columns();
    int lda = A.size(0);
    int ldb = B.size(0);
    int ldc = C.size(0);
    if (A.data().dataType() == DataBuffer.Type.DOUBLE) {
        zhemm(Order, Side, Uplo, m, n, alpha.asDouble(), A, lda, B, ldb, beta.asDouble(), C, ldc);
    } else {
        chemm(Order, Side, Uplo, m, n, alpha.asFloat(), A, lda, B, ldb, beta.asFloat(), C, ldc);
    }
}
/**
 * herk performs a rank-n update of a Hermitian matrix, that is, one of the following operations:
 * c := alpha*a*conjug(a') + beta*c for trans = 'N'or'n'
 * c := alpha*conjug(a')*a + beta*c for trans = 'C'or'c',
 * where c is an n-by-n Hermitian matrix;
 * a is an n-by-k matrix, if trans = 'N'or'n',
 * a is a k-by-n matrix, if trans = 'C'or'c'.
 *
 * @param Order matrix storage order
 * @param Uplo  whether the upper or lower triangle of c is referenced
 * @param Trans transposition flag for a
 * @param alpha scalar applied to the product
 * @param A     operand matrix
 * @param beta  scalar applied to the existing contents of c
 * @param C     result matrix, updated in place
 */
@Override
public void herk(char Order, char Uplo, char Trans, IComplexNumber alpha, IComplexNDArray A, IComplexNumber beta, IComplexNDArray C) {
    // Dimension and leading-dimension arguments shared by both precisions.
    int n = A.rows();
    int k = A.columns();
    int lda = A.size(0);
    int ldc = C.size(0);
    if (A.data().dataType() == DataBuffer.Type.DOUBLE) {
        zherk(Order, Uplo, Trans, n, k, alpha.asDouble(), A, lda, beta.asDouble(), C, ldc);
    } else {
        cherk(Order, Uplo, Trans, n, k, alpha.asFloat(), A, lda, beta.asFloat(), C, ldc);
    }
}
/**
 * her2k performs a rank-2k update of an n-by-n Hermitian matrix c:
 * c := alpha*a*conjg(b') + conjg(alpha)*b*conjg(a') + beta*c, for trans = 'N'or'n'
 * c := alpha*conjg(b')*a + conjg(alpha)*conjg(a')*b + beta*c, for trans = 'C'or'c'
 * where c is an n-by-n Hermitian matrix;
 * a and b are n-by-k matrices if trans = 'N'or'n',
 * a and b are k-by-n matrices if trans = 'C'or'c'.
 *
 * @param Order matrix storage order
 * @param Uplo  whether the upper or lower triangle of c is referenced
 * @param Trans transposition flag
 * @param alpha scalar applied to the products
 * @param A     first operand
 * @param B     second operand
 * @param beta  scalar applied to the existing contents of c
 * @param C     result matrix, updated in place
 */
@Override
public void her2k(char Order, char Uplo, char Trans, IComplexNumber alpha, IComplexNDArray A, IComplexNDArray B, IComplexNumber beta, IComplexNDArray C) {
    // Dimension and leading-dimension arguments shared by both precisions.
    int n = A.rows();
    int k = A.columns();
    int lda = A.size(0);
    int ldb = B.size(0);
    int ldc = C.size(0);
    if (A.data().dataType() == DataBuffer.Type.DOUBLE) {
        zher2k(Order, Uplo, Trans, n, k, alpha.asDouble(), A, lda, B, ldb, beta.asDouble(), C, ldc);
    } else {
        cher2k(Order, Uplo, Trans, n, k, alpha.asFloat(), A, lda, B, ldb, beta.asFloat(), C, ldc);
    }
}
/**
 * symm performs one of the following matrix-matrix operations:
 * c := alpha*a*b + beta*c for side = 'L'or'l'
 * c := alpha*b*a + beta*c for side = 'R'or'r',
 * where a is a symmetric matrix,
 * b and c are m-by-n matrices.
 * (NOTE(review): the original javadoc here described her2k; corrected to
 * match the ?symm dispatch below.)
 * @param Order matrix storage order
 * @param Side  which side the symmetric matrix a appears on
 * @param Uplo  whether the upper or lower triangle of a is referenced
 * @param alpha scalar applied to the product
 * @param A     symmetric operand
 * @param B     general operand
 * @param beta  scalar applied to the existing contents of c
 * @param C     result matrix, updated in place
 */
@Override
public void symm(char Order, char Side, char Uplo, IComplexNumber alpha, IComplexNDArray A, IComplexNDArray B, IComplexNumber beta, IComplexNDArray C) {
    // NOTE(review): dimensions come from A.rows()/A.columns() here, while
    // the double-precision overload uses C.rows()/C.columns(); the two
    // differ whenever a is rectangular-adjacent (m != n). Confirm which is
    // intended before changing either.
    if(A.data().dataType() == DataBuffer.Type.DOUBLE)
        zsymm(Order,Side,Uplo,A.rows(),A.columns(),alpha.asDouble(),A,A.size(0),B,B.size(0),beta.asDouble(),C,C.size(0));
    else
        csymm(Order, Side, Uplo, A.rows(), A.columns(), alpha.asFloat(), A, A.size(0), B, B.size(0), beta.asFloat(), C, C.size(0));
}
/**
 * syrk performs a rank-n update of an n-by-n symmetric matrix c, that is, one of the following operations:
 * c := alpha*a*a' + beta*c for trans = 'N'or'n'
 * c := alpha*a'*a + beta*c for trans = 'T'or't','C'or'c',
 * where c is an n-by-n symmetric matrix;
 * a is an n-by-k matrix, if trans = 'N'or'n',
 * a is a k-by-n matrix, if trans = 'T'or't','C'or'c'.
 * @param Order matrix storage order (row- or column-major)
 * @param Uplo whether the upper or lower triangle of c is referenced
 * @param Trans 'N'/'n', 'T'/'t' or 'C'/'c' (see above)
 * @param alpha scalar multiplier for the rank-k product
 * @param A input matrix
 * @param beta scalar multiplier for c
 * @param C symmetric matrix, updated in place
 */
@Override
public void syrk(char Order, char Uplo, char Trans, IComplexNumber alpha, IComplexNDArray A, IComplexNumber beta, IComplexNDArray C) {
    // Precision dispatch: DOUBLE buffers -> zsyrk, everything else -> csyrk.
    // NOTE(review): N and K are taken from A.rows()/A.columns(), which matches
    // BLAS semantics only when Trans == 'N' -- confirm callers.
    if(A.data().dataType() == DataBuffer.Type.DOUBLE)
        zsyrk(Order,Uplo,Trans,A.rows(),A.columns(),alpha.asDouble(),A,A.size(0),beta.asDouble(),C,C.size(0));
    else
        csyrk(Order, Uplo, Trans, A.rows(), A.columns(), alpha.asFloat(), A, A.size(0), beta.asFloat(), C, C.size(0));
}
/**
 * syr2k performs a rank-2k update of an n-by-n symmetric matrix c, that is, one of the following operations:
 * c := alpha*a*b' + alpha*b*a' + beta*c for trans = 'N'or'n'
 * c := alpha*a'*b + alpha*b'*a + beta*c for trans = 'T'or't',
 * where c is an n-by-n symmetric matrix;
 * a and b are n-by-k matrices, if trans = 'N'or'n',
 * a and b are k-by-n matrices, if trans = 'T'or't'.
 * @param Order matrix storage order (row- or column-major)
 * @param Uplo whether the upper or lower triangle of c is referenced
 * @param Trans 'N'/'n' or 'T'/'t' (see above)
 * @param alpha scalar multiplier for the rank-2k product
 * @param A first input matrix
 * @param B second input matrix
 * @param beta scalar multiplier for c
 * @param C symmetric matrix, updated in place
 */
@Override
public void syr2k(char Order, char Uplo, char Trans, IComplexNumber alpha, IComplexNDArray A, IComplexNDArray B, IComplexNumber beta, IComplexNDArray C) {
    // Precision dispatch: DOUBLE buffers -> zsyr2k, everything else -> csyr2k.
    // NOTE(review): N and K come from A.rows()/A.columns(), valid only for
    // Trans == 'N' -- confirm.
    if(A.data().dataType() == DataBuffer.Type.DOUBLE)
        zsyr2k(Order,Uplo,Trans,A.rows(),A.columns(),alpha.asDouble(),A,A.size(0),B,B.size(0),beta.asDouble(),C,C.size(0));
    else
        csyr2k(Order, Uplo, Trans, A.rows(), A.columns(), alpha.asFloat(), A, A.size(0), B, B.size(0), beta.asFloat(), C, C.size(0));
}
/**
 * trmm computes a scalar-matrix-matrix product where one input matrix is triangular:
 * b := alpha*op(a)*b or b := alpha*b*op(a),
 * where op(a) is a, a', or conjg(a') depending on TransA, and a is triangular.
 * (The previous javadoc here described syr2k; replaced with the trmm contract.)
 * NOTE(review): unlike standard BLAS ?trmm, this wrapper also forwards a
 * separate result matrix C/ldc to the underlying primitive -- see the
 * abstract ctrmm/ztrmm prototypes below.
 * @param Order matrix storage order (row- or column-major)
 * @param Side whether op(a) multiplies b from the left ('L') or right ('R')
 * @param Uplo whether a is upper or lower triangular
 * @param TransA transposition applied to a (defines op(a))
 * @param Diag whether a is unit triangular
 * @param alpha scalar multiplier
 * @param A triangular matrix
 * @param B input matrix
 * @param C result matrix passed through to the primitive
 */
@Override
public void trmm(char Order, char Side, char Uplo, char TransA, char Diag, IComplexNumber alpha, IComplexNDArray A, IComplexNDArray B, IComplexNDArray C) {
    // Precision dispatch: DOUBLE buffers -> ztrmm, everything else -> ctrmm.
    // NOTE(review): M and N are derived from A, but in BLAS they are the
    // dimensions of B (m-by-n); equivalent only when A is square of the same
    // order as B -- confirm.
    if(A.data().dataType() == DataBuffer.Type.DOUBLE)
        ztrmm(Order,Side,Uplo,TransA,Diag,A.rows(),A.columns(),alpha.asDouble(),A,A.size(0),B,B.size(0),C,C.size(0));
    else
        ctrmm(Order,Side,Uplo,TransA,Diag,A.rows(),A.columns(),alpha.asFloat(),A,A.size(0),B,B.size(0),C,C.size(0));
}
/**
 * ?trsm solves one of the following matrix equations:
 * op(a)*x = alpha*b or x*op(a) = alpha*b,
 * where x and b are m-by-n general matrices, and a is triangular;
 * op(a) must be an m-by-m matrix, if side = 'L'or'l'
 * op(a) must be an n-by-n matrix, if side = 'R'or'r'.
 * For the definition of op(a), see Matrix Arguments.
 * The routine overwrites x on b.
 * @param Order matrix storage order (row- or column-major)
 * @param Side whether op(a) multiplies x from the left ('L') or right ('R')
 * @param Uplo whether a is upper or lower triangular
 * @param TransA transposition applied to a (defines op(a))
 * @param Diag whether a is unit triangular
 * @param alpha scalar multiplier for b
 * @param A triangular matrix
 * @param B right-hand side, overwritten in place with the solution x
 */
@Override
public void trsm(char Order, char Side, char Uplo, char TransA, char Diag, IComplexNumber alpha, IComplexNDArray A, IComplexNDArray B) {
    // Precision dispatch: DOUBLE buffers -> ztrsm, everything else -> ctrsm.
    // NOTE(review): M and N are derived from A, but in BLAS they are the
    // dimensions of B (m-by-n); equivalent only when A is square of the same
    // order as B -- confirm.
    if(A.data().dataType() == DataBuffer.Type.DOUBLE)
        ztrsm(Order,Side,Uplo,TransA,Diag,A.rows(),A.columns(),alpha.asDouble(),A,A.size(0),B,B.size(0));
    else
        ctrsm(Order,Side,Uplo,TransA,Diag,A.rows(),A.columns(),alpha.asFloat(),A,A.size(0),B,B.size(0));
}
/*
* ===========================================================================
* Prototypes for level 3 BLAS
* ===========================================================================
*/
/*
* Routines with standard 4 prefixes (S, D, C, Z)
*/
// ---- Single-precision real (S prefix) ----
protected abstract void sgemm( char Order, char TransA,
char TransB, int M, int N,
int K, float alpha, INDArray A,
int lda, INDArray B, int ldb,
float beta, INDArray C, int ldc);
protected abstract void ssymm( char Order, char Side,
char Uplo, int M, int N,
float alpha, INDArray A, int lda,
INDArray B, int ldb, float beta,
INDArray C, int ldc);
protected abstract void ssyrk( char Order, char Uplo,
char Trans, int N, int K,
float alpha, INDArray A, int lda,
float beta, INDArray C, int ldc);
protected abstract void ssyr2k( char Order, char Uplo,
char Trans, int N, int K,
float alpha, INDArray A, int lda,
INDArray B, int ldb, float beta,
INDArray C, int ldc);
protected abstract void strmm(char Order, char Side,
char Uplo, char TransA,
char Diag, int M, int N,
float alpha, INDArray A, int lda,
INDArray B, int ldb);
protected abstract void strsm( char Order, char Side,
char Uplo, char TransA,
char Diag, int M, int N,
float alpha, INDArray A, int lda,
INDArray B, int ldb);
// ---- Double-precision real (D prefix) ----
protected abstract void dgemm( char Order, char TransA,
char TransB, int M, int N,
int K, double alpha, INDArray A,
int lda, INDArray B, int ldb,
double beta, INDArray C, int ldc);
protected abstract void dsymm( char Order, char Side,
char Uplo, int M, int N,
double alpha, INDArray A, int lda,
INDArray B, int ldb, double beta,
INDArray C, int ldc);
protected abstract void dsyrk( char Order, char Uplo,
char Trans, int N, int K,
double alpha, INDArray A, int lda,
double beta, INDArray C, int ldc);
protected abstract void dsyr2k( char Order, char Uplo,
char Trans, int N, int K,
double alpha, INDArray A, int lda,
INDArray B, int ldb, double beta,
INDArray C, int ldc);
protected abstract void dtrmm( char Order, char Side,
char Uplo, char TransA,
char Diag, int M, int N,
double alpha, INDArray A, int lda,
INDArray B, int ldb);
protected abstract void dtrsm( char Order, char Side,
char Uplo, char TransA,
char Diag, int M, int N,
double alpha, INDArray A, int lda,
INDArray B, int ldb);
// ---- Single-precision complex (C prefix) ----
protected abstract void cgemm( char Order, char TransA,
char TransB, int M, int N,
int K, IComplexFloat alpha, IComplexNDArray A,
int lda, IComplexNDArray B, int ldb,
IComplexFloat beta, IComplexNDArray C, int ldc);
protected abstract void csymm( char Order, char Side,
char Uplo, int M, int N,
IComplexFloat alpha, IComplexNDArray A, int lda,
IComplexNDArray B, int ldb, IComplexFloat beta,
IComplexNDArray C, int ldc);
protected abstract void csyrk( char Order, char Uplo,
char Trans, int N, int K,
IComplexFloat alpha, IComplexNDArray A, int lda,
IComplexFloat beta, IComplexNDArray C, int ldc);
protected abstract void csyr2k( char Order, char Uplo,
char Trans, int N, int K,
IComplexFloat alpha, IComplexNDArray A, int lda,
IComplexNDArray B, int ldb, IComplexFloat beta,
IComplexNDArray C, int ldc);
// NOTE(review): unlike BLAS ?trmm, the complex trmm variants here take a
// separate output matrix C/ldc.
protected abstract void ctrmm(char Order, char Side,
char Uplo, char TransA,
char Diag, int M, int N,
IComplexFloat alpha, IComplexNDArray A, int lda,
IComplexNDArray B, int ldb, IComplexNDArray C, int ldc);
protected abstract void ctrsm( char Order, char Side,
char Uplo, char TransA,
char Diag, int M, int N,
IComplexFloat alpha, IComplexNDArray A, int lda,
IComplexNDArray B, int ldb);
// ---- Double-precision complex (Z prefix) ----
protected abstract void zgemm( char Order, char TransA,
char TransB, int M, int N,
int K, IComplexDouble alpha, IComplexNDArray A,
int lda, IComplexNDArray B, int ldb,
IComplexDouble beta, IComplexNDArray C, int ldc);
protected abstract void zsymm( char Order, char Side,
char Uplo, int M, int N,
IComplexDouble alpha, IComplexNDArray A, int lda,
IComplexNDArray B, int ldb, IComplexDouble beta,
IComplexNDArray C, int ldc);
protected abstract void zsyrk( char Order, char Uplo,
char Trans, int N, int K,
IComplexDouble alpha, IComplexNDArray A, int lda,
IComplexDouble beta, IComplexNDArray C, int ldc);
protected abstract void zsyr2k( char Order, char Uplo,
char Trans, int N, int K,
IComplexDouble alpha, IComplexNDArray A, int lda,
IComplexNDArray B, int ldb, IComplexDouble beta,
IComplexNDArray C, int ldc);
protected abstract void ztrmm(char Order, char Side,
char Uplo, char TransA,
char Diag, int M, int N,
IComplexDouble alpha, IComplexNDArray A, int lda,
IComplexNDArray B, int ldb, IComplexNDArray C, int ldc);
protected abstract void ztrsm( char Order, char Side,
char Uplo, char TransA,
char Diag, int M, int N,
IComplexDouble alpha, IComplexNDArray A, int lda,
IComplexNDArray B, int ldb);
/*
* Routines with prefixes C and Z only
*/
// ---- Hermitian routines (complex only) ----
protected abstract void chemm( char Order, char Side,
char Uplo, int M, int N,
IComplexFloat alpha, IComplexNDArray A, int lda,
IComplexNDArray B, int ldb, IComplexFloat beta,
IComplexNDArray C, int ldc);
protected abstract void cherk( char Order, char Uplo,
char Trans, int N, int K,
IComplexFloat alpha, IComplexNDArray A, int lda,
IComplexFloat beta, IComplexNDArray C, int ldc);
protected abstract void cher2k( char Order, char Uplo,
char Trans, int N, int K,
IComplexFloat alpha, IComplexNDArray A, int lda,
IComplexNDArray B, int ldb, IComplexFloat beta,
IComplexNDArray C, int ldc);
protected abstract void zhemm( char Order, char Side,
char Uplo, int M, int N,
IComplexDouble alpha, IComplexNDArray A, int lda,
IComplexNDArray B, int ldb, IComplexDouble beta,
IComplexNDArray C, int ldc);
protected abstract void zherk( char Order, char Uplo,
char Trans, int N, int K,
IComplexDouble alpha, IComplexNDArray A, int lda,
IComplexDouble beta, IComplexNDArray C, int ldc);
protected abstract void zher2k( char Order, char Uplo,
char Trans, int N, int K,
IComplexDouble alpha, IComplexNDArray A, int lda,
IComplexNDArray B, int ldb, IComplexDouble beta,
IComplexNDArray C, int ldc);
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.sql.fun;
import org.apache.calcite.sql.SqlAggFunction;
import org.apache.calcite.sql.SqlAsOperator;
import org.apache.calcite.sql.SqlBinaryOperator;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlFilterOperator;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlFunctionalOperator;
import org.apache.calcite.sql.SqlInternalOperator;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlOperandCountRange;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.SqlOverOperator;
import org.apache.calcite.sql.SqlPostfixOperator;
import org.apache.calcite.sql.SqlPrefixOperator;
import org.apache.calcite.sql.SqlProcedureCallOperator;
import org.apache.calcite.sql.SqlRankFunction;
import org.apache.calcite.sql.SqlSampleSpec;
import org.apache.calcite.sql.SqlSetOperator;
import org.apache.calcite.sql.SqlSpecialOperator;
import org.apache.calcite.sql.SqlUnnestOperator;
import org.apache.calcite.sql.SqlUtil;
import org.apache.calcite.sql.SqlValuesOperator;
import org.apache.calcite.sql.SqlWindow;
import org.apache.calcite.sql.SqlWriter;
import org.apache.calcite.sql.type.InferTypes;
import org.apache.calcite.sql.type.OperandTypes;
import org.apache.calcite.sql.type.ReturnTypes;
import org.apache.calcite.sql.type.SqlOperandCountRanges;
import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.sql.util.ReflectiveSqlOperatorTable;
import org.apache.calcite.sql.validate.SqlModality;
import org.apache.calcite.util.Litmus;
/**
* Implementation of {@link org.apache.calcite.sql.SqlOperatorTable} containing
* the standard operators and functions.
*/
public class SqlStdOperatorTable extends ReflectiveSqlOperatorTable {
//~ Static fields/initializers ---------------------------------------------
/**
 * The standard operator table.
 *
 * <p>NOTE(review): mutable static, presumably lazily initialized by an
 * accessor outside this view -- confirm that initialization is properly
 * synchronized.
 */
private static SqlStdOperatorTable instance;
//-------------------------------------------------------------
// SET OPERATORS
//-------------------------------------------------------------
// The set operators can be compared to the arithmetic operators
// UNION -> +
// EXCEPT -> -
// INTERSECT -> *
// which explains the different precedence values
public static final SqlSetOperator UNION =
new SqlSetOperator("UNION", SqlKind.UNION, 14, false);
public static final SqlSetOperator UNION_ALL =
new SqlSetOperator("UNION ALL", SqlKind.UNION, 14, true);
public static final SqlSetOperator EXCEPT =
new SqlSetOperator("EXCEPT", SqlKind.EXCEPT, 14, false);
public static final SqlSetOperator EXCEPT_ALL =
new SqlSetOperator("EXCEPT ALL", SqlKind.EXCEPT, 14, true);
public static final SqlSetOperator INTERSECT =
new SqlSetOperator("INTERSECT", SqlKind.INTERSECT, 18, false);
public static final SqlSetOperator INTERSECT_ALL =
new SqlSetOperator("INTERSECT ALL", SqlKind.INTERSECT, 18, true);
/**
* The "MULTISET UNION" operator.
*/
public static final SqlMultisetSetOperator MULTISET_UNION =
new SqlMultisetSetOperator("MULTISET UNION", 14, false);
/**
* The "MULTISET UNION ALL" operator.
*/
public static final SqlMultisetSetOperator MULTISET_UNION_ALL =
new SqlMultisetSetOperator("MULTISET UNION ALL", 14, true);
/**
* The "MULTISET EXCEPT" operator.
*/
public static final SqlMultisetSetOperator MULTISET_EXCEPT =
new SqlMultisetSetOperator("MULTISET EXCEPT", 14, false);
/**
* The "MULTISET EXCEPT ALL" operator.
*/
public static final SqlMultisetSetOperator MULTISET_EXCEPT_ALL =
new SqlMultisetSetOperator("MULTISET EXCEPT ALL", 14, true);
/**
* The "MULTISET INTERSECT" operator.
*/
public static final SqlMultisetSetOperator MULTISET_INTERSECT =
new SqlMultisetSetOperator("MULTISET INTERSECT", 18, false);
/**
* The "MULTISET INTERSECT ALL" operator.
*/
public static final SqlMultisetSetOperator MULTISET_INTERSECT_ALL =
new SqlMultisetSetOperator("MULTISET INTERSECT ALL", 18, true);
//-------------------------------------------------------------
// BINARY OPERATORS
//-------------------------------------------------------------
/**
* Logical <code>AND</code> operator.
*/
public static final SqlBinaryOperator AND =
new SqlBinaryOperator(
"AND",
SqlKind.AND,
28,
true,
ReturnTypes.ARG0_NULLABLE, // more efficient than BOOLEAN_NULLABLE
InferTypes.BOOLEAN,
OperandTypes.BOOLEAN_BOOLEAN);
/**
* <code>AS</code> operator associates an expression in the SELECT clause
* with an alias.
*/
public static final SqlAsOperator AS = new SqlAsOperator();
/**
 * <code>ARGUMENT_ASSIGNMENT</code> operator (<code>=&gt;</code>)
 * assigns an argument to a function call to a particular named parameter.
 */
public static final SqlSpecialOperator ARGUMENT_ASSIGNMENT =
new SqlArgumentAssignmentOperator();
/**
 * <code>DEFAULT</code> operator indicates that an argument to a function call
 * is to take its default value.
 */
public static final SqlSpecialOperator DEFAULT = new SqlDefaultOperator();
/** <code>FILTER</code> operator filters which rows are included in an
* aggregate function. */
public static final SqlFilterOperator FILTER = new SqlFilterOperator();
/** {@code CUBE} operator, occurs within {@code GROUP BY} clause
* or nested within a {@code GROUPING SETS}. */
public static final SqlInternalOperator CUBE =
new SqlRollupOperator("CUBE", SqlKind.CUBE);
/** {@code ROLLUP} operator, occurs within {@code GROUP BY} clause
* or nested within a {@code GROUPING SETS}. */
public static final SqlInternalOperator ROLLUP =
new SqlRollupOperator("ROLLUP", SqlKind.ROLLUP);
/** {@code GROUPING SETS} operator, occurs within {@code GROUP BY} clause
* or nested within a {@code GROUPING SETS}. */
public static final SqlInternalOperator GROUPING_SETS =
new SqlRollupOperator("GROUPING SETS", SqlKind.GROUPING_SETS);
/** {@code GROUPING} function. Occurs in similar places to an aggregate
* function ({@code SELECT}, {@code HAVING} clause, etc. of an aggregate
* query), but not technically an aggregate function. */
public static final SqlGroupingFunction GROUPING =
new SqlGroupingFunction();
/** {@code GROUP_ID} function. */
public static final SqlGroupIdFunction GROUP_ID =
new SqlGroupIdFunction();
/** {@code GROUPING_ID} function. */
public static final SqlGroupingIdFunction GROUPING_ID =
new SqlGroupingIdFunction();
/** {@code EXTEND} operator. */
public static final SqlInternalOperator EXTEND = new SqlExtendOperator();
/**
* String concatenation operator, '<code>||</code>'.
*/
public static final SqlBinaryOperator CONCAT =
new SqlBinaryOperator(
"||",
SqlKind.OTHER,
60,
true,
ReturnTypes.DYADIC_STRING_SUM_PRECISION_NULLABLE,
null,
OperandTypes.STRING_SAME_SAME);
/**
* Arithmetic division operator, '<code>/</code>'.
*/
public static final SqlBinaryOperator DIVIDE =
new SqlBinaryOperator(
"/",
SqlKind.DIVIDE,
60,
true,
ReturnTypes.QUOTIENT_NULLABLE,
InferTypes.FIRST_KNOWN,
OperandTypes.DIVISION_OPERATOR);
/**
* Internal integer arithmetic division operator, '<code>/INT</code>'. This
* is only used to adjust scale for numerics. We distinguish it from
* user-requested division since some personalities want a floating-point
* computation, whereas for the internal scaling use of division, we always
* want integer division.
*/
public static final SqlBinaryOperator DIVIDE_INTEGER =
new SqlBinaryOperator(
"/INT",
SqlKind.DIVIDE,
60,
true,
ReturnTypes.INTEGER_QUOTIENT_NULLABLE,
InferTypes.FIRST_KNOWN,
OperandTypes.DIVISION_OPERATOR);
/**
* Dot operator, '<code>.</code>', used for referencing fields of records.
*/
public static final SqlBinaryOperator DOT =
new SqlBinaryOperator(
".",
SqlKind.DOT,
80,
true,
null,
null,
OperandTypes.ANY_ANY);
/**
* Logical equals operator, '<code>=</code>'.
*/
public static final SqlBinaryOperator EQUALS =
new SqlBinaryOperator(
"=",
SqlKind.EQUALS,
30,
true,
ReturnTypes.BOOLEAN_NULLABLE,
InferTypes.FIRST_KNOWN,
OperandTypes.COMPARABLE_UNORDERED_COMPARABLE_UNORDERED);
/**
* Logical greater-than operator, '<code>></code>'.
*/
public static final SqlBinaryOperator GREATER_THAN =
new SqlBinaryOperator(
">",
SqlKind.GREATER_THAN,
30,
true,
ReturnTypes.BOOLEAN_NULLABLE,
InferTypes.FIRST_KNOWN,
OperandTypes.COMPARABLE_ORDERED_COMPARABLE_ORDERED);
/**
* <code>IS DISTINCT FROM</code> operator.
*/
public static final SqlBinaryOperator IS_DISTINCT_FROM =
new SqlBinaryOperator(
"IS DISTINCT FROM",
SqlKind.IS_DISTINCT_FROM,
30,
true,
ReturnTypes.BOOLEAN,
InferTypes.FIRST_KNOWN,
OperandTypes.COMPARABLE_UNORDERED_COMPARABLE_UNORDERED);
/**
* <code>IS NOT DISTINCT FROM</code> operator. Is equivalent to <code>NOT(x
* IS DISTINCT FROM y)</code>
*/
public static final SqlBinaryOperator IS_NOT_DISTINCT_FROM =
new SqlBinaryOperator(
"IS NOT DISTINCT FROM",
SqlKind.IS_NOT_DISTINCT_FROM,
30,
true,
ReturnTypes.BOOLEAN,
InferTypes.FIRST_KNOWN,
OperandTypes.COMPARABLE_UNORDERED_COMPARABLE_UNORDERED);
/**
* The internal <code>$IS_DIFFERENT_FROM</code> operator is the same as the
* user-level {@link #IS_DISTINCT_FROM} in all respects except that
* the test for equality on character datatypes treats trailing spaces as
* significant.
*/
public static final SqlBinaryOperator IS_DIFFERENT_FROM =
new SqlBinaryOperator(
"$IS_DIFFERENT_FROM",
SqlKind.OTHER,
30,
true,
ReturnTypes.BOOLEAN,
InferTypes.FIRST_KNOWN,
OperandTypes.COMPARABLE_UNORDERED_COMPARABLE_UNORDERED);
/**
* Logical greater-than-or-equal operator, '<code>>=</code>'.
*/
public static final SqlBinaryOperator GREATER_THAN_OR_EQUAL =
new SqlBinaryOperator(
">=",
SqlKind.GREATER_THAN_OR_EQUAL,
30,
true,
ReturnTypes.BOOLEAN_NULLABLE,
InferTypes.FIRST_KNOWN,
OperandTypes.COMPARABLE_ORDERED_COMPARABLE_ORDERED);
/**
 * <code>IN</code> operator tests for a value's membership in a subquery or
 * a list of values.
 */
// The boolean constructor flag selects negation: false -> IN, true -> NOT IN.
public static final SqlBinaryOperator IN = new SqlInOperator(false);
/**
 * <code>NOT IN</code> operator tests for a value's membership in a subquery
 * or a list of values.
 */
public static final SqlBinaryOperator NOT_IN =
new SqlInOperator(true);
/**
* Logical less-than operator, '<code><</code>'.
*/
public static final SqlBinaryOperator LESS_THAN =
new SqlBinaryOperator(
"<",
SqlKind.LESS_THAN,
30,
true,
ReturnTypes.BOOLEAN_NULLABLE,
InferTypes.FIRST_KNOWN,
OperandTypes.COMPARABLE_ORDERED_COMPARABLE_ORDERED);
/**
* Logical less-than-or-equal operator, '<code><=</code>'.
*/
public static final SqlBinaryOperator LESS_THAN_OR_EQUAL =
new SqlBinaryOperator(
"<=",
SqlKind.LESS_THAN_OR_EQUAL,
30,
true,
ReturnTypes.BOOLEAN_NULLABLE,
InferTypes.FIRST_KNOWN,
OperandTypes.COMPARABLE_ORDERED_COMPARABLE_ORDERED);
/**
* Infix arithmetic minus operator, '<code>-</code>'.
*
* <p>Its precedence is less than the prefix {@link #UNARY_PLUS +}
* and {@link #UNARY_MINUS -} operators.
*/
public static final SqlBinaryOperator MINUS =
new SqlMonotonicBinaryOperator(
"-",
SqlKind.MINUS,
40,
true,
// Same type inference strategy as sum
ReturnTypes.NULLABLE_SUM,
InferTypes.FIRST_KNOWN,
OperandTypes.MINUS_OPERATOR);
/**
* Arithmetic multiplication operator, '<code>*</code>'.
*/
public static final SqlBinaryOperator MULTIPLY =
new SqlMonotonicBinaryOperator(
"*",
SqlKind.TIMES,
60,
true,
ReturnTypes.PRODUCT_NULLABLE,
InferTypes.FIRST_KNOWN,
OperandTypes.MULTIPLY_OPERATOR);
/**
* Logical not-equals operator, '<code><></code>'.
*/
public static final SqlBinaryOperator NOT_EQUALS =
new SqlBinaryOperator(
"<>",
SqlKind.NOT_EQUALS,
30,
true,
ReturnTypes.BOOLEAN_NULLABLE,
InferTypes.FIRST_KNOWN,
OperandTypes.COMPARABLE_UNORDERED_COMPARABLE_UNORDERED);
/**
* Logical <code>OR</code> operator.
*/
public static final SqlBinaryOperator OR =
new SqlBinaryOperator(
"OR",
SqlKind.OR,
26,
true,
ReturnTypes.ARG0_NULLABLE, // more efficient than BOOLEAN_NULLABLE
InferTypes.BOOLEAN,
OperandTypes.BOOLEAN_BOOLEAN);
/**
* Infix arithmetic plus operator, '<code>+</code>'.
*/
public static final SqlBinaryOperator PLUS =
new SqlMonotonicBinaryOperator(
"+",
SqlKind.PLUS,
40,
true,
ReturnTypes.NULLABLE_SUM,
InferTypes.FIRST_KNOWN,
OperandTypes.PLUS_OPERATOR);
/**
* Infix datetime plus operator, '<code>DATETIME + INTERVAL</code>'.
*/
public static final SqlSpecialOperator DATETIME_PLUS =
new SqlSpecialOperator(
"DATETIME_PLUS",
SqlKind.PLUS,
40,
true,
ReturnTypes.NULLABLE_SUM,
InferTypes.FIRST_KNOWN,
OperandTypes.PLUS_OPERATOR);
/**
 * Multiset MEMBER OF. Checks to see if an element belongs to a multiset.<br>
 * Example:<br>
 * <code>'green' MEMBER OF MULTISET['red','almost green','blue']</code>
 * returns <code>false</code>.
 */
public static final SqlBinaryOperator MEMBER_OF =
new SqlMultisetMemberOfOperator();
/**
 * Submultiset. Checks to see if a multiset is a subset of another
 * multiset.<br>
 * Example:<br>
 * <code>MULTISET['green'] SUBMULTISET OF MULTISET['red','almost
 * green','blue']</code> returns <code>false</code>.
 *
 * <p>But <code>MULTISET['blue', 'red'] SUBMULTISET OF
 * MULTISET['red','almost green','blue']</code> returns <code>true</code>
 * (<b>NB</b> multisets are order independent)
 */
public static final SqlBinaryOperator SUBMULTISET_OF =
// TODO: check if precedence is correct
new SqlBinaryOperator(
"SUBMULTISET OF",
SqlKind.OTHER,
30,
true,
ReturnTypes.BOOLEAN_NULLABLE,
null,
OperandTypes.MULTISET_MULTISET);
//-------------------------------------------------------------
// POSTFIX OPERATORS
//-------------------------------------------------------------
public static final SqlPostfixOperator DESC =
new SqlPostfixOperator(
"DESC",
SqlKind.DESCENDING,
20,
ReturnTypes.ARG0,
InferTypes.RETURN_TYPE,
OperandTypes.ANY);
public static final SqlPostfixOperator NULLS_FIRST =
new SqlPostfixOperator(
"NULLS FIRST",
SqlKind.NULLS_FIRST,
18,
ReturnTypes.ARG0,
InferTypes.RETURN_TYPE,
OperandTypes.ANY);
public static final SqlPostfixOperator NULLS_LAST =
new SqlPostfixOperator(
"NULLS LAST",
SqlKind.NULLS_LAST,
18,
ReturnTypes.ARG0,
InferTypes.RETURN_TYPE,
OperandTypes.ANY);
public static final SqlPostfixOperator IS_NOT_NULL =
new SqlPostfixOperator(
"IS NOT NULL",
SqlKind.IS_NOT_NULL,
30,
ReturnTypes.BOOLEAN_NOT_NULL,
InferTypes.VARCHAR_1024,
OperandTypes.ANY);
public static final SqlPostfixOperator IS_NULL =
new SqlPostfixOperator(
"IS NULL",
SqlKind.IS_NULL,
30,
ReturnTypes.BOOLEAN_NOT_NULL,
InferTypes.VARCHAR_1024,
OperandTypes.ANY);
public static final SqlPostfixOperator IS_NOT_TRUE =
new SqlPostfixOperator(
"IS NOT TRUE",
SqlKind.IS_NOT_TRUE,
30,
ReturnTypes.BOOLEAN_NOT_NULL,
InferTypes.BOOLEAN,
OperandTypes.BOOLEAN);
public static final SqlPostfixOperator IS_TRUE =
new SqlPostfixOperator(
"IS TRUE",
SqlKind.IS_TRUE,
30,
ReturnTypes.BOOLEAN_NOT_NULL,
InferTypes.BOOLEAN,
OperandTypes.BOOLEAN);
public static final SqlPostfixOperator IS_NOT_FALSE =
new SqlPostfixOperator(
"IS NOT FALSE",
SqlKind.IS_NOT_FALSE,
30,
ReturnTypes.BOOLEAN_NOT_NULL,
InferTypes.BOOLEAN,
OperandTypes.BOOLEAN);
public static final SqlPostfixOperator IS_FALSE =
new SqlPostfixOperator(
"IS FALSE",
SqlKind.IS_FALSE,
30,
ReturnTypes.BOOLEAN_NOT_NULL,
InferTypes.BOOLEAN,
OperandTypes.BOOLEAN);
// In SQL, UNKNOWN is the null value of the BOOLEAN type, so IS [NOT] UNKNOWN
// deliberately reuses the IS_[NOT_]NULL kinds; only the operand type
// (BOOLEAN rather than ANY) differs from IS [NOT] NULL above.
public static final SqlPostfixOperator IS_NOT_UNKNOWN =
new SqlPostfixOperator(
"IS NOT UNKNOWN",
SqlKind.IS_NOT_NULL,
30,
ReturnTypes.BOOLEAN_NOT_NULL,
InferTypes.BOOLEAN,
OperandTypes.BOOLEAN);
public static final SqlPostfixOperator IS_UNKNOWN =
new SqlPostfixOperator(
"IS UNKNOWN",
SqlKind.IS_NULL,
30,
ReturnTypes.BOOLEAN_NOT_NULL,
InferTypes.BOOLEAN,
OperandTypes.BOOLEAN);
public static final SqlPostfixOperator IS_A_SET =
new SqlPostfixOperator(
"IS A SET",
SqlKind.OTHER,
30,
ReturnTypes.BOOLEAN,
null,
OperandTypes.MULTISET);
//-------------------------------------------------------------
// PREFIX OPERATORS
//-------------------------------------------------------------
// EXISTS prefix operator: its single operand is a query, hence the overrides
// below relax the scalar-argument rule and reject operands in Rex form.
public static final SqlPrefixOperator EXISTS =
new SqlPrefixOperator(
"EXISTS",
SqlKind.EXISTS,
40,
ReturnTypes.BOOLEAN,
null,
OperandTypes.ANY) {
// The operand is a subquery, not a scalar expression.
public boolean argumentMustBeScalar(int ordinal) {
return false;
}
@Override public boolean validRexOperands(int count, Litmus litmus) {
// Valid only with zero Rex operands -- presumably EXISTS is expanded
// before reaching the Rex level; confirm against the converter.
if (count != 0) {
return litmus.fail("wrong operand count {} for {}", count, this);
}
return litmus.succeed();
}
};
public static final SqlPrefixOperator NOT =
new SqlPrefixOperator(
"NOT",
SqlKind.NOT,
30,
ReturnTypes.ARG0,
InferTypes.BOOLEAN,
OperandTypes.BOOLEAN);
/**
* Prefix arithmetic minus operator, '<code>-</code>'.
*
* <p>Its precedence is greater than the infix '{@link #PLUS +}' and
* '{@link #MINUS -}' operators.
*/
public static final SqlPrefixOperator UNARY_MINUS =
new SqlPrefixOperator(
"-",
SqlKind.MINUS_PREFIX,
80,
ReturnTypes.ARG0,
InferTypes.RETURN_TYPE,
OperandTypes.NUMERIC_OR_INTERVAL);
/**
* Prefix arithmetic plus operator, '<code>+</code>'.
*
* <p>Its precedence is greater than the infix '{@link #PLUS +}' and
* '{@link #MINUS -}' operators.
*/
public static final SqlPrefixOperator UNARY_PLUS =
new SqlPrefixOperator(
"+",
SqlKind.PLUS_PREFIX,
80,
ReturnTypes.ARG0,
InferTypes.RETURN_TYPE,
OperandTypes.NUMERIC_OR_INTERVAL);
/**
* Keyword which allows an identifier to be explicitly flagged as a table.
* For example, <code>select * from (TABLE t)</code> or <code>TABLE
* t</code>. See also {@link #COLLECTION_TABLE}.
*/
public static final SqlPrefixOperator EXPLICIT_TABLE =
new SqlPrefixOperator(
"TABLE",
SqlKind.EXPLICIT_TABLE,
2,
null,
null,
null);
//-------------------------------------------------------------
// AGGREGATE OPERATORS
//-------------------------------------------------------------
/**
* <code>SUM</code> aggregate function.
*/
public static final SqlAggFunction SUM = new SqlSumAggFunction(null);
/**
* <code>COUNT</code> aggregate function.
*/
public static final SqlAggFunction COUNT = new SqlCountAggFunction();
/**
* <code>MIN</code> aggregate function.
*/
public static final SqlAggFunction MIN =
new SqlMinMaxAggFunction(SqlKind.MIN);
/**
* <code>MAX</code> aggregate function.
*/
public static final SqlAggFunction MAX =
new SqlMinMaxAggFunction(SqlKind.MAX);
/**
* <code>LAST_VALUE</code> aggregate function.
*/
public static final SqlAggFunction LAST_VALUE =
new SqlFirstLastValueAggFunction(SqlKind.LAST_VALUE);
/**
* <code>FIRST_VALUE</code> aggregate function.
*/
public static final SqlAggFunction FIRST_VALUE =
new SqlFirstLastValueAggFunction(SqlKind.FIRST_VALUE);
/**
* <code>LEAD</code> aggregate function.
*/
public static final SqlAggFunction LEAD =
new SqlLeadLagAggFunction(SqlKind.LEAD);
/**
* <code>LAG</code> aggregate function.
*/
public static final SqlAggFunction LAG =
new SqlLeadLagAggFunction(SqlKind.LAG);
/**
* <code>NTILE</code> aggregate function.
*/
public static final SqlAggFunction NTILE =
new SqlNtileAggFunction();
/**
* <code>SINGLE_VALUE</code> aggregate function.
*/
public static final SqlAggFunction SINGLE_VALUE =
new SqlSingleValueAggFunction(null);
/**
* <code>AVG</code> aggregate function.
*/
public static final SqlAggFunction AVG =
new SqlAvgAggFunction(SqlKind.AVG);
/**
* <code>STDDEV_POP</code> aggregate function.
*/
public static final SqlAggFunction STDDEV_POP =
new SqlAvgAggFunction(SqlKind.STDDEV_POP);
/**
* <code>REGR_SXX</code> aggregate function.
*/
public static final SqlAggFunction REGR_SXX =
new SqlCovarAggFunction(SqlKind.REGR_SXX);
/**
* <code>REGR_SYY</code> aggregate function.
*/
public static final SqlAggFunction REGR_SYY =
new SqlCovarAggFunction(SqlKind.REGR_SYY);
/**
* <code>COVAR_POP</code> aggregate function.
*/
public static final SqlAggFunction COVAR_POP =
new SqlCovarAggFunction(SqlKind.COVAR_POP);
/**
* <code>COVAR_SAMP</code> aggregate function.
*/
public static final SqlAggFunction COVAR_SAMP =
new SqlCovarAggFunction(SqlKind.COVAR_SAMP);
/**
* <code>STDDEV_SAMP</code> aggregate function.
*/
public static final SqlAggFunction STDDEV_SAMP =
new SqlAvgAggFunction(SqlKind.STDDEV_SAMP);
/**
* <code>VAR_POP</code> aggregate function.
*/
public static final SqlAggFunction VAR_POP =
new SqlAvgAggFunction(SqlKind.VAR_POP);
/**
* <code>VAR_SAMP</code> aggregate function.
*/
public static final SqlAggFunction VAR_SAMP =
new SqlAvgAggFunction(SqlKind.VAR_SAMP);
//-------------------------------------------------------------
// WINDOW Aggregate Functions
//-------------------------------------------------------------
/**
* <code>HISTOGRAM</code> aggregate function support. Used by window
* aggregate versions of MIN/MAX
*/
public static final SqlAggFunction HISTOGRAM_AGG =
new SqlHistogramAggFunction(null);
/**
* <code>HISTOGRAM_MIN</code> window aggregate function.
*/
public static final SqlFunction HISTOGRAM_MIN =
new SqlFunction(
"$HISTOGRAM_MIN",
SqlKind.OTHER_FUNCTION,
ReturnTypes.ARG0_NULLABLE,
null,
OperandTypes.NUMERIC_OR_STRING,
SqlFunctionCategory.NUMERIC);
/**
* <code>HISTOGRAM_MAX</code> window aggregate function.
*/
public static final SqlFunction HISTOGRAM_MAX =
new SqlFunction(
"$HISTOGRAM_MAX",
SqlKind.OTHER_FUNCTION,
ReturnTypes.ARG0_NULLABLE,
null,
OperandTypes.NUMERIC_OR_STRING,
SqlFunctionCategory.NUMERIC);
/**
* <code>HISTOGRAM_FIRST_VALUE</code> window aggregate function.
*/
public static final SqlFunction HISTOGRAM_FIRST_VALUE =
new SqlFunction(
"$HISTOGRAM_FIRST_VALUE",
SqlKind.OTHER_FUNCTION,
ReturnTypes.ARG0_NULLABLE,
null,
OperandTypes.NUMERIC_OR_STRING,
SqlFunctionCategory.NUMERIC);
/**
* <code>HISTOGRAM_LAST_VALUE</code> window aggregate function.
*/
public static final SqlFunction HISTOGRAM_LAST_VALUE =
new SqlFunction(
"$HISTOGRAM_LAST_VALUE",
SqlKind.OTHER_FUNCTION,
ReturnTypes.ARG0_NULLABLE,
null,
OperandTypes.NUMERIC_OR_STRING,
SqlFunctionCategory.NUMERIC);
/**
* <code>SUM0</code> aggregate function.
*/
public static final SqlAggFunction SUM0 =
new SqlSumEmptyIsZeroAggFunction();
//-------------------------------------------------------------
// WINDOW Rank Functions
//-------------------------------------------------------------
/**
* <code>CUME_DIST</code> window function.
*/
public static final SqlRankFunction CUME_DIST =
new SqlRankFunction(true, SqlKind.CUME_DIST);
/**
* <code>DENSE_RANK</code> window function.
*/
public static final SqlRankFunction DENSE_RANK =
new SqlRankFunction(true, SqlKind.DENSE_RANK);
/**
* <code>PERCENT_RANK</code> window function.
*/
public static final SqlRankFunction PERCENT_RANK =
new SqlRankFunction(true, SqlKind.PERCENT_RANK);
/**
* <code>RANK</code> window function.
*/
public static final SqlRankFunction RANK =
new SqlRankFunction(true, SqlKind.RANK);
/**
* <code>ROW_NUMBER</code> window function.
*/
public static final SqlRankFunction ROW_NUMBER =
new SqlRankFunction(false, SqlKind.ROW_NUMBER);
  //-------------------------------------------------------------
  // SPECIAL OPERATORS
  //-------------------------------------------------------------
  /** The row constructor operator; see {@link SqlRowOperator}. */
  public static final SqlRowOperator ROW = new SqlRowOperator();
/**
* A special operator for the subtraction of two DATETIMEs. The format of
 * DATETIME subtraction is:
*
* <blockquote><code>"(" <datetime> "-" <datetime> ")"
* <interval qualifier></code></blockquote>
*
* <p>This operator is special since it needs to hold the
* additional interval qualifier specification.</p>
*/
public static final SqlOperator MINUS_DATE =
new SqlDatetimeSubtractionOperator();
/**
* The MULTISET Value Constructor. e.g. "<code>MULTISET[1,2,3]</code>".
*/
public static final SqlMultisetValueConstructor MULTISET_VALUE =
new SqlMultisetValueConstructor();
/**
* The MULTISET Query Constructor. e.g. "<code>SELECT dname, MULTISET(SELECT
* FROM emp WHERE deptno = dept.deptno) FROM dept</code>".
*/
public static final SqlMultisetQueryConstructor MULTISET_QUERY =
new SqlMultisetQueryConstructor();
/**
* The ARRAY Query Constructor. e.g. "<code>SELECT dname, ARRAY(SELECT
* FROM emp WHERE deptno = dept.deptno) FROM dept</code>".
*/
public static final SqlMultisetQueryConstructor ARRAY_QUERY =
new SqlArrayQueryConstructor();
/**
* The MAP Query Constructor. e.g. "<code>MAP(SELECT empno, deptno
* FROM emp)</code>".
*/
public static final SqlMultisetQueryConstructor MAP_QUERY =
new SqlMapQueryConstructor();
/**
* The CURSOR constructor. e.g. "<code>SELECT * FROM
* TABLE(DEDUP(CURSOR(SELECT * FROM EMPS), 'name'))</code>".
*/
public static final SqlCursorConstructor CURSOR =
new SqlCursorConstructor();
/**
* The COLUMN_LIST constructor. e.g. the ROW() call in "<code>SELECT * FROM
* TABLE(DEDUP(CURSOR(SELECT * FROM EMPS), ROW(name, empno)))</code>".
*/
public static final SqlColumnListConstructor COLUMN_LIST =
new SqlColumnListConstructor();
/**
* The <code>UNNEST</code> operator.
*/
public static final SqlUnnestOperator UNNEST =
new SqlUnnestOperator(false);
/**
* The <code>UNNEST WITH ORDINALITY</code> operator.
*/
public static final SqlUnnestOperator UNNEST_WITH_ORDINALITY =
new SqlUnnestOperator(true);
/**
* The <code>LATERAL</code> operator.
*/
public static final SqlSpecialOperator LATERAL =
new SqlFunctionalOperator(
"LATERAL",
SqlKind.LATERAL,
200,
true,
ReturnTypes.ARG0,
null,
OperandTypes.ANY);
/**
 * The "table function derived table" operator, which converts a
 * table-valued function into a relation, e.g. "<code>SELECT * FROM
* TABLE(ramp(5))</code>".
*
* <p>This operator has function syntax (with one argument), whereas
* {@link #EXPLICIT_TABLE} is a prefix operator.
*/
public static final SqlSpecialOperator COLLECTION_TABLE =
new SqlCollectionTableOperator("TABLE", SqlModality.RELATION);
  /** The SQL <code>OVERLAPS</code> operator; see {@link SqlOverlapsOperator}. */
  public static final SqlOverlapsOperator OVERLAPS =
      new SqlOverlapsOperator();
  /** The <code>VALUES</code> operator; see {@link SqlValuesOperator}. */
  public static final SqlSpecialOperator VALUES =
      new SqlValuesOperator();
  /**
   * The literal-chain operator; see {@link SqlLiteralChainOperator}.
   */
  public static final SqlLiteralChainOperator LITERAL_CHAIN =
      new SqlLiteralChainOperator();
  /** The THROW operator; see {@link SqlThrowOperator}. */
  public static final SqlThrowOperator THROW = new SqlThrowOperator();
  /** The <code>BETWEEN</code> operator (ASYMMETRIC, not negated). */
  public static final SqlBetweenOperator BETWEEN =
      new SqlBetweenOperator(
          SqlBetweenOperator.Flag.ASYMMETRIC,
          false);
  /** The <code>BETWEEN SYMMETRIC</code> operator (not negated). */
  public static final SqlBetweenOperator SYMMETRIC_BETWEEN =
      new SqlBetweenOperator(
          SqlBetweenOperator.Flag.SYMMETRIC,
          false);
  /** The <code>NOT BETWEEN</code> operator (ASYMMETRIC, negated). */
  public static final SqlBetweenOperator NOT_BETWEEN =
      new SqlBetweenOperator(
          SqlBetweenOperator.Flag.ASYMMETRIC,
          true);
  /** The <code>NOT BETWEEN SYMMETRIC</code> operator (negated). */
  public static final SqlBetweenOperator SYMMETRIC_NOT_BETWEEN =
      new SqlBetweenOperator(
          SqlBetweenOperator.Flag.SYMMETRIC,
          true);
  /** The <code>NOT LIKE</code> operator (negated {@link SqlKind#LIKE}). */
  public static final SqlSpecialOperator NOT_LIKE =
      new SqlLikeOperator("NOT LIKE", SqlKind.LIKE, true);
  /** The <code>LIKE</code> operator ({@link SqlKind#LIKE}). */
  public static final SqlSpecialOperator LIKE =
      new SqlLikeOperator("LIKE", SqlKind.LIKE, false);
  /** The <code>NOT SIMILAR TO</code> operator (negated {@link SqlKind#SIMILAR}). */
  public static final SqlSpecialOperator NOT_SIMILAR_TO =
      new SqlLikeOperator("NOT SIMILAR TO", SqlKind.SIMILAR, true);
  /** The <code>SIMILAR TO</code> operator ({@link SqlKind#SIMILAR}). */
  public static final SqlSpecialOperator SIMILAR_TO =
      new SqlLikeOperator("SIMILAR TO", SqlKind.SIMILAR, false);
  /**
   * Internal operator used to represent the ESCAPE clause of a LIKE or
   * SIMILAR TO expression.
   */
  public static final SqlSpecialOperator ESCAPE =
      new SqlSpecialOperator("Escape", SqlKind.ESCAPE, 30);
  /** The SQL <code>CASE</code> operator; see {@link SqlCaseOperator}. */
  public static final SqlCaseOperator CASE = SqlCaseOperator.INSTANCE;
  /** The procedure-call operator; see {@link SqlProcedureCallOperator}. */
  public static final SqlOperator PROCEDURE_CALL =
      new SqlProcedureCallOperator();
  /** The <code>NEW</code> operator; see {@link SqlNewOperator}. */
  public static final SqlOperator NEW = new SqlNewOperator();
/**
 * The <code>OVER</code> operator, which applies an aggregate function to a
* {@link SqlWindow window}.
*
* <p>Operands are as follows:
*
* <ol>
* <li>name of window function ({@link org.apache.calcite.sql.SqlCall})</li>
* <li>window name ({@link org.apache.calcite.sql.SqlLiteral}) or window
* in-line specification (@link SqlWindowOperator})</li>
* </ol>
*/
public static final SqlBinaryOperator OVER = new SqlOverOperator();
/**
* An <code>REINTERPRET</code> operator is internal to the planner. When the
* physical storage of two types is the same, this operator may be used to
* reinterpret values of one type as the other. This operator is similar to
* a cast, except that it does not alter the data value. Like a regular cast
* it accepts one operand and stores the target type as the return type. It
* performs an overflow check if it has <i>any</i> second operand, whether
* true or not.
*/
public static final SqlSpecialOperator REINTERPRET =
new SqlSpecialOperator("Reinterpret", SqlKind.REINTERPRET) {
public SqlOperandCountRange getOperandCountRange() {
return SqlOperandCountRanges.between(1, 2);
}
};
/** Internal operator that extracts time periods (year, month, date) from a
* date in internal format (number of days since epoch). */
public static final SqlSpecialOperator EXTRACT_DATE =
new SqlSpecialOperator("EXTRACT_DATE", SqlKind.OTHER);
//-------------------------------------------------------------
// FUNCTIONS
//-------------------------------------------------------------
/**
* The character substring function: <code>SUBSTRING(string FROM start [FOR
* length])</code>.
*
* <p>If the length parameter is a constant, the length of the result is the
* minimum of the length of the input and that length. Otherwise it is the
* length of the input.
*/
public static final SqlFunction SUBSTRING = new SqlSubstringFunction();
  /**
   * The <code>CONVERT</code> function, built on {@link SqlConvertFunction}
   * like its sibling {@link #TRANSLATE}.
   */
  public static final SqlFunction CONVERT =
      new SqlConvertFunction("CONVERT");
/**
* The <code>TRANSLATE(<i>char_value</i> USING <i>translation_name</i>)</code> function
* alters the character set of a string value from one base character set to another.
*
* <p>It is defined in the SQL standard. See also non-standard
* {@link OracleSqlOperatorTable#TRANSLATE3}.
*/
public static final SqlFunction TRANSLATE =
new SqlConvertFunction("TRANSLATE");
  /** The <code>OVERLAY</code> function; see {@link SqlOverlayFunction}. */
  public static final SqlFunction OVERLAY = new SqlOverlayFunction();
  /** The "TRIM" function. */
  public static final SqlFunction TRIM = SqlTrimFunction.INSTANCE;
  /** The <code>POSITION</code> function; see {@link SqlPositionFunction}. */
  public static final SqlFunction POSITION = new SqlPositionFunction();
  /**
   * The <code>CHAR_LENGTH</code> function. Takes a character operand and
   * returns a nullable INTEGER.
   */
  public static final SqlFunction CHAR_LENGTH =
      new SqlFunction(
          "CHAR_LENGTH",
          SqlKind.OTHER_FUNCTION,
          ReturnTypes.INTEGER_NULLABLE,
          null,
          OperandTypes.CHARACTER,
          SqlFunctionCategory.NUMERIC);
  /**
   * The <code>CHARACTER_LENGTH</code> function; configured identically to
   * {@link #CHAR_LENGTH}.
   */
  public static final SqlFunction CHARACTER_LENGTH =
      new SqlFunction(
          "CHARACTER_LENGTH",
          SqlKind.OTHER_FUNCTION,
          ReturnTypes.INTEGER_NULLABLE,
          null,
          OperandTypes.CHARACTER,
          SqlFunctionCategory.NUMERIC);
  /**
   * The <code>UPPER</code> function. Return type is the same as its
   * character argument (nullable).
   */
  public static final SqlFunction UPPER =
      new SqlFunction(
          "UPPER",
          SqlKind.OTHER_FUNCTION,
          ReturnTypes.ARG0_NULLABLE,
          null,
          OperandTypes.CHARACTER,
          SqlFunctionCategory.STRING);
  /**
   * The <code>LOWER</code> function. Return type is the same as its
   * character argument (nullable).
   */
  public static final SqlFunction LOWER =
      new SqlFunction(
          "LOWER",
          SqlKind.OTHER_FUNCTION,
          ReturnTypes.ARG0_NULLABLE,
          null,
          OperandTypes.CHARACTER,
          SqlFunctionCategory.STRING);
  /**
   * The <code>INITCAP</code> function. Return type is the same as its
   * character argument (nullable).
   */
  public static final SqlFunction INITCAP =
      new SqlFunction(
          "INITCAP",
          SqlKind.OTHER_FUNCTION,
          ReturnTypes.ARG0_NULLABLE,
          null,
          OperandTypes.CHARACTER,
          SqlFunctionCategory.STRING);
/**
* Uses SqlOperatorTable.useDouble for its return type since we don't know
* what the result type will be by just looking at the operand types. For
* example POW(int, int) can return a non integer if the second operand is
* negative.
*/
public static final SqlFunction POWER =
new SqlFunction(
"POWER",
SqlKind.OTHER_FUNCTION,
ReturnTypes.DOUBLE_NULLABLE,
null,
OperandTypes.NUMERIC_NUMERIC,
SqlFunctionCategory.NUMERIC);
  /** The <code>SQRT</code> function. Numeric operand, nullable DOUBLE result. */
  public static final SqlFunction SQRT =
      new SqlFunction(
          "SQRT",
          SqlKind.OTHER_FUNCTION,
          ReturnTypes.DOUBLE_NULLABLE,
          null,
          OperandTypes.NUMERIC,
          SqlFunctionCategory.NUMERIC);
  /** The <code>MOD</code> function. Takes two exact-numeric operands. */
  public static final SqlFunction MOD =
      // Return type is same as divisor (2nd operand)
      // SQL2003 Part2 Section 6.27, Syntax Rules 9
      new SqlFunction(
          "MOD",
          SqlKind.OTHER_FUNCTION,
          ReturnTypes.ARG1_NULLABLE,
          null,
          OperandTypes.EXACT_NUMERIC_EXACT_NUMERIC,
          SqlFunctionCategory.NUMERIC);
  /** The <code>LN</code> (natural logarithm) function. Nullable DOUBLE result. */
  public static final SqlFunction LN =
      new SqlFunction(
          "LN",
          SqlKind.OTHER_FUNCTION,
          ReturnTypes.DOUBLE_NULLABLE,
          null,
          OperandTypes.NUMERIC,
          SqlFunctionCategory.NUMERIC);
  /** The <code>LOG10</code> (base-10 logarithm) function. Nullable DOUBLE result. */
  public static final SqlFunction LOG10 =
      new SqlFunction(
          "LOG10",
          SqlKind.OTHER_FUNCTION,
          ReturnTypes.DOUBLE_NULLABLE,
          null,
          OperandTypes.NUMERIC,
          SqlFunctionCategory.NUMERIC);
  /**
   * The <code>ABS</code> function. Accepts a numeric or interval operand;
   * return type is the same as the operand (ARG0).
   */
  public static final SqlFunction ABS =
      new SqlFunction(
          "ABS",
          SqlKind.OTHER_FUNCTION,
          ReturnTypes.ARG0,
          null,
          OperandTypes.NUMERIC_OR_INTERVAL,
          SqlFunctionCategory.NUMERIC);
  /** The <code>EXP</code> function. Nullable DOUBLE result. */
  public static final SqlFunction EXP =
      new SqlFunction(
          "EXP",
          SqlKind.OTHER_FUNCTION,
          ReturnTypes.DOUBLE_NULLABLE,
          null,
          OperandTypes.NUMERIC,
          SqlFunctionCategory.NUMERIC);
  /** The <code>NULLIF</code> function; see {@link SqlNullifFunction}. */
  public static final SqlFunction NULLIF = new SqlNullifFunction();
/**
* The COALESCE builtin function.
*/
public static final SqlFunction COALESCE = new SqlCoalesceFunction();
/**
* The <code>FLOOR</code> function.
*/
public static final SqlFunction FLOOR = new SqlFloorFunction(SqlKind.FLOOR);
/**
* The <code>CEIL</code> function.
*/
public static final SqlFunction CEIL = new SqlFloorFunction(SqlKind.CEIL);
/**
* The <code>USER</code> function.
*/
public static final SqlFunction USER =
new SqlStringContextVariable("USER");
/**
* The <code>CURRENT_USER</code> function.
*/
public static final SqlFunction CURRENT_USER =
new SqlStringContextVariable("CURRENT_USER");
/**
* The <code>SESSION_USER</code> function.
*/
public static final SqlFunction SESSION_USER =
new SqlStringContextVariable("SESSION_USER");
/**
* The <code>SYSTEM_USER</code> function.
*/
public static final SqlFunction SYSTEM_USER =
new SqlStringContextVariable("SYSTEM_USER");
/**
* The <code>CURRENT_PATH</code> function.
*/
public static final SqlFunction CURRENT_PATH =
new SqlStringContextVariable("CURRENT_PATH");
/**
* The <code>CURRENT_ROLE</code> function.
*/
public static final SqlFunction CURRENT_ROLE =
new SqlStringContextVariable("CURRENT_ROLE");
/**
* The <code>CURRENT_CATALOG</code> function.
*/
public static final SqlFunction CURRENT_CATALOG =
new SqlStringContextVariable("CURRENT_CATALOG");
/**
* The <code>CURRENT_SCHEMA</code> function.
*/
public static final SqlFunction CURRENT_SCHEMA =
new SqlStringContextVariable("CURRENT_SCHEMA");
/**
* The <code>LOCALTIME [(<i>precision</i>)]</code> function.
*/
public static final SqlFunction LOCALTIME =
new SqlAbstractTimeFunction("LOCALTIME", SqlTypeName.TIME);
/**
* The <code>LOCALTIMESTAMP [(<i>precision</i>)]</code> function.
*/
public static final SqlFunction LOCALTIMESTAMP =
new SqlAbstractTimeFunction("LOCALTIMESTAMP", SqlTypeName.TIMESTAMP);
/**
* The <code>CURRENT_TIME [(<i>precision</i>)]</code> function.
*/
public static final SqlFunction CURRENT_TIME =
new SqlAbstractTimeFunction("CURRENT_TIME", SqlTypeName.TIME);
/**
* The <code>CURRENT_TIMESTAMP [(<i>precision</i>)]</code> function.
*/
public static final SqlFunction CURRENT_TIMESTAMP =
new SqlAbstractTimeFunction("CURRENT_TIMESTAMP", SqlTypeName.TIMESTAMP);
/**
* The <code>CURRENT_DATE</code> function.
*/
public static final SqlFunction CURRENT_DATE =
new SqlCurrentDateFunction();
/**
* <p>The <code>TIMESTAMPADD</code> function, which adds an interval to a
* timestamp.
*
* <p>The SQL syntax is
*
* <blockquote>
* <code>TIMESTAMPADD(<i>timestamp interval</i>, <i>quantity</i>, <i>timestamp</i>)</code>
* </blockquote>
*
 * <p>The interval time unit can be one of the following literals:<ul>
* <li>MICROSECOND (and synonyms SQL_TSI_MICROSECOND, FRAC_SECOND,
* SQL_TSI_FRAC_SECOND)
* <li>SECOND (and synonym SQL_TSI_SECOND)
* <li>MINUTE (and synonym SQL_TSI_MINUTE)
* <li>HOUR (and synonym SQL_TSI_HOUR)
* <li>DAY (and synonym SQL_TSI_DAY)
* <li>WEEK (and synonym SQL_TSI_WEEK)
* <li>MONTH (and synonym SQL_TSI_MONTH)
* <li>QUARTER (and synonym SQL_TSI_QUARTER)
* <li>YEAR (and synonym SQL_TSI_YEAR)
* </ul>
*
* <p>Returns modified timestamp.
*/
public static final SqlFunction TIMESTAMP_ADD =
new SqlFunction("TIMESTAMPADD", SqlKind.TIMESTAMP_ADD, ReturnTypes.ARG2,
null,
OperandTypes.family(SqlTypeFamily.ANY, SqlTypeFamily.INTEGER,
SqlTypeFamily.DATETIME), SqlFunctionCategory.TIMEDATE);
/**
* <p>The <code>TIMESTAMPDIFF</code> function, which calculates the difference
* between two timestamps.
*
* <p>The SQL syntax is
*
* <blockquote>
* <code>TIMESTAMPDIFF(<i>timestamp interval</i>, <i>timestamp</i>, <i>timestamp</i>)</code>
* </blockquote>
*
 * <p>The interval time unit can be one of the following literals:<ul>
* <li>MICROSECOND (and synonyms SQL_TSI_MICROSECOND, FRAC_SECOND,
* SQL_TSI_FRAC_SECOND)
* <li>SECOND (and synonym SQL_TSI_SECOND)
* <li>MINUTE (and synonym SQL_TSI_MINUTE)
* <li>HOUR (and synonym SQL_TSI_HOUR)
* <li>DAY (and synonym SQL_TSI_DAY)
* <li>WEEK (and synonym SQL_TSI_WEEK)
* <li>MONTH (and synonym SQL_TSI_MONTH)
* <li>QUARTER (and synonym SQL_TSI_QUARTER)
* <li>YEAR (and synonym SQL_TSI_YEAR)
* </ul>
*
* <p>Returns difference between two timestamps in indicated timestamp interval.
*/
public static final SqlFunction TIMESTAMP_DIFF =
new SqlFunction("TIMESTAMPDIFF", SqlKind.TIMESTAMP_DIFF,
ReturnTypes.INTEGER_NULLABLE, null,
OperandTypes.family(SqlTypeFamily.ANY, SqlTypeFamily.DATETIME,
SqlTypeFamily.DATETIME), SqlFunctionCategory.TIMEDATE);
/**
* Use of the <code>IN_FENNEL</code> operator forces the argument to be
* evaluated in Fennel. Otherwise acts as identity function.
*/
public static final SqlFunction IN_FENNEL =
new SqlMonotonicUnaryFunction(
"IN_FENNEL",
SqlKind.OTHER_FUNCTION,
ReturnTypes.ARG0,
null,
OperandTypes.ANY,
SqlFunctionCategory.SYSTEM);
/**
* The SQL <code>CAST</code> operator.
*
* <p>The SQL syntax is
*
* <blockquote><code>CAST(<i>expression</i> AS <i>type</i>)</code>
* </blockquote>
*
 * <p>When the CAST operator is applied as a {@link SqlCall}, it has two
* arguments: the expression and the type. The type must not include a
* constraint, so <code>CAST(x AS INTEGER NOT NULL)</code>, for instance, is
* invalid.</p>
*
* <p>When the CAST operator is applied as a <code>RexCall</code>, the
* target type is simply stored as the return type, not an explicit operand.
* For example, the expression <code>CAST(1 + 2 AS DOUBLE)</code> will
* become a call to <code>CAST</code> with the expression <code>1 + 2</code>
* as its only operand.</p>
*
* <p>The <code>RexCall</code> form can also have a type which contains a
* <code>NOT NULL</code> constraint. When this expression is implemented, if
* the value is NULL, an exception will be thrown.</p>
*/
public static final SqlFunction CAST = new SqlCastFunction();
/**
* The SQL <code>EXTRACT</code> operator. Extracts a specified field value
* from a DATETIME or an INTERVAL. E.g.<br>
* <code>EXTRACT(HOUR FROM INTERVAL '364 23:59:59')</code> returns <code>
* 23</code>
*/
public static final SqlFunction EXTRACT = new SqlExtractFunction();
/**
* The SQL <code>QUARTER</code> operator. Returns the Quarter
* from a DATETIME E.g.<br>
* <code>QUARTER(date '2008-9-23')</code> returns <code>
* 3</code>
*/
public static final SqlQuarterFunction QUARTER = new SqlQuarterFunction();
/**
* The ELEMENT operator, used to convert a multiset with only one item to a
* "regular" type. Example ... log(ELEMENT(MULTISET[1])) ...
*/
public static final SqlFunction ELEMENT =
new SqlFunction(
"ELEMENT",
SqlKind.OTHER_FUNCTION,
ReturnTypes.MULTISET_ELEMENT_NULLABLE,
null,
OperandTypes.COLLECTION,
SqlFunctionCategory.SYSTEM);
/**
* The item operator {@code [ ... ]}, used to access a given element of an
* array or map. For example, {@code myArray[3]} or {@code "myMap['foo']"}.
*
* <p>The SQL standard calls the ARRAY variant a
* <array element reference>. Index is 1-based. The standard says
* to raise "data exception - array element error" but we currently return
* null.</p>
*
* <p>MAP is not standard SQL.</p>
*/
public static final SqlOperator ITEM = new SqlItemOperator();
/**
* The ARRAY Value Constructor. e.g. "<code>ARRAY[1, 2, 3]</code>".
*/
public static final SqlArrayValueConstructor ARRAY_VALUE_CONSTRUCTOR =
new SqlArrayValueConstructor();
/**
* The MAP Value Constructor,
* e.g. "<code>MAP['washington', 1, 'obama', 44]</code>".
*/
public static final SqlMapValueConstructor MAP_VALUE_CONSTRUCTOR =
new SqlMapValueConstructor();
/**
* The internal "$SLICE" operator takes a multiset of records and returns a
* multiset of the first column of those records.
*
* <p>It is introduced when multisets of scalar types are created, in order
* to keep types consistent. For example, <code>MULTISET [5]</code> has type
* <code>INTEGER MULTISET</code> but is translated to an expression of type
* <code>RECORD(INTEGER EXPR$0) MULTISET</code> because in our internal
* representation of multisets, every element must be a record. Applying the
* "$SLICE" operator to this result converts the type back to an <code>
* INTEGER MULTISET</code> multiset value.
*
* <p><code>$SLICE</code> is often translated away when the multiset type is
* converted back to scalar values.
*/
public static final SqlInternalOperator SLICE =
new SqlInternalOperator(
"$SLICE",
SqlKind.OTHER,
0,
false,
ReturnTypes.MULTISET_PROJECT0,
null,
OperandTypes.RECORD_COLLECTION) {
};
/**
* The internal "$ELEMENT_SLICE" operator returns the first field of the
* only element of a multiset.
*
* <p>It is introduced when multisets of scalar types are created, in order
* to keep types consistent. For example, <code>ELEMENT(MULTISET [5])</code>
* is translated to <code>$ELEMENT_SLICE(MULTISET (VALUES ROW (5
* EXPR$0))</code> It is translated away when the multiset type is converted
* back to scalar values.</p>
*
* <p>NOTE: jhyde, 2006/1/9: Usages of this operator are commented out, but
* I'm not deleting the operator, because some multiset tests are disabled,
* and we may need this operator to get them working!</p>
*/
public static final SqlInternalOperator ELEMENT_SLICE =
new SqlInternalOperator(
"$ELEMENT_SLICE",
SqlKind.OTHER,
0,
false,
ReturnTypes.MULTISET_RECORD,
null,
OperandTypes.MULTISET) {
public void unparse(
SqlWriter writer,
SqlCall call,
int leftPrec,
int rightPrec) {
SqlUtil.unparseFunctionSyntax(
this,
writer, call);
}
};
/**
* The internal "$SCALAR_QUERY" operator returns a scalar value from a
* record type. It assumes the record type only has one field, and returns
* that field as the output.
*/
public static final SqlInternalOperator SCALAR_QUERY =
new SqlInternalOperator(
"$SCALAR_QUERY",
SqlKind.SCALAR_QUERY,
0,
false,
ReturnTypes.RECORD_TO_SCALAR,
null,
OperandTypes.RECORD_TO_SCALAR) {
public void unparse(
SqlWriter writer,
SqlCall call,
int leftPrec,
int rightPrec) {
final SqlWriter.Frame frame = writer.startList("(", ")");
call.operand(0).unparse(writer, 0, 0);
writer.endList(frame);
}
public boolean argumentMustBeScalar(int ordinal) {
// Obvious, really.
return false;
}
};
/**
* The CARDINALITY operator, used to retrieve the number of elements in a
* MULTISET, ARRAY or MAP.
*/
public static final SqlFunction CARDINALITY =
new SqlFunction(
"CARDINALITY",
SqlKind.OTHER_FUNCTION,
ReturnTypes.INTEGER_NULLABLE,
null,
OperandTypes.COLLECTION_OR_MAP,
SqlFunctionCategory.SYSTEM);
/**
* The COLLECT operator. Multiset aggregator function.
*/
public static final SqlAggFunction COLLECT =
new SqlAggFunction("COLLECT",
null,
SqlKind.COLLECT,
ReturnTypes.TO_MULTISET,
null,
OperandTypes.ANY,
SqlFunctionCategory.SYSTEM, false, false) {
};
/**
* The FUSION operator. Multiset aggregator function.
*/
public static final SqlFunction FUSION =
new SqlAggFunction("FUSION", null,
SqlKind.FUSION,
ReturnTypes.ARG0,
null,
OperandTypes.MULTISET,
SqlFunctionCategory.SYSTEM, false, false) {
};
/**
* The sequence next value function: <code>NEXT VALUE FOR sequence</code>
*/
public static final SqlOperator NEXT_VALUE =
new SqlSequenceValueOperator(SqlKind.NEXT_VALUE);
/**
* The sequence current value function: <code>CURRENT VALUE FOR
* sequence</code>
*/
public static final SqlOperator CURRENT_VALUE =
new SqlSequenceValueOperator(SqlKind.CURRENT_VALUE);
/**
* The <code>TABLESAMPLE</code> operator.
*
* <p>Examples:
*
* <ul>
* <li><code><query> TABLESAMPLE SUBSTITUTE('sampleName')</code>
* (non-standard)
* <li><code><query> TABLESAMPLE BERNOULLI(<percent>)
* [REPEATABLE(<seed>)]</code> (standard, but not implemented for FTRS
* yet)
* <li><code><query> TABLESAMPLE SYSTEM(<percent>)
* [REPEATABLE(<seed>)]</code> (standard, but not implemented for FTRS
* yet)
* </ul>
*
* <p>Operand #0 is a query or table; Operand #1 is a {@link SqlSampleSpec}
* wrapped in a {@link SqlLiteral}.
*/
public static final SqlSpecialOperator TABLESAMPLE =
new SqlSpecialOperator(
"TABLESAMPLE",
SqlKind.TABLESAMPLE,
20,
true,
ReturnTypes.ARG0,
null,
OperandTypes.VARIADIC) {
public void unparse(
SqlWriter writer,
SqlCall call,
int leftPrec,
int rightPrec) {
call.operand(0).unparse(writer, leftPrec, 0);
writer.keyword("TABLESAMPLE");
call.operand(1).unparse(writer, 0, rightPrec);
}
};
  //~ Methods ----------------------------------------------------------------
  /**
   * Returns the standard operator table, creating it if necessary.
   *
   * <p>Lazily initializes the singleton. The method is {@code synchronized},
   * so concurrent callers observe a single, fully-initialized instance.
   */
  public static synchronized SqlStdOperatorTable instance() {
    if (instance == null) {
      // Creates and initializes the standard operator table.
      // Uses two-phase construction, because we can't initialize the
      // table until the constructor of the sub-class has completed.
      instance = new SqlStdOperatorTable();
      instance.init();
    }
    return instance;
  }
}
// End SqlStdOperatorTable.java
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.execution.testframework.ui;
import com.intellij.execution.testframework.TestConsoleProperties;
import com.intellij.execution.testframework.TestFrameworkPropertyListener;
import com.intellij.execution.testframework.TestTreeView;
import com.intellij.execution.testframework.ToolbarPanel;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.ui.Splitter;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.wm.ToolWindow;
import com.intellij.openapi.wm.ToolWindowAnchor;
import com.intellij.openapi.wm.ToolWindowManager;
import com.intellij.ui.IdeBorderFactory;
import com.intellij.ui.OnePixelSplitter;
import com.intellij.ui.ScrollPaneFactory;
import com.intellij.ui.SideBorder;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.border.CompoundBorder;
import java.awt.*;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
/**
* @author yole
*/
public abstract class TestResultsPanel extends JPanel implements Disposable, DataProvider {
  private JScrollPane myLeftPane;                    // hosts the test tree (left side of the main splitter)
  private JComponent myStatisticsComponent;          // shown in the statistics splitter's second slot on demand
  private Splitter myStatisticsSplitter;             // splits console output from the statistics panel
  protected final JComponent myConsole;
  protected ToolbarPanel myToolbarPanel;
  private final String mySplitterProportionProperty; // key under which the main splitter proportion is persisted
  private final String myStatisticsSplitterProportionProperty;
  private final float mySplitterDefaultProportion;
  protected final AnAction[] myConsoleActions;
  protected final TestConsoleProperties myProperties;
  protected TestStatusLine myStatusLine;
  private Splitter mySplitter;                       // main tree/console splitter, created in initUI()
  /**
   * Creates the panel but does not build the UI; callers must invoke
   * {@link #initUI()} afterwards.
   *
   * @param console                    console component shown in the output tab
   * @param consoleActions             actions rendered in the output tab's toolbar
   * @param properties                 test-console settings shared with sub-components
   * @param splitterProportionProperty persistence key for the main splitter proportion
   * @param splitterDefaultProportion  proportion used when no value has been persisted
   */
  protected TestResultsPanel(@NotNull JComponent console, AnAction[] consoleActions, TestConsoleProperties properties,
                             String splitterProportionProperty, float splitterDefaultProportion) {
    super(new BorderLayout(0,1));
    myConsole = console;
    myConsoleActions = consoleActions;
    myProperties = properties;
    mySplitterProportionProperty = splitterProportionProperty;
    mySplitterDefaultProportion = splitterDefaultProportion;
    // The statistics splitter reuses the main key with a suffix.
    myStatisticsSplitterProportionProperty = mySplitterProportionProperty + "_Statistics";
  }
  /**
   * Builds the full UI: toolbar + test tree on the left, status line,
   * console output and (optionally) statistics on the right. Must be called
   * once after construction; subclass factory methods (createStatisticsPanel,
   * createTestTreeView, ...) are invoked here.
   */
  public void initUI() {
    myLeftPane = ScrollPaneFactory.createScrollPane();
    myLeftPane.putClientProperty(UIUtil.KEEP_BORDER_SIDES, SideBorder.TOP);
    myStatisticsComponent = createStatisticsPanel();
    myStatusLine = createStatusLine();
    JComponent testTreeView = createTestTreeView();
    myToolbarPanel = createToolbarPanel();
    Disposer.register(this, myToolbarPanel);
    // Split vertically when the tool window is docked left/right, where
    // horizontal space is scarce.
    final String windowId = myProperties.getExecutor().getToolWindowId();
    final ToolWindow toolWindow = ToolWindowManager.getInstance(myProperties.getProject()).getToolWindow(windowId);
    boolean splitVertically = false;
    if (toolWindow != null) {
      final ToolWindowAnchor anchor = toolWindow.getAnchor();
      splitVertically = anchor == ToolWindowAnchor.LEFT || anchor == ToolWindowAnchor.RIGHT;
    }
    myStatusLine.setPreferredSize(splitVertically);
    mySplitter = createSplitter(mySplitterProportionProperty,
                                mySplitterDefaultProportion,
                                splitVertically);
    // Detach and dispose the splitter together with this panel.
    Disposer.register(this, new Disposable(){
      @Override
      public void dispose() {
        remove(mySplitter);
        mySplitter.dispose();
      }
    });
    add(mySplitter, BorderLayout.CENTER);
    final JPanel leftPanel = new JPanel(new BorderLayout());
    leftPanel.add(myLeftPane, BorderLayout.CENTER);
    leftPanel.add(myToolbarPanel, BorderLayout.NORTH);
    mySplitter.setFirstComponent(leftPanel);
    myStatusLine.setMinimumSize(new Dimension(0, myStatusLine.getMinimumSize().height));
    myStatusLine.setBorder(BorderFactory.createEmptyBorder(0, 0, 0, 5));
    final JPanel rightPanel = new JPanel(new BorderLayout());
    rightPanel.add(SameHeightPanel.wrap(myStatusLine, myToolbarPanel), BorderLayout.NORTH);
    myStatisticsSplitter = createSplitter(myStatisticsSplitterProportionProperty, 0.5f, false);
    myStatisticsSplitter.setFirstComponent(createOutputTab(myConsole, myConsoleActions));
    // Legacy statistics panel, gated by a registry flag; toggled live via a
    // TestConsoleProperties listener.
    if (Registry.is("tests.view.old.statistics.panel")) {
      if (TestConsoleProperties.SHOW_STATISTICS.value(myProperties)) {
        showStatistics();
      }
      myProperties.addListener(TestConsoleProperties.SHOW_STATISTICS, new TestFrameworkPropertyListener<Boolean>() {
        @Override
        public void onChanged(Boolean value) {
          if (value.booleanValue()) {
            showStatistics();
          }
          else {
            myStatisticsSplitter.setSecondComponent(null);
          }
        }
      });
    }
    rightPanel.add(myStatisticsSplitter, BorderLayout.CENTER);
    mySplitter.setSecondComponent(rightPanel);
    testTreeView.setBorder(BorderFactory.createEmptyBorder(0, 3, 0, 0));
    setLeftComponent(testTreeView);
  }
  // Shows the statistics component in the right-hand splitter.
  private void showStatistics() {
    myStatisticsSplitter.setSecondComponent(myStatisticsComponent);
  }
  /** Creates the statistics panel placed next to the console output. */
  protected abstract JComponent createStatisticsPanel();
  /** Creates the toolbar shown above the test tree; overridable. */
  protected ToolbarPanel createToolbarPanel() {
    return new ToolbarPanel(myProperties, this);
  }
  /** Creates the status line shown above the console; overridable. */
  protected TestStatusLine createStatusLine() {
    return new TestStatusLine();
  }
  /** Creates the component displaying the test tree. */
  protected abstract JComponent createTestTreeView();
  /** Returns the tree view for data-context queries, or null if none. */
  @Nullable
  protected TestTreeView getTreeView() {
    return null;
  }
  /** Delegates data-context lookups to the tree view, if present. */
  @Nullable
  @Override
  public Object getData(@NonNls String dataId) {
    final TestTreeView view = getTreeView();
    if (view != null) {
      return view.getData(dataId);
    }
    return null;
  }
  // Wraps the console in a panel with a vertical toolbar on the right.
  private JComponent createOutputTab(JComponent console,
                                     AnAction[] consoleActions) {
    JPanel outputTab = new JPanel(new BorderLayout());
    console.setFocusable(true);
    // Left border matches the editor background so the console blends in.
    final Color editorBackground = EditorColorsManager.getInstance().getGlobalScheme().getDefaultBackground();
    console.setBorder(new CompoundBorder(IdeBorderFactory.createBorder(SideBorder.RIGHT | SideBorder.TOP),
                                         new SideBorder(editorBackground, SideBorder.LEFT)));
    outputTab.add(console, BorderLayout.CENTER);
    final ActionToolbar toolbar = ActionManager.getInstance().createActionToolbar(ActionPlaces.UNKNOWN, new DefaultActionGroup(consoleActions), false);
    outputTab.add(toolbar.getComponent(), BorderLayout.EAST);
    return outputTab;
  }
  // Child disposables registered via Disposer handle the actual cleanup.
  @Override
  public void dispose() {
  }
  /**
   * Creates a splitter whose proportion is loaded from and persisted to
   * {@link PropertiesComponent} under {@code proportionProperty};
   * falls back to {@code defaultProportion} when no (parseable) value exists.
   */
  protected static Splitter createSplitter(final String proportionProperty, final float defaultProportion, boolean splitVertically) {
    final Splitter splitter = new OnePixelSplitter(splitVertically);
    splitter.setHonorComponentsMinimumSize(true);
    final PropertiesComponent propertiesComponent = PropertiesComponent.getInstance();
    float proportion;
    final String value = propertiesComponent.getValue(proportionProperty);
    if (value != null) {
      try {
        proportion = Float.parseFloat(value);
      }
      catch (NumberFormatException e) {
        proportion = defaultProportion;
      }
    }
    else {
      proportion = defaultProportion;
    }
    // Persist every user-driven proportion change.
    splitter.addPropertyChangeListener(new PropertyChangeListener() {
      @Override
      public void propertyChange(@NotNull final PropertyChangeEvent event) {
        if (event.getPropertyName().equals(Splitter.PROP_PROPORTION)) {
          propertiesComponent.setValue(proportionProperty, String.valueOf(splitter.getProportion()));
        }
      }
    });
    splitter.setProportion(proportion);
    return splitter;
  }
  // Installs the component into the left scroll pane, avoiding a redundant
  // setViewportView when it is already showing.
  protected void setLeftComponent(final JComponent component) {
    if (component != myLeftPane.getViewport().getView()) myLeftPane.setViewportView(component);
  }
}
| |
/*
* Copyright 2003-2021 Dave Griffith, Bas Leijdekkers
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.siyeh.ig.classlayout;
import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.codeInspection.ui.MultipleCheckboxOptionsPanel;
import com.intellij.codeInspection.util.SpecialAnnotationsUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.SearchScope;
import com.intellij.psi.search.searches.ClassInheritorsSearch;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.util.Query;
import com.siyeh.HardcodedMethodConstants;
import com.siyeh.InspectionGadgetsBundle;
import com.siyeh.ig.BaseInspection;
import com.siyeh.ig.BaseInspectionVisitor;
import com.siyeh.ig.InspectionGadgetsFix;
import com.siyeh.ig.fixes.AddToIgnoreIfAnnotatedByListQuickFix;
import com.siyeh.ig.psiutils.UtilityClassUtil;
import com.siyeh.ig.ui.ExternalizableStringSet;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.ArrayList;
import java.util.List;
/**
 * Reports utility classes (classes containing only static members) that do not declare a
 * private constructor and can therefore be instantiated or subclassed by accident.
 * Offers quick fixes to create an empty private constructor, or to make an existing
 * no-argument constructor private when it is unreferenced.
 */
public class UtilityClassWithoutPrivateConstructorInspection extends BaseInspection {

  /** Fully qualified annotation names; classes annotated with any of them are ignored. */
  @SuppressWarnings("PublicField")
  public final ExternalizableStringSet ignorableAnnotations = new ExternalizableStringSet();

  /** When {@code true}, classes whose only public static method is {@code main(String[])} are ignored. */
  @SuppressWarnings("PublicField")
  public boolean ignoreClassesWithOnlyMain = false;

  @Override
  @Nullable
  public JComponent createOptionsPanel() {
    final MultipleCheckboxOptionsPanel panel = new MultipleCheckboxOptionsPanel(this);
    final JPanel annotationsPanel = SpecialAnnotationsUtil.createSpecialAnnotationsListControl(
      ignorableAnnotations, InspectionGadgetsBundle.message("ignore.if.annotated.by"));
    panel.add(annotationsPanel, "growx, wrap");
    panel.addCheckbox(InspectionGadgetsBundle.message("utility.class.without.private.constructor.option"), "ignoreClassesWithOnlyMain");
    return panel;
  }

  /**
   * Builds the applicable quick fixes for the reported class.
   * {@code infos[0]} is the {@link PsiClass}, {@code infos[1]} the on-the-fly flag.
   */
  @Override
  protected InspectionGadgetsFix @NotNull [] buildFixes(Object... infos) {
    final List<InspectionGadgetsFix> fixes = new ArrayList<>();
    final PsiClass aClass = (PsiClass)infos[0];
    final PsiMethod constructor = getNullArgConstructor(aClass);
    final boolean isOnTheFly = (boolean)infos[1];
    if (constructor == null) {
      // In batch mode only offer the fix when generating a private constructor
      // cannot break an implicit constructor usage (e.g. "new Util()").
      if (isOnTheFly || !hasImplicitConstructorUsage(aClass)) {
        fixes.add(new CreateEmptyPrivateConstructor());
      }
    }
    else {
      // An existing no-arg constructor can only be made private when unreferenced.
      final Query<PsiReference> query = ReferencesSearch.search(constructor, constructor.getUseScope());
      final PsiReference reference = query.findFirst();
      if (reference == null) {
        fixes.add(new MakeConstructorPrivateFix());
      }
    }
    AddToIgnoreIfAnnotatedByListQuickFix.build(aClass, ignorableAnnotations, fixes);
    return fixes.toArray(InspectionGadgetsFix.EMPTY_ARRAY);
  }

  @Override
  @NotNull
  protected String buildErrorString(Object... infos) {
    return InspectionGadgetsBundle.message("utility.class.without.private.constructor.problem.descriptor");
  }

  @Override
  public BaseInspectionVisitor buildVisitor() {
    return new UtilityClassWithoutPrivateConstructorVisitor();
  }

  /** Returns {@code true} if the class is instantiated anywhere via its implicit default constructor. */
  private static boolean hasImplicitConstructorUsage(PsiClass aClass) {
    final Query<PsiReference> query = ReferencesSearch.search(aClass, aClass.getUseScope());
    for (PsiReference reference : query) {
      if (reference == null) continue;
      final PsiElement element = reference.getElement();
      final PsiElement context = element.getParent();
      if (context instanceof PsiNewExpression) {
        return true;
      }
    }
    return false;
  }

  /** Returns the class's no-argument constructor, or {@code null} if none is declared. */
  @Nullable
  static PsiMethod getNullArgConstructor(PsiClass aClass) {
    final PsiMethod[] constructors = aClass.getConstructors();
    for (final PsiMethod constructor : constructors) {
      final PsiParameterList params = constructor.getParameterList();
      if (params.isEmpty()) {
        return constructor;
      }
    }
    return null;
  }

  /** Quick fix that generates an empty {@code private} constructor in the utility class. */
  protected static class CreateEmptyPrivateConstructor extends InspectionGadgetsFix {

    @Override
    @NotNull
    public String getFamilyName() {
      return InspectionGadgetsBundle.message("utility.class.without.private.constructor.create.quickfix");
    }

    @Override
    public void doFix(Project project, ProblemDescriptor descriptor) {
      final PsiElement classNameIdentifier = descriptor.getPsiElement();
      final PsiElement parent = classNameIdentifier.getParent();
      if (!(parent instanceof PsiClass)) {
        return;
      }
      final PsiClass aClass = (PsiClass)parent;
      if (isOnTheFly() && hasImplicitConstructorUsage(aClass)) {
        SwingUtilities.invokeLater(() -> Messages.showInfoMessage(
          aClass.getProject(),
          InspectionGadgetsBundle.message("utility.class.without.private.constructor.cant.generate.constructor.message"),
          InspectionGadgetsBundle.message("utility.class.without.private.constructor.cant.generate.constructor.title")));
        // BUG FIX: abort after warning. Previously execution fell through and the
        // private constructor was generated anyway, breaking the very implicit
        // constructor usages the message just reported.
        return;
      }
      final JavaPsiFacade psiFacade = JavaPsiFacade.getInstance(project);
      final PsiElementFactory factory = psiFacade.getElementFactory();
      final PsiMethod constructor = factory.createConstructor();
      final PsiModifierList modifierList = constructor.getModifierList();
      modifierList.setModifierProperty(PsiModifier.PRIVATE, true);
      aClass.add(constructor);
      final CodeStyleManager styleManager = CodeStyleManager.getInstance(project);
      styleManager.reformat(constructor);
    }
  }

  /** Quick fix that demotes the existing no-argument constructor to {@code private}. */
  private static class MakeConstructorPrivateFix extends InspectionGadgetsFix {

    @Override
    @NotNull
    public String getFamilyName() {
      return InspectionGadgetsBundle.message("utility.class.without.private.constructor.make.quickfix");
    }

    @Override
    public void doFix(Project project, ProblemDescriptor descriptor) {
      final PsiElement classNameIdentifier = descriptor.getPsiElement();
      final PsiElement parent = classNameIdentifier.getParent();
      if (!(parent instanceof PsiClass)) {
        return;
      }
      final PsiClass aClass = (PsiClass)parent;
      final PsiMethod[] constructors = aClass.getConstructors();
      for (final PsiMethod constructor : constructors) {
        final PsiParameterList parameterList = constructor.getParameterList();
        if (parameterList.isEmpty()) {
          final PsiModifierList modifiers = constructor.getModifierList();
          // Clear any existing visibility before setting private.
          modifiers.setModifierProperty(PsiModifier.PUBLIC, false);
          modifiers.setModifierProperty(PsiModifier.PROTECTED, false);
          modifiers.setModifierProperty(PsiModifier.PRIVATE, true);
        }
      }
    }
  }

  private class UtilityClassWithoutPrivateConstructorVisitor extends BaseInspectionVisitor {

    @Override
    public void visitClass(@NotNull PsiClass aClass) {
      // no call to super, so that it doesn't drill down to inner classes
      if (aClass.hasModifierProperty(PsiModifier.ABSTRACT)) {
        return;
      }
      if (!UtilityClassUtil.isUtilityClass(aClass)) {
        return;
      }
      if (ignoreClassesWithOnlyMain && hasOnlyMain(aClass)) {
        return;
      }
      if (hasPrivateConstructor(aClass)) {
        return;
      }
      if (AnnotationUtil.isAnnotated(aClass, ignorableAnnotations, 0)) {
        return;
      }
      // A private class without constructors cannot be instantiated from outside anyway.
      if (aClass.hasModifierProperty(PsiModifier.PRIVATE) && aClass.getConstructors().length == 0) {
        return;
      }
      // A subclassed "utility" class needs an accessible constructor; skip it.
      final SearchScope scope = GlobalSearchScope.projectScope(aClass.getProject());
      final Query<PsiClass> query = ClassInheritorsSearch.search(aClass, scope, true);
      final PsiClass subclass = query.findFirst();
      if (subclass != null) {
        return;
      }
      registerClassError(aClass, aClass, isOnTheFly());
    }

    /** Returns {@code true} if every non-private, non-constructor method is a standard {@code main}. */
    private boolean hasOnlyMain(PsiClass aClass) {
      final PsiMethod[] methods = aClass.getMethods();
      if (methods.length == 0) {
        return false;
      }
      for (PsiMethod method : methods) {
        if (method.isConstructor()) {
          continue;
        }
        if (!method.hasModifierProperty(PsiModifier.STATIC)) {
          return false;
        }
        if (method.hasModifierProperty(PsiModifier.PRIVATE)) {
          continue;
        }
        if (!method.hasModifierProperty(PsiModifier.PUBLIC)) {
          return false;
        }
        final String name = method.getName();
        if (!name.equals(HardcodedMethodConstants.MAIN)) {
          return false;
        }
        final PsiType returnType = method.getReturnType();
        if (!PsiType.VOID.equals(returnType)) {
          return false;
        }
        final PsiParameterList parameterList = method.getParameterList();
        if (parameterList.getParametersCount() != 1) {
          return false;
        }
        final PsiParameter[] parameters = parameterList.getParameters();
        final PsiParameter parameter = parameters[0];
        final PsiType type = parameter.getType();
        @NonNls final String stringArray = "java.lang.String[]";
        if (!type.equalsToText(stringArray)) {
          return false;
        }
      }
      return true;
    }

    boolean hasPrivateConstructor(PsiClass aClass) {
      final PsiMethod[] constructors = aClass.getConstructors();
      for (final PsiMethod constructor : constructors) {
        if (constructor.hasModifierProperty(PsiModifier.PRIVATE)) {
          return true;
        }
      }
      return false;
    }
  }
}
| |
package cz.sd2.cpdn.api.connector.tests.unit.resources;
import static org.junit.Assert.*;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.json.JSONException;
import org.junit.Before;
import org.junit.Test;
import cz.sd2.cpdn.api.connector.resources.Node;
/**
 * Unit tests for {@link Node}: accessors, JSON body construction and JSON response parsing.
 */
public class NodeTest {

    private Node n;

    @Before
    public void setUp() throws Exception {
        this.n = new Node();
    }

    @Test
    public void testNode() {
        this.n = new Node();
        assertNotNull(this.n);
        this.n = new Node(null, null, null, null, null);
        assertNotNull(this.n);
    }

    @Test
    public void testGetSetId() {
        // FIX: assertEquals instead of assertSame — assertSame on autoboxed ints
        // compares object identity and only "works" for values inside the Integer
        // cache (-128..127); equality is what these tests actually mean.
        assertEquals(0, this.n.getId().intValue());
        this.n.setId(1);
        assertEquals(1, this.n.getId().intValue());
    }

    @Test
    public void testGetSetScheme() {
        assertEquals(0, this.n.getScheme().intValue());
        this.n.setScheme(1);
        assertEquals(1, this.n.getScheme().intValue());
        this.n.setScheme(null);
        assertNull(this.n.getScheme());
    }

    @Test
    public void testGetSetMapPoint() {
        assertEquals(0, this.n.getMapPoint().intValue());
        this.n.setMapPoint(1);
        assertEquals(1, this.n.getMapPoint().intValue());
        this.n.setMapPoint(null);
        assertNull(this.n.getMapPoint());
    }

    @Test
    public void testGetSetCalc() {
        HashMap<String, Double> calc = new HashMap<String, Double>();
        this.n.setCalc(calc);
        // Identity check is intentional here: the getter must return the same instance.
        assertSame(calc, this.n.getCalc());
    }

    @Test
    public void testGetSetSpec() {
        HashMap<String, Object> spec = new HashMap<String, Object>();
        this.n.setSpec(spec);
        // Identity check is intentional here: the getter must return the same instance.
        assertSame(spec, this.n.getSpec());
    }

    @Test
    public void testCreateJsonBody() {
        this.n.setId(1);
        this.n.setScheme(1);
        this.n.setMapPoint(1);
        Map<String, Double> calc = new HashMap<String, Double>();
        calc.put("load.active", 0.0);
        calc.put("load.reactive", 0.0);
        calc.put("voltage.dropKv", 0.0);
        calc.put("voltage.dropProc", 0.0);
        calc.put("voltage.phase", 0.0);
        calc.put("voltage.value", 0.0);
        this.n.setCalc(calc);
        Map<String, Object> spec = new HashMap<String, Object>();
        spec.put("type", Node.TYPE_POWER);
        spec.put("label", "lbl");
        spec.put("cosFi", 0.0);
        spec.put("mi", 0.0);
        spec.put("lambda.max", 0.0);
        spec.put("lambda.min", 0.0);
        spec.put("power.active", 0.0);
        spec.put("power.installed", 0.0);
        spec.put("power.rated", 0.0);
        spec.put("power.reactive", 0.0);
        spec.put("reactance.longitudinal", 0.0);
        spec.put("reactance.transverse", 0.0);
        spec.put("voltage.level", 0.0);
        spec.put("voltage.phase", 0.0);
        spec.put("voltage.rated", 0.0);
        spec.put("voltage.value", 0.0);
        this.n.setSpec(spec);
        // Dotted keys must be expanded into nested JSON objects.
        String expected = "{\"mapPoint\":1,\"scheme\":1,\"calc\":{\"load\":{\"reactive\":0,\"active\":0},\"voltage\":{\"phase\":0,\"dropProc\":0,\"dropKv\":0,\"value\":0}}"
                + ",\"spec\":{\"lambda\":{\"min\":0,\"max\":0},\"reactance\":{\"transverse\":0,\"longitudinal\":0},\"label\":\"lbl\",\"power\":{\"rated\":0,\"installed\":0,"
                + "\"reactive\":0,\"active\":0},\"type\":\"power\",\"mi\":0,\"cosFi\":0,\"voltage\":{\"phase\":0,\"rated\":0,\"level\":0,\"value\":0}}}";
        assertEquals(expected, this.n.createJsonBody());
    }

    @Test
    public void testGetJsonCalcBody() {
        Map<String, Double> calc = new HashMap<String, Double>();
        calc.put("load.active", 0.0);
        calc.put("load.reactive", 0.0);
        calc.put("voltage.dropKv", 0.0);
        calc.put("voltage.dropProc", 0.0);
        calc.put("voltage.phase", 0.0);
        calc.put("voltage.value", 0.0);
        this.n.setCalc(calc);
        assertEquals("{\"load\":{\"reactive\":0,\"active\":0},\"voltage\":{\"phase\":0,\"dropProc\":0,\"dropKv\":0,\"value\":0}}", this.n.getJsonCalcBody().toString());
    }

    @Test
    public void testGetJsonSpecBody() {
        Map<String, Object> spec = new HashMap<String, Object>();
        spec.put("type", Node.TYPE_POWER);
        spec.put("label", "lbl");
        spec.put("cosFi", 0.0);
        spec.put("mi", 0.0);
        spec.put("lambda.max", 0.0);
        spec.put("lambda.min", 0.0);
        spec.put("power.active", 0.0);
        spec.put("power.installed", 0.0);
        spec.put("power.rated", 0.0);
        spec.put("power.reactive", 0.0);
        spec.put("reactance.longitudinal", 0.0);
        spec.put("reactance.transverse", 0.0);
        spec.put("voltage.level", 0.0);
        spec.put("voltage.phase", 0.0);
        spec.put("voltage.rated", 0.0);
        spec.put("voltage.value", 0.0);
        this.n.setSpec(spec);
        assertEquals("{\"lambda\":{\"min\":0,\"max\":0},\"reactance\":{\"transverse\":0,\"longitudinal\":0},\"label\":\"lbl\",\"power\":{\"rated\":0,\"installed\":0,\"reactive\":0,\"active\":0},\"type\":\"power\",\"mi\":0,\"cosFi\":0,\"voltage\":{\"phase\":0,\"rated\":0,\"level\":0,\"value\":0}}", this.n.getJsonSpecBody().toString());
    }

    @Test
    public void testGetJsonMapPointBody() {
        this.n.setMapPoint(1);
        assertEquals("{\"mapPoint\":1}", this.n.getJsonMapPointBody().toString());
    }

    @Test
    public void testGetJsonSchemeBody() {
        this.n.setScheme(1);
        assertEquals("{\"scheme\":1}", this.n.getJsonSchemeBody().toString());
    }

    @Test
    public void testBuildNode() {
        String response = "{\"_meta\":{\"id\":\"1\"},\"calc\":{\"load\":{\"active\":\"0\",\"reactive\":\"0\"},\"voltage\":{\"dropKv\":\"0\",\"dropProc\":\"0\",\"phase\":\"0\",\"value\":\"0\"}},"
                + "\"spec\":{\"type\":\"power\",\"label\":null,\"cosFi\":\"0\",\"mi\":\"0\",\"lambda\":{\"max\":\"0\",\"min\":\"0\"},\"power\":{\"active\":\"0\""
                + ",\"installed\":\"0\",\"rated\":\"0\",\"reactive\":\"0\"},\"reactance\":{\"longitudinal\":\"0\",\"transverse\":\"0\"},\"voltage\":{\"level\":\"0\""
                + ",\"phase\":\"0\",\"rated\":\"0\",\"value\":\"0\"}},\"scheme\":{\"_meta\":{\"id\":\"1\"}},\"mapPoint\":{\"_meta\":{\"id\":\"1\"}}}";
        this.n = Node.buildNode(response);
        Map<String, Double> calc = new HashMap<String, Double>();
        calc.put("load.active", 0.0);
        calc.put("load.reactive", 0.0);
        calc.put("voltage.dropKv", 0.0);
        calc.put("voltage.dropProc", 0.0);
        calc.put("voltage.phase", 0.0);
        calc.put("voltage.value", 0.0);
        Map<String, Object> spec = new HashMap<String, Object>();
        spec.put("type", Node.TYPE_POWER);
        spec.put("label", null);
        spec.put("cosFi", 0.0);
        spec.put("mi", 0.0);
        spec.put("lambda.max", 0.0);
        spec.put("lambda.min", 0.0);
        spec.put("power.active", 0.0);
        spec.put("power.installed", 0.0);
        spec.put("power.rated", 0.0);
        spec.put("power.reactive", 0.0);
        spec.put("reactance.longitudinal", 0.0);
        spec.put("reactance.transverse", 0.0);
        spec.put("voltage.level", 0.0);
        spec.put("voltage.phase", 0.0);
        spec.put("voltage.rated", 0.0);
        spec.put("voltage.value", 0.0);
        assertEquals(1, this.n.getId().intValue());
        assertEquals(1, this.n.getMapPoint().intValue());
        assertEquals(1, this.n.getScheme().intValue());
        assertEquals(calc, this.n.getCalc());
        assertEquals(spec, this.n.getSpec());
    }

    @Test(expected = JSONException.class)
    public void testBuildNodeEmptyResponse() {
        this.n = Node.buildNode("");
    }

    @Test(expected = JSONException.class)
    public void testBuildNodeInvalidResponse() {
        this.n = Node.buildNode("{}");
    }

    @Test
    public void testBuildNodes() {
        String response = "{\"items\":[{\"_meta\":{\"id\":\"1\"},\"calc\":{\"load\":{\"active\":\"0\",\"reactive\":\"0\"},\"voltage\":{\"dropKv\":\"0\",\"dropProc\":\"0\",\"phase\":\"0\",\"value\":\"0\"}},"
                + "\"spec\":{\"type\":\"power\",\"label\":null,\"cosFi\":\"0\",\"mi\":\"0\",\"lambda\":{\"max\":\"0\",\"min\":\"0\"},\"power\":{\"active\":\"0\""
                + ",\"installed\":\"0\",\"rated\":\"0\",\"reactive\":\"0\"},\"reactance\":{\"longitudinal\":\"0\",\"transverse\":\"0\"},\"voltage\":{\"level\":\"0\""
                + ",\"phase\":\"0\",\"rated\":\"0\",\"value\":\"0\"}},\"scheme\":{\"_meta\":{\"id\":\"1\"}},\"mapPoint\":{\"_meta\":{\"id\":\"1\"}}},"
                + "{\"_meta\":{\"id\":\"2\"},\"calc\":{\"load\":{\"active\":\"0\",\"reactive\":\"0\"},\"voltage\":{\"dropKv\":\"0\",\"dropProc\":\"0\",\"phase\":\"0\",\"value\":\"0\"}},"
                + "\"spec\":{\"type\":\"power\",\"label\":null,\"cosFi\":\"0\",\"mi\":\"0\",\"lambda\":{\"max\":\"0\",\"min\":\"0\"},\"power\":{\"active\":\"0\""
                + ",\"installed\":\"0\",\"rated\":\"0\",\"reactive\":\"0\"},\"reactance\":{\"longitudinal\":\"0\",\"transverse\":\"0\"},\"voltage\":{\"level\":\"0\""
                + ",\"phase\":\"0\",\"rated\":\"0\",\"value\":\"0\"}},\"scheme\":{\"_meta\":{\"id\":\"1\"}},\"mapPoint\":{\"_meta\":{\"id\":\"1\"}}}]}";
        List<Node> n = Node.buildNodes(response);
        assertEquals(1, n.get(0).getId().intValue());
        assertEquals(1, n.get(0).getMapPoint().intValue());
        assertEquals(1, n.get(0).getScheme().intValue());
        assertEquals(2, n.get(1).getId().intValue());
        assertEquals(1, n.get(1).getMapPoint().intValue());
        assertEquals(1, n.get(1).getScheme().intValue());
    }

    @Test
    public void testBuildCalcMap() {
        Map<String, Double> expected = new HashMap<String, Double>();
        expected.put("load.active", 0.0);
        expected.put("load.reactive", 0.0);
        expected.put("voltage.dropKv", 0.0);
        expected.put("voltage.dropProc", 0.0);
        expected.put("voltage.phase", 0.0);
        expected.put("voltage.value", 0.0);
        assertEquals(expected, Node.buildCalcMap(0.0, 0.0, 0.0, 0.0, 0.0, 0.0));
    }

    @Test
    public void testBuildSpecMap() {
        Map<String, Object> expected = new HashMap<String, Object>();
        expected.put("type", Node.TYPE_POWER);
        expected.put("label", "lbl");
        expected.put("cosFi", 0.0);
        expected.put("mi", 0.0);
        expected.put("lambda.max", 0.0);
        expected.put("lambda.min", 0.0);
        expected.put("power.active", 0.0);
        expected.put("power.installed", 0.0);
        expected.put("power.rated", 0.0);
        expected.put("power.reactive", 0.0);
        expected.put("reactance.longitudinal", 0.0);
        expected.put("reactance.transverse", 0.0);
        expected.put("voltage.level", 0.0);
        expected.put("voltage.phase", 0.0);
        expected.put("voltage.rated", 0.0);
        expected.put("voltage.value", 0.0);
        assertEquals(expected, Node.buildSpecMap(Node.TYPE_POWER, "lbl", 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0));
    }
}
| |
package com.tngtech.archunit.library.dependencies;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import java.util.Set;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.collect.ContiguousSet;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Range;
import com.tngtech.archunit.ArchConfiguration;
import com.tngtech.archunit.library.dependencies.Graph.Cycles;
import org.junit.Test;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.DiscreteDomain.integers;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.common.collect.Sets.cartesianProduct;
import static com.tngtech.archunit.library.dependencies.CycleConfiguration.MAX_NUMBER_OF_CYCLES_TO_DETECT_PROPERTY_NAME;
import static java.util.Arrays.asList;
import static java.util.Collections.singleton;
import static java.util.Collections.singletonList;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Tests for {@link Graph#findCycles()}: no-cycle graphs, simple and nested cycles,
 * complete graphs, and the configured cycle-detection limit.
 */
public class GraphTest {

    // Shared RNG for generating unique node names (see randomNode()).
    private static final Random random = new Random();

    @Test
    public void graph_without_cycles() {
        Graph<String, String> graph = new Graph<>();
        graph.addNodes(asList(randomNode(), randomNode(), randomNode()));
        assertThat(graph.findCycles()).isEmpty();
    }

    @Test
    public void three_node_cycle_is_detected() {
        Graph<String, String> graph = new Graph<>();
        String nodeA = "Node-A";
        String nodeB = "Node-B";
        String nodeC = "Node-C";
        graph.addNodes(asList(nodeA, nodeB, nodeC));
        graph.addEdges(ImmutableSet.of(stringEdge(nodeA, nodeB), stringEdge(nodeB, nodeC), stringEdge(nodeC, nodeA)));
        Cycle<String, String> cycle = getOnlyElement(graph.findCycles());
        assertThat(cycle.getEdges()).hasSize(3);
        assertEdgeExists(cycle, nodeA, nodeB);
        assertEdgeExists(cycle, nodeB, nodeC);
        assertEdgeExists(cycle, nodeC, nodeA);
    }

    @Test
    public void sub_cycle_of_three_node_graph_is_detected() {
        Graph<String, String> graph = new Graph<>();
        String nodeA = "Node-A";
        String nodeB = "Node-B";
        String nodeC = "Node-C";
        graph.addNodes(asList(nodeA, nodeB, nodeC));
        graph.addEdges(ImmutableSet.of(stringEdge(nodeB, nodeA), stringEdge(nodeA, nodeB)));
        Cycle<String, String> cycle = getOnlyElement(graph.findCycles());
        assertThat(cycle.getEdges()).hasSize(2);
        assertEdgeExists(cycle, nodeA, nodeB);
        assertEdgeExists(cycle, nodeB, nodeA);
    }

    @Test
    public void nested_cycles_are_detected() {
        Graph<String, String> graph = new Graph<>();
        String nodeA = "Node-A";
        String nodeB = "Node-B";
        String nodeC = "Node-C";
        graph.addNodes(asList(nodeA, nodeB, nodeC));
        graph.addEdges(ImmutableSet.of(stringEdge(nodeB, nodeA), stringEdge(nodeA, nodeB), stringEdge(nodeC, nodeA), stringEdge(nodeB, nodeC)));
        assertThat(graph.findCycles()).hasSize(2);
    }

    @Test
    public void multiple_cycles_are_detected() {
        Graph<String, String> graph = new Graph<>();
        Cycle<String, String> threeElements = randomCycle(3);
        Cycle<String, String> fourElements = randomCycle(4);
        Cycle<String, String> fiveElements = randomCycle(5);
        addCycles(graph, threeElements, fourElements, fiveElements);
        // Cross links connect the cycles without creating additional ones.
        addCrossLink(graph, threeElements, fourElements);
        addCrossLink(graph, fourElements, fiveElements);
        Collection<Cycle<String, String>> cycles = graph.findCycles();
        assertThat(cycles).containsOnly(threeElements, fourElements, fiveElements);
    }

    @Test
    public void double_linked_three_node_cycle_results_in_five_cycles() {
        Graph<String, String> graph = new Graph<>();
        Cycle<String, String> threeElements = randomCycle(3);
        addCycles(graph, threeElements);
        for (Edge<String, String> edge : threeElements.getEdges()) {
            graph.addEdges(singleEdge(edge.getTo(), edge.getFrom()));
        }
        // 1 forward 3-cycle + 1 backward 3-cycle + 3 two-node cycles = 5
        assertThat(graph.findCycles()).hasSize(5);
    }

    @Test
    public void complete_graph() {
        Graph<Integer, Integer> completeGraph = createCompleteGraph(3);
        Iterable<Cycle<Integer, Integer>> cycles = completeGraph.findCycles();
        assertThat(cycles).containsOnly(createCycle(ImmutableList.of(0, 1, 2, 0)),
                createCycle(ImmutableList.of(0, 2, 1, 0)),
                createCycle(ImmutableList.of(0, 1, 0)),
                createCycle(ImmutableList.of(1, 2, 1)),
                createCycle(ImmutableList.of(2, 0, 2)));
    }

    @Test
    public void graph_which_causes_error_when_dependently_blocked_nodes_are_not_cleared_after_unblocking() {
        ImmutableSet<Integer> nodes = ImmutableSet.of(0, 1, 2, 3, 4, 5);
        Graph<Integer, Object> graph = new Graph<>();
        graph.addNodes(nodes);
        graph.addEdges(ImmutableSet.of(
                newEdge(0, 4),
                newEdge(1, 0),
                newEdge(1, 5),
                newEdge(2, 1),
                newEdge(3, 1),
                newEdge(3, 5),
                newEdge(4, 3),
                newEdge(5, 1),
                newEdge(5, 2)
        ));
        assertThat(graph.findCycles()).isNotEmpty();
    }

    // This test covers some edge cases, e.g. if too many nodes stay blocked
    @Test
    public void finds_cycles_in_real_life_graph() {
        Graph<Integer, Object> graph = RealLifeGraph.get();
        int expectedNumberOfCycles = 10000;
        // NOTE(review): mutates global ArchConfiguration without resetting it afterwards —
        // confirm whether a reset/rule is needed to isolate this from other tests.
        ArchConfiguration.get().setProperty(MAX_NUMBER_OF_CYCLES_TO_DETECT_PROPERTY_NAME, String.valueOf(expectedNumberOfCycles));
        Cycles<Integer, Object> cycles = graph.findCycles();
        assertThat(cycles).hasSize(expectedNumberOfCycles);
        assertThat(cycles.maxNumberOfCyclesReached()).as("maximum number of cycles reached").isTrue();
    }

    // Suppresses the unchecked generic array creation warning from the varargs call
    // to Sets.cartesianProduct.
    @SuppressWarnings("unchecked")
    private Graph<Integer, Integer> createCompleteGraph(int n) {
        ContiguousSet<Integer> integers = ContiguousSet.create(Range.closedOpen(0, n), integers());
        Graph<Integer, Integer> graph = new Graph<>();
        graph.addNodes(integers);
        // Every ordered pair of distinct nodes becomes an edge.
        graph.addEdges(FluentIterable.from(cartesianProduct(integers, integers))
                .filter(new Predicate<List<Integer>>() {
                    @Override
                    public boolean apply(List<Integer> input) {
                        return !input.get(0).equals(input.get(1));
                    }
                })
                .transform(new Function<List<Integer>, Edge<Integer, Integer>>() {
                    @Override
                    public Edge<Integer, Integer> apply(List<Integer> input) {
                        return integerEdge(input.get(0), input.get(1));
                    }
                }).toSet());
        return graph;
    }

    /** Builds a cycle from a node sequence, e.g. [0, 1, 2, 0] -> edges 0->1, 1->2, 2->0. */
    private Cycle<Integer, Integer> createCycle(List<Integer> numbers) {
        ImmutableList.Builder<Edge<Integer, Integer>> builder = ImmutableList.builder();
        for (int i = 0; i < numbers.size() - 1; i++) {
            builder.add(integerEdge(numbers.get(i), numbers.get(i + 1)));
        }
        return new Cycle<>(builder.build());
    }

    /** Creates a cycle over {@code numberOfNodes} freshly generated random nodes. */
    private Cycle<String, String> randomCycle(int numberOfNodes) {
        checkArgument(numberOfNodes > 1, "A cycle can't be formed by less than 2 nodes");
        Path<String, String> path = new Path<>(singleEdgeList(randomNode(), randomNode()));
        for (int i = 0; i < numberOfNodes - 2; i++) {
            path.append(stringEdge(path.getEnd(), randomNode()));
        }
        return new Cycle<>(path.append(stringEdge(path.getEnd(), path.getStart())));
    }

    @SafeVarargs
    private final void addCycles(Graph<String, String> graph, Cycle<String, String>... cycles) {
        for (Cycle<String, String> cycle : cycles) {
            for (Edge<String, String> edge : cycle.getEdges()) {
                graph.addNodes(asList(edge.getFrom(), edge.getTo()));
            }
            graph.addEdges(cycle.getEdges());
        }
    }

    /** Adds a single edge from a random node of {@code first} to a random node of {@code second}. */
    private void addCrossLink(Graph<String, String> graph, Cycle<String, String> first, Cycle<String, String> second) {
        // Consistency fix: reuse the class-level RNG instead of allocating a new
        // Random per call, matching randomNode().
        String origin = first.getEdges().get(random.nextInt(first.getEdges().size())).getFrom();
        String target = second.getEdges().get(random.nextInt(second.getEdges().size())).getFrom();
        graph.addEdges(singleEdge(origin, target));
    }

    private static void assertEdgeExists(Cycle<?, ?> cycle, Object from, Object to) {
        for (Edge<?, ?> edge : cycle.getEdges()) {
            if (edge.getFrom().equals(from) && edge.getTo().equals(to)) {
                return;
            }
        }
        throw new AssertionError("Expected Cycle to contain an edge from " + from + " to " + to);
    }

    /** Generates a (practically) unique node name. */
    static String randomNode() {
        return "" + random.nextLong() + System.nanoTime();
    }

    static Edge<String, String> stringEdge(String nodeA, String nodeB) {
        return newEdge(nodeA, nodeB);
    }

    private Edge<Integer, Integer> integerEdge(Integer origin, Integer target) {
        return newEdge(origin, target);
    }

    static List<Edge<String, String>> singleEdgeList(String from, String to) {
        return singletonList(stringEdge(from, to));
    }

    static Set<Edge<String, String>> singleEdge(String from, String to) {
        return singleton(stringEdge(from, to));
    }

    static <NODE, ATTACHMENT> Edge<NODE, ATTACHMENT> newEdge(NODE from, NODE to) {
        return new Edge<>(from, to, Collections.<ATTACHMENT>emptySet());
    }
}
| |
package org.jboss.resteasy.core;
import org.jboss.resteasy.resteasy_jaxrs.i18n.LogMessages;
import org.jboss.resteasy.spi.ApplicationException;
import org.jboss.resteasy.spi.Failure;
import org.jboss.resteasy.spi.HttpRequest;
import org.jboss.resteasy.spi.NoLogWebApplicationException;
import org.jboss.resteasy.spi.ReaderException;
import org.jboss.resteasy.spi.ResteasyProviderFactory;
import org.jboss.resteasy.spi.UnhandledException;
import org.jboss.resteasy.spi.WriterException;
import org.jboss.resteasy.util.HttpResponseCodes;
import javax.ws.rs.NotFoundException;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper;
import java.util.HashSet;
import java.util.Set;
/**
* @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
* @version $Revision: 1 $
*/
public class ExceptionHandler
{
protected ResteasyProviderFactory providerFactory;
protected Set<String> unwrappedExceptions = new HashSet<String>();
protected boolean mapperExecuted;
public ExceptionHandler(ResteasyProviderFactory providerFactory, Set<String> unwrappedExceptions)
{
this.providerFactory = providerFactory;
this.unwrappedExceptions = unwrappedExceptions;
}
public boolean isMapperExecuted()
{
return mapperExecuted;
}
/**
* If there exists an Exception mapper for exception, execute it, otherwise, do NOT recurse up class hierarchy
* of exception.
*
* @param exception
* @return
*/
public Response executeExactExceptionMapper(Throwable exception)
{
ExceptionMapper mapper = providerFactory.getExceptionMappers().get(exception.getClass());
if (mapper == null) return null;
mapperExecuted = true;
return mapper.toResponse(exception);
}
public Response executeExceptionMapperForClass(Throwable exception, Class clazz)
{
ExceptionMapper mapper = providerFactory.getExceptionMappers().get(clazz);
if (mapper == null) return null;
mapperExecuted = true;
return mapper.toResponse(exception);
}
protected Response handleApplicationException(HttpRequest request, ApplicationException e)
{
Response jaxrsResponse = null;
// See if there is a mapper for ApplicationException
if ((jaxrsResponse = executeExceptionMapperForClass(e, ApplicationException.class)) != null) {
return jaxrsResponse;
}
jaxrsResponse = unwrapException(request, e);
if (jaxrsResponse == null) {
throw new UnhandledException(e.getCause());
}
return jaxrsResponse;
}
/**
* Execute an ExceptionMapper if one exists for the given exception. Recurse to base class if not found
*
* @param exception
* @return true if an ExceptionMapper was found and executed
*/
public Response executeExceptionMapper(Throwable exception)
{
ExceptionMapper mapper = null;
Class causeClass = exception.getClass();
while (mapper == null) {
if (causeClass == null) break;
mapper = providerFactory.getExceptionMappers().get(causeClass);
if (mapper == null) causeClass = causeClass.getSuperclass();
}
if (mapper != null) {
mapperExecuted = true;
Response jaxrsResponse = mapper.toResponse(exception);
if (jaxrsResponse == null) {
jaxrsResponse = Response.status(204).build();
}
return jaxrsResponse;
}
return null;
}
protected Response unwrapException(HttpRequest request, Throwable e)
{
Response jaxrsResponse = null;
Throwable unwrappedException = e.getCause();
/*
* If the response property of the exception does not
* contain an entity and an exception mapping provider
* (see section 4.4) is available for
* WebApplicationException an implementation MUST use the
* provider to create a new Response instance, otherwise
* the response property is used directly.
*/
if (unwrappedException instanceof WebApplicationException) {
WebApplicationException wae = (WebApplicationException) unwrappedException;
if (wae.getResponse() != null && wae.getResponse().getEntity() != null) return wae.getResponse();
}
if ((jaxrsResponse = executeExceptionMapper(unwrappedException)) != null) {
return jaxrsResponse;
}
if (unwrappedException instanceof WebApplicationException) {
return handleWebApplicationException((WebApplicationException) unwrappedException);
}
else if (unwrappedException instanceof Failure) {
return handleFailure(request, (Failure) unwrappedException);
}
else {
if (unwrappedExceptions.contains(unwrappedException.getClass().getName()) && unwrappedException.getCause() != null) {
return unwrapException(request, unwrappedException);
}
else {
return null;
}
}
}
protected Response handleFailure(HttpRequest request, Failure failure)
{
if (failure.isLoggable())
LogMessages.LOGGER.failedExecutingError(request.getHttpMethod(), request.getUri().getPath(), failure);
else LogMessages.LOGGER.failedExecutingDebug(request.getHttpMethod(), request.getUri().getPath(), failure);
if (failure.getResponse() != null) {
return failure.getResponse();
}
else {
Response.ResponseBuilder builder = Response.status(failure.getErrorCode());
if (failure.getMessage() != null) builder.type(MediaType.TEXT_HTML).entity(failure.getMessage());
return builder.build();
}
}
protected Response handleWriterException(HttpRequest request, WriterException e)
{
Response jaxrsResponse = null;
// See if there is a general mapper for WriterException
if ((jaxrsResponse = executeExceptionMapperForClass(e, WriterException.class)) != null) {
return jaxrsResponse;
}
if (e.getResponse() != null || e.getErrorCode() > -1) {
return handleFailure(request, e);
}
else if (e.getCause() != null) {
if ((jaxrsResponse = unwrapException(request, e)) != null) return jaxrsResponse;
}
e.setErrorCode(HttpResponseCodes.SC_INTERNAL_SERVER_ERROR);
return handleFailure(request, e);
}
protected Response handleReaderException(HttpRequest request, ReaderException e)
{
Response jaxrsResponse = null;
// See if there is a general mapper for ReaderException
if ((jaxrsResponse = executeExceptionMapperForClass(e, ReaderException.class)) != null) {
return jaxrsResponse;
}
if (e.getResponse() != null || e.getErrorCode() > -1) {
return handleFailure(request, e);
}
else if (e.getCause() != null) {
if ((jaxrsResponse = unwrapException(request, e)) != null) return jaxrsResponse;
}
e.setErrorCode(HttpResponseCodes.SC_BAD_REQUEST);
return handleFailure(request, e);
}
/**
 * Logs a {@link WebApplicationException} and returns its embedded response.
 * {@code NotFoundException} is considered routine and logged at debug level;
 * {@code NoLogWebApplicationException} is deliberately not logged at all.
 *
 * @param wae the exception to handle
 * @return the exception's own response
 */
protected Response handleWebApplicationException(WebApplicationException wae)
{
   boolean suppressLog = wae instanceof NoLogWebApplicationException;
   if (wae instanceof NotFoundException) {
      // 404s happen constantly; keep the noise at debug.
      LogMessages.LOGGER.failedToExecuteDebug(wae);
   }
   else if (!suppressLog) {
      LogMessages.LOGGER.failedToExecute(wae);
   }
   return wae.getResponse();
}
/**
 * Top-level entry point that maps any {@link Throwable} raised during request
 * processing to a {@link Response}. The ordering of the checks below is
 * significant: exact-class mappers first, then wrapper exceptions (which
 * delegate to cause-unwrapping handlers), then entity-carrying
 * {@code WebApplicationException}s, then the general mapper chain, then
 * type-specific fallbacks. Unrecognized throwables are logged and rethrown
 * wrapped in {@code UnhandledException}.
 *
 * @param request the request being processed
 * @param e the throwable to map
 * @return the response to send to the client
 */
public Response handleException(HttpRequest request, Throwable e)
{
   Response jaxrsResponse = null;
   // See if there is an ExceptionMapper for the exact class of the exception instance being thrown
   if ((jaxrsResponse = executeExactExceptionMapper(e)) != null) return jaxrsResponse;

   // These are wrapper exceptions so they need to be processed first as they map e.getCause()
   if (e instanceof ApplicationException) {
      return handleApplicationException(request, (ApplicationException) e);
   }
   else if (e instanceof WriterException) {
      return handleWriterException(request, (WriterException) e);
   }
   else if (e instanceof ReaderException) {
      return handleReaderException(request, (ReaderException) e);
   }

   /*
    * If the response property of the exception does not
    * contain an entity and an exception mapping provider
    * (see section 4.4) is available for
    * WebApplicationException an implementation MUST use the
    * provider to create a new Response instance, otherwise
    * the response property is used directly.
    */
   if (e instanceof WebApplicationException) {
      // An entity-bearing response bypasses the mapper chain entirely, per the spec text above.
      WebApplicationException wae = (WebApplicationException) e;
      if (wae.getResponse() != null && wae.getResponse().getEntity() != null) return wae.getResponse();
   }

   // First try and handle it with a mapper
   if ((jaxrsResponse = executeExceptionMapper(e)) != null) {
      return jaxrsResponse;
   }
   // Otherwise do specific things
   else if (e instanceof WebApplicationException) {
      return handleWebApplicationException((WebApplicationException) e);
   }
   else if (e instanceof Failure) {
      return handleFailure(request, (Failure) e);
   }
   else {
      // No handler knows this throwable: log it and propagate as unhandled.
      LogMessages.LOGGER.unknownException(request.getHttpMethod(), request.getUri().getPath(), e);
      throw new UnhandledException(e);
   }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.hadoop.impl.v2;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobContextImpl;
import org.apache.hadoop.mapred.JobID;
import org.apache.hadoop.mapreduce.JobSubmissionFiles;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.split.JobSplit;
import org.apache.hadoop.mapreduce.split.SplitMetaInfoReader;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.internal.processors.hadoop.HadoopClassLoader;
import org.apache.ignite.internal.processors.hadoop.HadoopCommonUtils;
import org.apache.ignite.internal.processors.hadoop.HadoopDefaultJobInfo;
import org.apache.ignite.internal.processors.hadoop.HadoopExternalSplit;
import org.apache.ignite.internal.processors.hadoop.HadoopFileBlock;
import org.apache.ignite.internal.processors.hadoop.HadoopHelper;
import org.apache.ignite.internal.processors.hadoop.HadoopInputSplit;
import org.apache.ignite.internal.processors.hadoop.HadoopJob;
import org.apache.ignite.internal.processors.hadoop.HadoopJobId;
import org.apache.ignite.internal.processors.hadoop.HadoopJobInfo;
import org.apache.ignite.internal.processors.hadoop.HadoopJobProperty;
import org.apache.ignite.internal.processors.hadoop.HadoopTaskContext;
import org.apache.ignite.internal.processors.hadoop.HadoopTaskInfo;
import org.apache.ignite.internal.processors.hadoop.HadoopTaskType;
import org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopFileSystemsUtils;
import org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopLazyConcurrentMap;
import org.apache.ignite.internal.processors.hadoop.impl.v1.HadoopV1Splitter;
import org.apache.ignite.internal.util.future.GridFutureAdapter;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.T2;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.jetbrains.annotations.Nullable;
import org.jsr166.ConcurrentHashMap8;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
import java.util.Queue;
import java.util.UUID;
import java.util.concurrent.ConcurrentLinkedDeque;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ConcurrentMap;
import static org.apache.ignite.internal.processors.hadoop.HadoopJobProperty.JOB_SHARED_CLASSLOADER;
import static org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils.jobLocalDir;
import static org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils.taskLocalDir;
import static org.apache.ignite.internal.processors.hadoop.impl.HadoopUtils.transformException;
import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopFileSystemCacheUtils.FsCacheKey;
import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopFileSystemCacheUtils.createHadoopLazyConcurrentMap;
import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopFileSystemCacheUtils.fileSystemForMrUserWithCaching;
/**
 * Hadoop job implementation for v2 API.
 *
 * Wraps a Hadoop {@link JobConf}/{@link JobContextImpl} pair and manages the
 * per-task class loaders, task contexts, and cached file systems that belong
 * to one Ignite-executed Hadoop job. Methods that touch Hadoop classes swap
 * the thread context class loader for the duration of the call and restore it
 * in a finally block.
 */
public class HadoopV2Job implements HadoopJob {
    /** Hadoop job configuration. */
    private final JobConf jobConf;

    /** Hadoop job context built over {@link #jobConf}. */
    private final JobContextImpl jobCtx;

    /** Helper supplying the Ignite work directory and class loader support. */
    private final HadoopHelper helper;

    /** Hadoop job ID. */
    private final HadoopJobId jobId;

    /** Job info. */
    protected final HadoopJobInfo jobInfo;

    /** Native library names. */
    private final String[] libNames;

    /** Hadoop-native job ID derived from {@link #jobId}. */
    private final JobID hadoopJobID;

    /** Resource manager for job/task local directories and class path. */
    private final HadoopV2JobResourceManager rsrcMgr;

    /** Per-task futures so each task context is created exactly once. */
    private final ConcurrentMap<T2<HadoopTaskType, Integer>, GridFutureAdapter<HadoopTaskContext>> ctxs =
        new ConcurrentHashMap8<>();

    /** Pooling task context class and thus class loading environment. */
    private final Queue<Class<? extends HadoopTaskContext>> taskCtxClsPool = new ConcurrentLinkedQueue<>();

    /** All created contexts. */
    private final Queue<Class<? extends HadoopTaskContext>> fullCtxClsQueue = new ConcurrentLinkedDeque<>();

    /** File system cache map. */
    private final HadoopLazyConcurrentMap<FsCacheKey, FileSystem> fsMap = createHadoopLazyConcurrentMap();

    /** Shared class loader. */
    private volatile HadoopClassLoader sharedClsLdr;

    /** Local node ID */
    private volatile UUID locNodeId;

    /** Serialized JobConf. */
    private volatile byte[] jobConfData;

    /**
     * Constructor.
     *
     * @param jobId Job ID.
     * @param jobInfo Job info.
     * @param log Logger.
     * @param libNames Optional additional native library names.
     * @param helper Helper supplying the Ignite work directory and class loaders.
     */
    public HadoopV2Job(HadoopJobId jobId, final HadoopDefaultJobInfo jobInfo, IgniteLogger log,
        @Nullable String[] libNames, HadoopHelper helper) {
        assert jobId != null;
        assert jobInfo != null;

        this.jobId = jobId;
        this.jobInfo = jobInfo;
        this.libNames = libNames;
        this.helper = helper;

        // Hadoop classes must resolve against this job's class loader while the conf is built.
        ClassLoader oldLdr = HadoopCommonUtils.setContextClassLoader(getClass().getClassLoader());

        try {
            hadoopJobID = new JobID(jobId.globalId().toString(), jobId.localId());

            jobConf = new JobConf();

            HadoopFileSystemsUtils.setupFileSystems(jobConf);

            // Copy all job properties into the Hadoop configuration.
            for (Map.Entry<String,String> e : jobInfo.properties().entrySet())
                jobConf.set(e.getKey(), e.getValue());

            jobCtx = new JobContextImpl(jobConf, hadoopJobID);

            rsrcMgr = new HadoopV2JobResourceManager(jobId, jobCtx, log, this);
        }
        finally {
            HadoopCommonUtils.restoreContextClassLoader(oldLdr);
        }
    }

    /** {@inheritDoc} */
    @Override public HadoopJobId id() {
        return jobId;
    }

    /** {@inheritDoc} */
    @Override public HadoopJobInfo info() {
        return jobInfo;
    }

    /** {@inheritDoc} */
    @Override public Collection<HadoopInputSplit> input() throws IgniteCheckedException {
        ClassLoader oldLdr = HadoopCommonUtils.setContextClassLoader(jobConf.getClassLoader());

        try {
            String jobDirPath = jobConf.get(MRJobConfig.MAPREDUCE_JOB_DIR);

            if (jobDirPath == null) { // Probably job was submitted not by hadoop client.
                // Assume that we have needed classes and try to generate input splits ourself.
                if (jobConf.getUseNewMapper())
                    return HadoopV2Splitter.splitJob(jobCtx);
                else
                    return HadoopV1Splitter.splitJob(jobConf);
            }

            Path jobDir = new Path(jobDirPath);

            try {
                FileSystem fs = fileSystem(jobDir.toUri(), jobConf);

                JobSplit.TaskSplitMetaInfo[] metaInfos = SplitMetaInfoReader.readSplitMetaInfo(hadoopJobID, fs, jobConf,
                    jobDir);

                if (F.isEmpty(metaInfos))
                    throw new IgniteCheckedException("No input splits found.");

                Path splitsFile = JobSubmissionFiles.getJobSplitFile(jobDir);

                try (FSDataInputStream in = fs.open(splitsFile)) {
                    Collection<HadoopInputSplit> res = new ArrayList<>(metaInfos.length);

                    // For each split, seek to its offset in the splits file and decode it:
                    // first as a v1 file block, then v2, otherwise keep it as an opaque external split.
                    for (JobSplit.TaskSplitMetaInfo metaInfo : metaInfos) {
                        long off = metaInfo.getStartOffset();

                        String[] hosts = metaInfo.getLocations();

                        in.seek(off);

                        String clsName = Text.readString(in);

                        HadoopFileBlock block = HadoopV1Splitter.readFileBlock(clsName, in, hosts);

                        if (block == null)
                            block = HadoopV2Splitter.readFileBlock(clsName, in, hosts);

                        res.add(block != null ? block : new HadoopExternalSplit(hosts, off));
                    }

                    return res;
                }
            }
            catch (Throwable e) {
                // Errors must propagate untouched; everything else is normalized.
                if (e instanceof Error)
                    throw (Error)e;
                else
                    throw transformException(e);
            }
        }
        finally {
            HadoopCommonUtils.restoreContextClassLoader(oldLdr);
        }
    }

    /** {@inheritDoc} */
    @SuppressWarnings({"unchecked", "MismatchedQueryAndUpdateOfCollection" })
    @Override public HadoopTaskContext getTaskContext(HadoopTaskInfo info) throws IgniteCheckedException {
        T2<HadoopTaskType, Integer> locTaskId = new T2<>(info.type(), info.taskNumber());

        // Future-per-task guarantees only one thread builds a given context; losers wait on the future.
        GridFutureAdapter<HadoopTaskContext> fut = ctxs.get(locTaskId);

        if (fut != null)
            return fut.get();

        GridFutureAdapter<HadoopTaskContext> old = ctxs.putIfAbsent(locTaskId, fut = new GridFutureAdapter<>());

        if (old != null)
            return old.get();

        Class<? extends HadoopTaskContext> cls = taskCtxClsPool.poll();

        try {
            if (cls == null) {
                // If there is no pooled class, then load new one.
                // Note that the classloader identified by the task it was initially created for,
                // but later it may be reused for other tasks.
                HadoopClassLoader ldr = sharedClsLdr != null ?
                    sharedClsLdr : createClassLoader(HadoopClassLoader.nameForTask(info, false));

                cls = (Class<? extends HadoopTaskContext>)ldr.loadClass(HadoopV2TaskContext.class.getName());

                fullCtxClsQueue.add(cls);
            }

            Constructor<?> ctr = cls.getConstructor(HadoopTaskInfo.class, HadoopJob.class,
                HadoopJobId.class, UUID.class, DataInput.class);

            // Lazily serialize the JobConf once (double-checked on the volatile field)
            // so every task context can deserialize its own private copy.
            if (jobConfData == null)
                synchronized(jobConf) {
                    if (jobConfData == null) {
                        ByteArrayOutputStream buf = new ByteArrayOutputStream();

                        jobConf.write(new DataOutputStream(buf));

                        jobConfData = buf.toByteArray();
                    }
                }

            HadoopTaskContext res = (HadoopTaskContext)ctr.newInstance(info, this, jobId, locNodeId,
                new DataInputStream(new ByteArrayInputStream(jobConfData)));

            fut.onDone(res);

            return res;
        }
        catch (Throwable e) {
            // Complete the future exceptionally so waiters fail too instead of hanging.
            IgniteCheckedException te = transformException(e);

            fut.onDone(te);

            if (e instanceof Error)
                throw (Error)e;

            throw te;
        }
    }

    /** {@inheritDoc} */
    @Override public void initialize(boolean external, UUID locNodeId) throws IgniteCheckedException {
        assert locNodeId != null;

        this.locNodeId = locNodeId;

        ClassLoader oldLdr = HadoopCommonUtils.setContextClassLoader(getClass().getClassLoader());

        try {
            rsrcMgr.prepareJobEnvironment(!external, jobLocalDir(igniteWorkDirectory(), locNodeId, jobId));

            // When enabled (the default), one class loader is shared by all tasks of this job.
            if (HadoopJobProperty.get(jobInfo, JOB_SHARED_CLASSLOADER, true))
                sharedClsLdr = createClassLoader(HadoopClassLoader.nameForJob(jobId));
        }
        finally {
            HadoopCommonUtils.restoreContextClassLoader(oldLdr);
        }
    }

    /** {@inheritDoc} */
    @SuppressWarnings("ThrowFromFinallyBlock")
    @Override public void dispose(boolean external) throws IgniteCheckedException {
        try {
            if (rsrcMgr != null && !external) {
                File jobLocDir = jobLocalDir(igniteWorkDirectory(), locNodeId, jobId);

                if (jobLocDir.exists())
                    U.delete(jobLocDir);
            }
        }
        finally {
            taskCtxClsPool.clear();

            // First failure is remembered and rethrown at the end; later ones are dropped.
            Throwable err = null;

            // Stop the daemon threads that have been created
            // with the task class loaders:
            while (true) {
                Class<? extends HadoopTaskContext> cls = fullCtxClsQueue.poll();

                if (cls == null)
                    break;

                try {
                    final ClassLoader ldr = cls.getClassLoader();

                    try {
                        // Stop Hadoop daemons for this *task*:
                        stopHadoopFsDaemons(ldr);
                    }
                    catch (Exception e) {
                        if (err == null)
                            err = e;
                    }

                    // Also close all the FileSystems cached in
                    // HadoopLazyConcurrentMap for this *task* class loader:
                    closeCachedTaskFileSystems(ldr);
                }
                catch (Throwable e) {
                    if (err == null)
                        err = e;

                    if (e instanceof Error)
                        throw (Error)e;
                }
            }

            assert fullCtxClsQueue.isEmpty();

            try {
                // Close all cached file systems for this *Job*:
                fsMap.close();
            }
            catch (Exception e) {
                if (err == null)
                    err = e;
            }

            if (err != null)
                throw U.cast(err);
        }
    }

    /**
     * Stops Hadoop Fs daemon threads.
     * @param ldr The task ClassLoader to stop the daemons for.
     * @throws Exception On error.
     */
    private void stopHadoopFsDaemons(ClassLoader ldr) throws Exception {
        // Reflection is required because the class lives in the task class loader, not ours.
        Class<?> daemonCls = ldr.loadClass(HadoopClassLoader.CLS_DAEMON);

        Method m = daemonCls.getMethod("dequeueAndStopAll");

        m.invoke(null);
    }

    /**
     * Closes all the file systems used by the task.
     * @param ldr The task class loader.
     * @throws Exception On error.
     */
    private void closeCachedTaskFileSystems(ClassLoader ldr) throws Exception {
        // Invokes the static close() of the task-class-loader copy of HadoopV2TaskContext.
        Class<?> clazz = ldr.loadClass(HadoopV2TaskContext.class.getName());

        Method m = clazz.getMethod("close");

        m.invoke(null);
    }

    /** {@inheritDoc} */
    @Override public void prepareTaskEnvironment(HadoopTaskInfo info) throws IgniteCheckedException {
        rsrcMgr.prepareTaskWorkDir(taskLocalDir(igniteWorkDirectory(), locNodeId, info));
    }

    /** {@inheritDoc} */
    @Override public void cleanupTaskEnvironment(HadoopTaskInfo info) throws IgniteCheckedException {
        // NOTE(review): assumes a context exists for this task — ctxs.remove(...) would NPE
        // otherwise; presumably callers always pair this with getTaskContext. TODO confirm.
        HadoopTaskContext ctx = ctxs.remove(new T2<>(info.type(), info.taskNumber())).get();

        // Return the context class to the pool so its class loader can be reused.
        taskCtxClsPool.add(ctx.getClass());

        File locDir = taskLocalDir(igniteWorkDirectory(), locNodeId, info);

        if (locDir.exists())
            U.delete(locDir);
    }

    /** {@inheritDoc} */
    @Override public void cleanupStagingDirectory() {
        rsrcMgr.cleanupStagingDirectory();
    }

    /** {@inheritDoc} */
    @Override public String igniteWorkDirectory() {
        return helper.workDirectory();
    }

    /**
     * Getter for job configuration.
     * @return The job configuration.
     */
    public JobConf jobConf() {
        return jobConf;
    }

    /**
     * Gets file system for this job.
     * @param uri The uri.
     * @param cfg The configuration.
     * @return The file system.
     * @throws IOException On error.
     */
    public FileSystem fileSystem(@Nullable URI uri, Configuration cfg) throws IOException {
        return fileSystemForMrUserWithCaching(uri, cfg, fsMap);
    }

    /**
     * Create class loader with the given name.
     *
     * @param name Name.
     * @return Class loader.
     */
    private HadoopClassLoader createClassLoader(String name) {
        return new HadoopClassLoader(rsrcMgr.classPath(), name, libNames, helper);
    }
}
| |
/*
* Android SDK for Piwik
*
* @link https://github.com/piwik/piwik-android-sdk
* @license https://github.com/piwik/piwik-sdk-android/blob/master/LICENSE BSD-3 Clause
*/
package org.piwik.sdk;
import android.util.Log;
import org.json.JSONArray;
import org.json.JSONObject;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.piwik.sdk.dispatcher.Dispatcher;
import org.piwik.sdk.dispatcher.Packet;
import org.robolectric.Robolectric;
import org.robolectric.annotation.Config;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import java.util.UUID;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
@SuppressWarnings("deprecation")
@Config(emulateSdk = 18, manifest = Config.NONE)
@RunWith(FullEnvTestRunner.class)
/**
 * Robolectric tests for the Piwik {@link Dispatcher}: timeout handling,
 * forced/batched dispatching, and multi-threaded race conditions. Trackers
 * run in dry-run mode so dispatched packets are captured instead of sent.
 *
 * NOTE(review): several tests are timing-sensitive (fixed sleeps plus a
 * polling loop in {@link #checkForMIAs}); sleep durations are load-bearing.
 */
public class TestDispatcher {
    /** Creates a tracker against the test application's URL and site ID. */
    public Tracker createTracker() throws MalformedURLException {
        TestPiwikApplication app = (TestPiwikApplication) Robolectric.application;
        return Piwik.getInstance(Robolectric.application).newTracker(app.getTrackerUrl(), app.getSiteId());
    }

    /** Returns the Piwik singleton bound to the Robolectric application. */
    public Piwik getPiwik() {
        return Piwik.getInstance(Robolectric.application);
    }

    @Before
    public void setup() {
        // Dry run: packets are recorded, never sent over the network.
        Piwik.getInstance(Robolectric.application).setDryRun(true);
        Piwik.getInstance(Robolectric.application).setOptOut(false);
        Piwik.getInstance(Robolectric.application).setDebug(false);
    }

    /** The configured timeout must be read back unchanged. */
    @Test
    public void testSetTimeout() throws Exception {
        Dispatcher dispatcher = createTracker().getDispatcher();
        dispatcher.setTimeOut(100);
        assertEquals(dispatcher.getTimeOut(), 100);
    }

    /** A second forceDispatch while one is in flight must report false. */
    @Test
    public void testForceDispatchTwice() throws Exception {
        Dispatcher dispatcher = createTracker().getDispatcher();
        dispatcher.setDispatchInterval(-1);
        dispatcher.setTimeOut(20);
        dispatcher.submit("url");
        assertTrue(dispatcher.forceDispatch());
        assertFalse(dispatcher.forceDispatch());
    }

    /** POST dispatch of invalid packets (null target, malformed URL) must fail gracefully. */
    @Test
    public void testDoPostFailed() throws Exception {
        Dispatcher dispatcher = createTracker().getDispatcher();
        dispatcher.setTimeOut(1);
        assertFalse(dispatcher.dispatch(new Packet(null, null)));
        assertFalse(dispatcher.dispatch(new Packet(new URL("http://test/?s=^test"), new JSONObject())));
    }

    /** GET dispatch of a packet with a null target URL must fail gracefully. */
    @Test
    public void testDoGetFailed() throws Exception {
        Dispatcher dispatcher = createTracker().getDispatcher();
        dispatcher.setTimeOut(1);
        assertFalse(dispatcher.dispatch(new Packet(null)));
    }

    /** Encoding null must yield an empty string, not throw. */
    @Test
    public void testUrlEncodeUTF8() throws Exception {
        assertEquals(Dispatcher.urlEncodeUTF8((String) null), "");
    }

    /**
     * Repeatedly races many threads against a fresh tracker and asserts that
     * exactly the first dispatched query carries the session-start parameters
     * (lang, _idts, new_visit) and no later query does.
     */
    @Test
    public void testSessionStartRaceCondition() throws Exception {
        for (int i = 0; i < 10; i++) {
            Log.d("RaceConditionTest", (10 - i) + " race-condition tests to go.");
            getPiwik().setDryRun(true);
            final Tracker tracker = createTracker();
            tracker.setDispatchInterval(0);
            final int threadCount = 10;
            final int queryCount = 3;
            final List<String> createdEvents = Collections.synchronizedList(new ArrayList<String>());
            launchTestThreads(tracker, threadCount, queryCount, createdEvents);
            Thread.sleep(500);
            checkForMIAs(threadCount * queryCount, createdEvents, tracker.getDispatcher().getDryRunOutput());
            List<String> output = getFlattenedQueries(tracker.getDispatcher().getDryRunOutput());
            for (String out : output) {
                if (output.indexOf(out) == 0) {
                    // Only the very first query starts the session.
                    assertTrue(out.contains("lang"));
                    assertTrue(out.contains("_idts"));
                    assertTrue(out.contains("new_visit"));
                } else {
                    assertFalse(out.contains("lang"));
                    assertFalse(out.contains("_idts"));
                    assertFalse(out.contains("new_visit"));
                }
            }
        }
    }

    /** All events produced by many concurrent threads must eventually be dispatched. */
    @Test
    public void testMultiThreadDispatch() throws Exception {
        final Tracker tracker = createTracker();
        tracker.setDispatchInterval(20);
        final int threadCount = 20;
        final int queryCount = 100;
        final List<String> createdEvents = Collections.synchronizedList(new ArrayList<String>());
        launchTestThreads(tracker, threadCount, queryCount, createdEvents);
        checkForMIAs(threadCount * queryCount, createdEvents, tracker.getDispatcher().getDryRunOutput());
    }

    /** With auto-dispatch disabled, nothing leaves until dispatch() is called explicitly. */
    @Test
    public void testForceDispatch() throws Exception {
        final Tracker tracker = createTracker();
        tracker.setDispatchInterval(-1);
        final int threadCount = 10;
        final int queryCount = 10;
        final List<String> createdEvents = Collections.synchronizedList(new ArrayList<String>());
        launchTestThreads(tracker, threadCount, queryCount, createdEvents);
        Thread.sleep(500);
        assertEquals(threadCount * queryCount, createdEvents.size());
        // Nothing dispatched yet — interval is -1.
        assertEquals(0, tracker.getDispatcher().getDryRunOutput().size());
        assertTrue(tracker.dispatch());
        checkForMIAs(threadCount * queryCount, createdEvents, tracker.getDispatcher().getDryRunOutput());
    }

    /** Events queued within the dispatch interval are sent together afterwards. */
    @Test
    public void testBatchDispatch() throws Exception {
        final Tracker tracker = createTracker();
        tracker.setDispatchInterval(1500);
        final int threadCount = 5;
        final int queryCount = 5;
        final List<String> createdEvents = Collections.synchronizedList(new ArrayList<String>());
        launchTestThreads(tracker, threadCount, queryCount, createdEvents);
        Thread.sleep(1000);
        assertEquals(threadCount * queryCount, createdEvents.size());
        // Still inside the 1500 ms interval: nothing dispatched yet.
        assertEquals(0, tracker.getDispatcher().getDryRunOutput().size());
        Thread.sleep(1000);
        checkForMIAs(threadCount * queryCount, createdEvents, tracker.getDispatcher().getDryRunOutput());
    }

    /** Randomly mutates the dispatch interval while events stream in; none may be lost. */
    @Test
    public void testRandomDispatchIntervals() throws Exception {
        final Tracker tracker = createTracker();
        final int threadCount = 10;
        final int queryCount = 100;
        final List<String> createdEvents = Collections.synchronizedList(new ArrayList<String>());
        new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    // Keep flipping the interval between -1 and 18 until everything arrived.
                    while (getFlattenedQueries(new ArrayList<>(tracker.getDispatcher().getDryRunOutput())).size() != threadCount * queryCount)
                        tracker.setDispatchInterval(new Random().nextInt(20 - -1) + -1);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }).start();
        launchTestThreads(tracker, threadCount, queryCount, createdEvents);
        checkForMIAs(threadCount * queryCount, createdEvents, tracker.getDispatcher().getDryRunOutput());
    }

    /**
     * Polls the dry-run output until the expected number of flattened queries
     * arrived, failing if a polling round makes no progress, then verifies
     * every created event is present exactly once in the dispatched output.
     */
    public static void checkForMIAs(int expectedEvents, List<String> createdEvents, List<Packet> dryRunOutput) throws Exception {
        int previousEventCount = 0;
        int previousFlatQueryCount = 0;
        List<String> flattenedQueries;
        while (true) {
            Thread.sleep(500);
            flattenedQueries = getFlattenedQueries(new ArrayList<>(dryRunOutput));
            Log.d("checkForMIAs", createdEvents.size() + " events created, " + dryRunOutput.size() + " requests dispatched, containing " + flattenedQueries.size() + " flattened queries");
            if (flattenedQueries.size() == expectedEvents) {
                break;
            } else {
                // No progress since the last poll means events went missing — fail fast.
                int currentEventCount = createdEvents.size();
                int currentFlatQueryCount = flattenedQueries.size();
                assertNotEquals(previousEventCount, currentEventCount);
                assertNotEquals(previousFlatQueryCount, currentFlatQueryCount);
                previousEventCount = currentEventCount;
                previousFlatQueryCount = currentFlatQueryCount;
            }
        }
        assertEquals(flattenedQueries.size(), expectedEvents);
        assertEquals(createdEvents.size(), expectedEvents);
        // We are done, lets make sure can find all send queries in our dispatched results
        while (!createdEvents.isEmpty()) {
            String query = createdEvents.remove(0);
            assertTrue(flattenedQueries.remove(query));
        }
        assertTrue(createdEvents.isEmpty());
        assertTrue(flattenedQueries.isEmpty());
        Log.d("checkForMIAs", "All send queries are accounted for.");
    }

    /**
     * Fires {@code threadCount} threads, each tracking {@code queryCount}
     * random events against the tracker; every built query string is also
     * recorded into {@code createdQueries} for later reconciliation.
     */
    public static void launchTestThreads(final Tracker tracker, int threadCount, final int queryCount, final List<String> createdQueries) {
        Log.d("launchTestThreads", "Launching " + threadCount + " threads, " + queryCount + " queries each");
        for (int i = 0; i < threadCount; i++) {
            new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        for (int j = 0; j < queryCount; j++) {
                            Thread.sleep(new Random().nextInt(20 - 0) + 0);
                            TrackMe trackMe = new TrackMe()
                                    .set(QueryParams.EVENT_ACTION, UUID.randomUUID().toString())
                                    .set(QueryParams.EVENT_CATEGORY, UUID.randomUUID().toString())
                                    .set(QueryParams.EVENT_NAME, UUID.randomUUID().toString())
                                    .set(QueryParams.EVENT_VALUE, j);
                            tracker.track(trackMe);
                            createdQueries.add(tracker.getAPIUrl().toString() + trackMe.build());
                        }
                    } catch (Exception e) {
                        e.printStackTrace();
                        // Exceptions on worker threads would otherwise be lost; force a failure.
                        assertFalse(true);
                    }
                }
            }).start();
        }
        Log.d("launchTestThreads", "All launched.");
    }

    /**
     * Expands each dispatched packet into individual query strings: a JSON
     * packet's "requests" array becomes one entry per batched request,
     * otherwise the packet's target URL is used as-is.
     */
    public static List<String> getFlattenedQueries(List<Packet> packets) throws Exception {
        List<String> flattenedQueries = new ArrayList<>();
        for (Packet request : packets) {
            if (request.getJSONObject() != null) {
                JSONArray batchedRequests = request.getJSONObject().getJSONArray("requests");
                for (int json = 0; json < batchedRequests.length(); json++) {
                    String unbatchedRequest = request.getTargetURL().toExternalForm() + batchedRequests.get(json).toString();
                    flattenedQueries.add(unbatchedRequest);
                }
            } else {
                flattenedQueries.add(request.getTargetURL().toExternalForm());
            }
        }
        return flattenedQueries;
    }
}
| |
package io.dropwizard.util;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
import java.io.Serializable;
import java.util.Collections;
import java.util.Locale;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static java.util.Objects.requireNonNull;
/**
 * An immutable, thread-safe quantity of data expressed with an SI or IEC
 * prefix, such as {@code "128KB"} or {@code "5 Gibibytes"}, modeled in terms
 * of bytes.
 *
 * @see DataSizeUnit
 * @since 2.0
 */
public class DataSize implements Comparable<DataSize>, Serializable {
    private static final long serialVersionUID = 8517642678733072800L;

    /** A run of digits, optional whitespace, then an optional unit suffix. */
    private static final Pattern SIZE_PATTERN = Pattern.compile("(\\d+)\\s*(\\S*)");

    /** Case-insensitive lookup from textual suffix to its unit. */
    private static final SortedMap<String, DataSizeUnit> SUFFIXES = suffixTable();

    /** Builds the immutable suffix-to-unit lookup table. */
    private static SortedMap<String, DataSizeUnit> suffixTable() {
        final SortedMap<String, DataSizeUnit> table = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
        table.put("B", DataSizeUnit.BYTES);
        table.put("byte", DataSizeUnit.BYTES);
        table.put("bytes", DataSizeUnit.BYTES);
        table.put("K", DataSizeUnit.KILOBYTES);
        table.put("KB", DataSizeUnit.KILOBYTES);
        table.put("KiB", DataSizeUnit.KIBIBYTES);
        table.put("kilobyte", DataSizeUnit.KILOBYTES);
        table.put("kibibyte", DataSizeUnit.KIBIBYTES);
        table.put("kilobytes", DataSizeUnit.KILOBYTES);
        table.put("kibibytes", DataSizeUnit.KIBIBYTES);
        table.put("M", DataSizeUnit.MEGABYTES);
        table.put("MB", DataSizeUnit.MEGABYTES);
        table.put("MiB", DataSizeUnit.MEBIBYTES);
        table.put("megabyte", DataSizeUnit.MEGABYTES);
        table.put("mebibyte", DataSizeUnit.MEBIBYTES);
        table.put("megabytes", DataSizeUnit.MEGABYTES);
        table.put("mebibytes", DataSizeUnit.MEBIBYTES);
        table.put("G", DataSizeUnit.GIGABYTES);
        table.put("GB", DataSizeUnit.GIGABYTES);
        table.put("GiB", DataSizeUnit.GIBIBYTES);
        table.put("gigabyte", DataSizeUnit.GIGABYTES);
        table.put("gibibyte", DataSizeUnit.GIBIBYTES);
        table.put("gigabytes", DataSizeUnit.GIGABYTES);
        table.put("gibibytes", DataSizeUnit.GIBIBYTES);
        table.put("T", DataSizeUnit.TERABYTES);
        table.put("TB", DataSizeUnit.TERABYTES);
        table.put("TiB", DataSizeUnit.TEBIBYTES);
        table.put("terabyte", DataSizeUnit.TERABYTES);
        table.put("tebibyte", DataSizeUnit.TEBIBYTES);
        table.put("terabytes", DataSizeUnit.TERABYTES);
        table.put("tebibytes", DataSizeUnit.TEBIBYTES);
        table.put("P", DataSizeUnit.PETABYTES);
        table.put("PB", DataSizeUnit.PETABYTES);
        table.put("PiB", DataSizeUnit.PEBIBYTES);
        table.put("petabyte", DataSizeUnit.PETABYTES);
        table.put("pebibyte", DataSizeUnit.PEBIBYTES);
        table.put("petabytes", DataSizeUnit.PETABYTES);
        table.put("pebibytes", DataSizeUnit.PEBIBYTES);
        return Collections.unmodifiableSortedMap(table);
    }

    /** @return a size of {@code quantity} bytes */
    public static DataSize bytes(long quantity) {
        return new DataSize(quantity, DataSizeUnit.BYTES);
    }

    /** @return a size of {@code quantity} kilobytes (SI, 10^3 bytes) */
    public static DataSize kilobytes(long quantity) {
        return new DataSize(quantity, DataSizeUnit.KILOBYTES);
    }

    /** @return a size of {@code quantity} megabytes (SI, 10^6 bytes) */
    public static DataSize megabytes(long quantity) {
        return new DataSize(quantity, DataSizeUnit.MEGABYTES);
    }

    /** @return a size of {@code quantity} gigabytes (SI, 10^9 bytes) */
    public static DataSize gigabytes(long quantity) {
        return new DataSize(quantity, DataSizeUnit.GIGABYTES);
    }

    /** @return a size of {@code quantity} terabytes (SI, 10^12 bytes) */
    public static DataSize terabytes(long quantity) {
        return new DataSize(quantity, DataSizeUnit.TERABYTES);
    }

    /** @return a size of {@code quantity} petabytes (SI, 10^15 bytes) */
    public static DataSize petabytes(long quantity) {
        return new DataSize(quantity, DataSizeUnit.PETABYTES);
    }

    /** @return a size of {@code quantity} kibibytes (IEC, 2^10 bytes) */
    public static DataSize kibibytes(long quantity) {
        return new DataSize(quantity, DataSizeUnit.KIBIBYTES);
    }

    /** @return a size of {@code quantity} mebibytes (IEC, 2^20 bytes) */
    public static DataSize mebibytes(long quantity) {
        return new DataSize(quantity, DataSizeUnit.MEBIBYTES);
    }

    /** @return a size of {@code quantity} gibibytes (IEC, 2^30 bytes) */
    public static DataSize gibibytes(long quantity) {
        return new DataSize(quantity, DataSizeUnit.GIBIBYTES);
    }

    /** @return a size of {@code quantity} tebibytes (IEC, 2^40 bytes) */
    public static DataSize tebibytes(long quantity) {
        return new DataSize(quantity, DataSizeUnit.TEBIBYTES);
    }

    /** @return a size of {@code quantity} pebibytes (IEC, 2^50 bytes) */
    public static DataSize pebibytes(long quantity) {
        return new DataSize(quantity, DataSizeUnit.PEBIBYTES);
    }

    /**
     * Parses a textual size, defaulting a missing suffix to bytes.
     *
     * @param size text such as {@code "128KB"}
     * @return the parsed size
     * @throws IllegalArgumentException if the text or its suffix is invalid
     */
    @JsonCreator
    public static DataSize parse(CharSequence size) {
        return parse(size, DataSizeUnit.BYTES);
    }

    /**
     * Parses a textual size using {@code defaultUnit} when no suffix is given.
     *
     * @param size text such as {@code "5 Gibibytes"}
     * @param defaultUnit unit to assume when the suffix is absent
     * @return the parsed size
     * @throws IllegalArgumentException if the text or its suffix is invalid
     */
    public static DataSize parse(CharSequence size, DataSizeUnit defaultUnit) {
        final Matcher match = SIZE_PATTERN.matcher(size);
        if (!match.matches()) {
            throw new IllegalArgumentException("Invalid size: " + size);
        }

        final long quantity = Long.parseLong(match.group(1));
        final String suffix = match.group(2);

        // Empty suffix falls back to the caller's default; anything else must be in the table.
        DataSizeUnit resolved = defaultUnit;
        if (suffix != null && !suffix.isEmpty()) {
            resolved = SUFFIXES.get(suffix);
        }
        if (resolved == null) {
            throw new IllegalArgumentException("Invalid size: " + size + ". Wrong size unit");
        }

        return new DataSize(quantity, resolved);
    }

    /** Magnitude in {@link #unit} units. */
    private final long count;

    /** Unit the magnitude is expressed in. */
    private final DataSizeUnit unit;

    private DataSize(long count, DataSizeUnit unit) {
        this.count = count;
        this.unit = requireNonNull(unit);
    }

    /** @return the raw magnitude, in this size's own unit */
    public long getQuantity() {
        return count;
    }

    /** @return the unit this size is expressed in */
    public DataSizeUnit getUnit() {
        return unit;
    }

    /** @return this size converted to bytes */
    public long toBytes() {
        return DataSizeUnit.BYTES.convert(count, unit);
    }

    /** @return this size converted to kilobytes */
    public long toKilobytes() {
        return DataSizeUnit.KILOBYTES.convert(count, unit);
    }

    /** @return this size converted to megabytes */
    public long toMegabytes() {
        return DataSizeUnit.MEGABYTES.convert(count, unit);
    }

    /** @return this size converted to gigabytes */
    public long toGigabytes() {
        return DataSizeUnit.GIGABYTES.convert(count, unit);
    }

    /** @return this size converted to terabytes */
    public long toTerabytes() {
        return DataSizeUnit.TERABYTES.convert(count, unit);
    }

    /** @return this size converted to petabytes */
    public long toPetabytes() {
        return DataSizeUnit.PETABYTES.convert(count, unit);
    }

    /** @return this size converted to kibibytes */
    public long toKibibytes() {
        return DataSizeUnit.KIBIBYTES.convert(count, unit);
    }

    /** @return this size converted to mebibytes */
    public long toMebibytes() {
        return DataSizeUnit.MEBIBYTES.convert(count, unit);
    }

    /** @return this size converted to gibibytes */
    public long toGibibytes() {
        return DataSizeUnit.GIBIBYTES.convert(count, unit);
    }

    /** @return this size converted to tebibytes */
    public long toTebibytes() {
        return DataSizeUnit.TEBIBYTES.convert(count, unit);
    }

    /** @return this size converted to pebibytes */
    public long toPebibytes() {
        return DataSizeUnit.PEBIBYTES.convert(count, unit);
    }

    /** Two sizes are equal only with identical magnitude AND unit (2048 B != 2 KiB here). */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        final DataSize that = (DataSize) obj;
        return count == that.count && unit == that.unit;
    }

    @Override
    public int hashCode() {
        // Same value as the hand-rolled (count ^ (count >>> 32)) folding.
        return 31 * Long.hashCode(count) + unit.hashCode();
    }

    @Override
    @JsonValue
    public String toString() {
        final String unitName = unit.toString().toLowerCase(Locale.ENGLISH);
        // A quantity of exactly one drops the trailing plural "s".
        if (count == 1L) {
            return count + " " + unitName.substring(0, unitName.length() - 1);
        }
        return count + " " + unitName;
    }

    /** Orders by magnitude within a shared unit, otherwise by byte value. */
    @Override
    public int compareTo(DataSize other) {
        return unit == other.unit
                ? Long.compare(count, other.count)
                : Long.compare(toBytes(), other.toBytes());
    }

    /**
     * Construct an equivalent {@link Size} object from this {@link DataSize}.
     * IEC units map onto {@code Size}'s binary units directly; SI units are
     * converted down to bytes first.
     *
     * @deprecated {@link Size} is deprecated in favour of {@link DataSize}
     */
    @Deprecated
    public Size toSize() {
        switch (unit) {
            case BYTES:
                return Size.bytes(count);
            case KIBIBYTES:
                return Size.kilobytes(count);
            case MEBIBYTES:
                return Size.megabytes(count);
            case GIBIBYTES:
                return Size.gigabytes(count);
            case TEBIBYTES:
                return Size.terabytes(count);
            case PEBIBYTES:
                // Size has no pebibyte unit; express as 1024x terabytes.
                return Size.terabytes(count * 1024L);
            case KILOBYTES:
            case MEGABYTES:
            case GIGABYTES:
            case TERABYTES:
            case PETABYTES:
                return Size.bytes(toBytes());
            default:
                throw new IllegalArgumentException("Unknown unit: " + getUnit());
        }
    }

    /**
     * Construct an equivalent {@link DataSize} object from a {@link Size}
     * object ({@code Size}'s units are binary, hence the IEC factories).
     *
     * @deprecated {@link Size} is deprecated in favour of {@link DataSize}
     */
    @Deprecated
    public static DataSize fromSize(Size size) {
        switch (size.getUnit()) {
            case BYTES:
                return DataSize.bytes(size.toBytes());
            case KILOBYTES:
                return DataSize.kibibytes(size.toKilobytes());
            case MEGABYTES:
                return DataSize.mebibytes(size.toMegabytes());
            case GIGABYTES:
                return DataSize.gibibytes(size.toGigabytes());
            case TERABYTES:
                return DataSize.tebibytes(size.toTerabytes());
            default:
                throw new IllegalArgumentException("Unknown unit: " + size.getUnit());
        }
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.geospatial;
import com.facebook.presto.RowPagesBuilder;
import com.facebook.presto.common.Page;
import com.facebook.presto.common.block.Block;
import com.facebook.presto.geospatial.KdbTree;
import com.facebook.presto.geospatial.KdbTreeUtils;
import com.facebook.presto.geospatial.Rectangle;
import com.facebook.presto.operator.Driver;
import com.facebook.presto.operator.DriverContext;
import com.facebook.presto.operator.InternalJoinFilterFunction;
import com.facebook.presto.operator.Operator;
import com.facebook.presto.operator.OperatorFactory;
import com.facebook.presto.operator.PagesIndex.TestingFactory;
import com.facebook.presto.operator.PagesSpatialIndex;
import com.facebook.presto.operator.PagesSpatialIndexFactory;
import com.facebook.presto.operator.SpatialIndexBuilderOperator.SpatialIndexBuilderOperatorFactory;
import com.facebook.presto.operator.SpatialIndexBuilderOperator.SpatialPredicate;
import com.facebook.presto.operator.SpatialJoinOperator.SpatialJoinOperatorFactory;
import com.facebook.presto.operator.StandardJoinFilterFunction;
import com.facebook.presto.operator.TaskContext;
import com.facebook.presto.operator.ValuesOperator;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.plan.PlanNodeId;
import com.facebook.presto.sql.gen.JoinFilterFunctionCompiler;
import com.facebook.presto.sql.planner.plan.SpatialJoinNode.Type;
import com.facebook.presto.testing.MaterializedResult;
import com.facebook.presto.testing.TestingTaskContext;
import com.google.common.collect.ImmutableList;
import com.google.common.primitives.Ints;
import com.google.common.util.concurrent.ListenableFuture;
import io.airlift.slice.Slice;
import io.airlift.slice.Slices;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.atomic.AtomicInteger;
import static com.facebook.airlift.concurrent.Threads.daemonThreadsNamed;
import static com.facebook.presto.RowPagesBuilder.rowPagesBuilder;
import static com.facebook.presto.SessionTestUtils.TEST_SESSION;
import static com.facebook.presto.common.type.DoubleType.DOUBLE;
import static com.facebook.presto.common.type.IntegerType.INTEGER;
import static com.facebook.presto.common.type.VarcharType.VARCHAR;
import static com.facebook.presto.operator.OperatorAssertion.assertOperatorEqualsIgnoreOrder;
import static com.facebook.presto.plugin.geospatial.GeoFunctions.spatialPartitions;
import static com.facebook.presto.plugin.geospatial.GeoFunctions.stGeometryFromText;
import static com.facebook.presto.plugin.geospatial.GeoFunctions.stPoint;
import static com.facebook.presto.plugin.geospatial.GeometryType.GEOMETRY;
import static com.facebook.presto.sql.planner.plan.SpatialJoinNode.Type.INNER;
import static com.facebook.presto.sql.planner.plan.SpatialJoinNode.Type.LEFT;
import static com.facebook.presto.testing.MaterializedResult.resultBuilder;
import static java.util.concurrent.Executors.newScheduledThreadPool;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
@Test(singleThreaded = true)
public class TestSpatialJoinOperator
{
private static final KdbTree KDB_TREE = KdbTree.buildKdbTree(
2,
ImmutableList.of(
new Rectangle(-2, -2, -2, -2),
new Rectangle(0, 0, 0, 0),
new Rectangle(-1, -2, 4, 3),
new Rectangle(6, 1, 6, 1),
new Rectangle(3, 9, 3, 9),
new Rectangle(15, 15, 15, 15)));
private static final String KDB_TREE_JSON = KdbTreeUtils.toJson(KDB_TREE);
// 2 intersecting polygons: A and B
private static final Slice POLYGON_A = stGeometryFromText(Slices.utf8Slice("POLYGON ((0 0, -0.5 2.5, 0 5, 2.5 5.5, 5 5, 5.5 2.5, 5 0, 2.5 -0.5, 0 0))"));
private static final Slice POLYGON_B = stGeometryFromText(Slices.utf8Slice("POLYGON ((4 4, 3.5 7, 4 10, 7 10.5, 10 10, 10.5 7, 10 4, 7 3.5, 4 4))"));
private static final Slice POLYGON_C = stGeometryFromText(Slices.utf8Slice("POLYGON ((15 15, 15 14, 14 14, 14 15, 15 15))"));
private static final Slice POLYGON_D = stGeometryFromText(Slices.utf8Slice("POLYGON ((18 18, 18 19, 19 19, 19 18, 18 18))"));
// A set of points: X in A, Y in A and B, Z in B, W outside of A and B
private static final Slice POINT_X = stPoint(1, 1);
private static final Slice POINT_Y = stPoint(4.5, 4.5);
private static final Slice POINT_Z = stPoint(6, 6);
private static final Slice POINT_W = stPoint(20, 20);
private static final Slice POINT_V = stPoint(15, 15);
private static final Slice MULTIPOINT_U = stGeometryFromText(Slices.utf8Slice("MULTIPOINT (15 15)"));
private static final Slice MULTIPOINT_T = stGeometryFromText(Slices.utf8Slice("MULTIPOINT (14.5 14.5, 16 16)"));
private static final Slice POINT_S = stPoint(18, 18);
private static final Slice MULTIPOINT_R = stGeometryFromText(Slices.utf8Slice("MULTIPOINT (15 15, 19 19)"));
private static final Slice POINT_Q = stPoint(28, 28);
private ExecutorService executor;
private ScheduledExecutorService scheduledExecutor;
/** Creates fresh probe/build executors before every test method. */
@BeforeMethod
public void setUp()
{
// Before/AfterMethod is chosen here because the executor needs to be shutdown
// after every single test case to terminate outstanding threads, if any.
// The line below is the same as newCachedThreadPool(daemonThreadsNamed(...)) except RejectionExecutionHandler.
// RejectionExecutionHandler is set to DiscardPolicy (instead of the default AbortPolicy) here.
// Otherwise, a large number of RejectedExecutionException will flood logging, resulting in Travis failure.
executor = new ThreadPoolExecutor(
0,
Integer.MAX_VALUE,
60L,
SECONDS,
new SynchronousQueue<>(),
daemonThreadsNamed("test-executor-%s"),
new ThreadPoolExecutor.DiscardPolicy());
scheduledExecutor = newScheduledThreadPool(2, daemonThreadsNamed("test-scheduledExecutor-%s"));
}
/** Shuts both executors down after every test so no worker threads leak across test cases. */
@AfterMethod(alwaysRun = true)
public void tearDown()
{
executor.shutdownNow();
scheduledExecutor.shutdownNow();
}
/**
 * INNER spatial join of probe points against build polygons. Expected pairs follow the
 * geometry layout declared in the class constants (X in A; Y in A and B; Z in B; V/U/T
 * intersect C; S intersects D; R intersects C and D). Null geometries and unmatched
 * probe rows ("w", "q") must produce no output rows for an inner join.
 */
@Test
public void testSpatialJoin()
{
TaskContext taskContext = createTaskContext();
RowPagesBuilder buildPages = rowPagesBuilder(ImmutableList.of(GEOMETRY, VARCHAR))
.row(POLYGON_A, "A")
.row(null, "null")
.pageBreak()
.row(POLYGON_B, "B")
.row(POLYGON_C, "C")
.row(POLYGON_D, "D");
RowPagesBuilder probePages = rowPagesBuilder(ImmutableList.of(GEOMETRY, VARCHAR))
.row(POINT_X, "x")
.row(null, "null")
.row(POINT_Y, "y")
.pageBreak()
.row(POINT_Z, "z")
.pageBreak()
.row(POINT_W, "w")
.row(POINT_V, "v")
.row(MULTIPOINT_U, "u")
.pageBreak()
.row(MULTIPOINT_T, "t")
.row(POINT_S, "s")
.row(MULTIPOINT_R, "r")
.row(POINT_Q, "q");
MaterializedResult expected = resultBuilder(taskContext.getSession(), ImmutableList.of(VARCHAR, VARCHAR))
.row("x", "A")
.row("y", "A")
.row("y", "B")
.row("z", "B")
.row("v", "C")
.row("u", "C")
.row("t", "C")
.row("s", "D")
.row("r", "C")
.row("r", "D")
.build();
assertSpatialJoin(taskContext, INNER, buildPages, probePages, expected);
}
/**
 * LEFT spatial join over the same data as {@code testSpatialJoin}: every probe row
 * appears in the output; rows with no intersecting polygon (including the null-geometry
 * probe, "w" and "q") are emitted once with a null build value.
 */
@Test
public void testSpatialLeftJoin()
{
TaskContext taskContext = createTaskContext();
RowPagesBuilder buildPages = rowPagesBuilder(ImmutableList.of(GEOMETRY, VARCHAR))
.row(POLYGON_A, "A")
.row(null, "null")
.pageBreak()
.row(POLYGON_B, "B")
.row(POLYGON_C, "C")
.row(POLYGON_D, "D");
RowPagesBuilder probePages = rowPagesBuilder(ImmutableList.of(GEOMETRY, VARCHAR))
.row(POINT_X, "x")
.row(null, "null")
.row(POINT_Y, "y")
.pageBreak()
.row(POINT_Z, "z")
.pageBreak()
.row(POINT_W, "w")
.row(POINT_V, "v")
.row(MULTIPOINT_U, "u")
.pageBreak()
.row(MULTIPOINT_T, "t")
.row(POINT_S, "s")
.row(MULTIPOINT_R, "r")
.row(POINT_Q, "q");
MaterializedResult expected = resultBuilder(taskContext.getSession(), ImmutableList.of(VARCHAR, VARCHAR))
.row("x", "A")
.row("null", null)
.row("y", "A")
.row("y", "B")
.row("z", "B")
.row("w", null)
.row("v", "C")
.row("u", "C")
.row("t", "C")
.row("s", "D")
.row("r", "C")
.row("r", "D")
.row("q", null)
.build();
assertSpatialJoin(taskContext, LEFT, buildPages, probePages, expected);
}
/**
 * Builds a spatial index over {@code buildPages} using an intersects predicate, runs a
 * join of the given {@code joinType} over {@code probePages}, and compares the operator
 * output against {@code expected} ignoring row order.
 */
private void assertSpatialJoin(TaskContext taskContext, Type joinType, RowPagesBuilder buildPages, RowPagesBuilder probePages, MaterializedResult expected)
{
DriverContext driverContext = taskContext.addPipelineContext(0, true, true, false).addDriverContext();
PagesSpatialIndexFactory pagesSpatialIndexFactory = buildIndex(driverContext, (build, probe, r) -> build.intersects(probe), Optional.empty(), Optional.empty(), buildPages);
OperatorFactory joinOperatorFactory = new SpatialJoinOperatorFactory(2, new PlanNodeId("test"), joinType, probePages.getTypes(), Ints.asList(1), 0, Optional.empty(), pagesSpatialIndexFactory);
assertOperatorEqualsIgnoreOrder(joinOperatorFactory, driverContext, probePages.build(), expected);
}
/** INNER join against an empty build side must produce no output rows at all. */
@Test
public void testEmptyBuild()
{
TaskContext taskContext = createTaskContext();
RowPagesBuilder buildPages = rowPagesBuilder(ImmutableList.of(GEOMETRY, VARCHAR));
RowPagesBuilder probePages = rowPagesBuilder(ImmutableList.of(GEOMETRY, VARCHAR))
.row(POINT_X, "x")
.row(null, "null")
.row(POINT_Y, "y")
.pageBreak()
.row(POINT_Z, "z")
.pageBreak()
.row(POINT_W, "w");
MaterializedResult expected = resultBuilder(taskContext.getSession(), ImmutableList.of(VARCHAR, VARCHAR)).build();
assertSpatialJoin(taskContext, INNER, buildPages, probePages, expected);
}
/**
 * LEFT join against an empty build side: every probe row (including the null-geometry
 * one) is emitted exactly once, paired with a null build value.
 */
@Test
public void testEmptyBuildLeftJoin()
{
TaskContext taskContext = createTaskContext();
RowPagesBuilder buildPages = rowPagesBuilder(ImmutableList.of(GEOMETRY, VARCHAR));
RowPagesBuilder probePages = rowPagesBuilder(ImmutableList.of(GEOMETRY, VARCHAR))
.row(POINT_X, "x")
.row(null, "null")
.row(POINT_Y, "y")
.pageBreak()
.row(POINT_Z, "z")
.pageBreak()
.row(POINT_W, "w");
MaterializedResult expected = resultBuilder(taskContext.getSession(), ImmutableList.of(VARCHAR, VARCHAR))
.row("x", null)
.row("null", null)
.row("y", null)
.row("z", null)
.row("w", null)
.build();
assertSpatialJoin(taskContext, LEFT, buildPages, probePages, expected);
}
/** INNER join with an empty probe side must produce no output rows. */
@Test
public void testEmptyProbe()
{
TaskContext taskContext = createTaskContext();
RowPagesBuilder buildPages = rowPagesBuilder(ImmutableList.of(GEOMETRY, VARCHAR))
.row(POLYGON_A, "A")
.row(null, "null")
.pageBreak()
.row(POLYGON_B, "B");
RowPagesBuilder probePages = rowPagesBuilder(ImmutableList.of(GEOMETRY, VARCHAR));
MaterializedResult expected = resultBuilder(taskContext.getSession(), ImmutableList.of(VARCHAR, VARCHAR)).build();
assertSpatialJoin(taskContext, INNER, buildPages, probePages, expected);
}
/**
 * Verifies the join operator honors the yield signal: with a filter function that forces
 * a yield on every match, 30 probe points producing 40 matches must cause exactly 40
 * yields (one filter call per getOutput attempt) before a full 40-row page is produced.
 */
@Test
public void testYield()
{
// create a filter function that yields for every probe match
// verify we will yield #match times totally
TaskContext taskContext = createTaskContext();
DriverContext driverContext = taskContext.addPipelineContext(0, true, true, false).addDriverContext();
// force a yield for every match
AtomicInteger filterFunctionCalls = new AtomicInteger();
InternalJoinFilterFunction filterFunction = new TestInternalJoinFilterFunction((
(leftPosition, leftPage, rightPosition, rightPage) -> {
filterFunctionCalls.incrementAndGet();
driverContext.getYieldSignal().forceYieldForTesting();
return true;
}));
RowPagesBuilder buildPages = rowPagesBuilder(ImmutableList.of(GEOMETRY, VARCHAR))
.row(POLYGON_A, "A")
.pageBreak()
.row(POLYGON_B, "B");
PagesSpatialIndexFactory pagesSpatialIndexFactory = buildIndex(driverContext, (build, probe, r) -> build.contains(probe), Optional.empty(), Optional.of(filterFunction), buildPages);
// 10 points in polygon A (x0...x9)
// 10 points in polygons A and B (y0...y9)
// 10 points in polygon B (z0...z9)
// 40 total matches
RowPagesBuilder probePages = rowPagesBuilder(ImmutableList.of(GEOMETRY, VARCHAR));
for (int i = 0; i < 10; i++) {
probePages.row(stPoint(1 + 0.1 * i, 1 + 0.1 * i), "x" + i);
}
for (int i = 0; i < 10; i++) {
probePages.row(stPoint(4.5 + 0.01 * i, 4.5 + 0.01 * i), "y" + i);
}
for (int i = 0; i < 10; i++) {
probePages.row(stPoint(6 + 0.1 * i, 6 + 0.1 * i), "z" + i);
}
List<Page> probeInput = probePages.build();
OperatorFactory joinOperatorFactory = new SpatialJoinOperatorFactory(2, new PlanNodeId("test"), INNER, probePages.getTypes(), Ints.asList(1), 0, Optional.empty(), pagesSpatialIndexFactory);
Operator operator = joinOperatorFactory.createOperator(driverContext);
assertTrue(operator.needsInput());
operator.addInput(probeInput.get(0));
operator.finish();
// we will yield 40 times due to filterFunction
for (int i = 0; i < 40; i++) {
driverContext.getYieldSignal().setWithDelay(5 * SECONDS.toNanos(1), driverContext.getYieldExecutor());
assertNull(operator.getOutput());
assertEquals(filterFunctionCalls.get(), i + 1, "Expected join to stop processing (yield) after calling filter function once");
driverContext.getYieldSignal().reset();
}
// delayed yield is not going to prevent operator from producing a page now (yield won't be forced because filter function won't be called anymore)
driverContext.getYieldSignal().setWithDelay(5 * SECONDS.toNanos(1), driverContext.getYieldExecutor());
Page output = operator.getOutput();
assertNotNull(output);
// make sure we have 40 matches
assertEquals(output.getPositionCount(), 40);
}
/**
 * Distance-based join: a probe point matches a build point when their distance is within
 * the per-row radius read from build channel 2 (all 1.5 here). Also exercises spatial
 * index reference counting by duplicating and immediately closing the join factory.
 */
@Test
public void testDistanceQuery()
{
TaskContext taskContext = createTaskContext();
DriverContext driverContext = taskContext.addPipelineContext(0, true, true, false).addDriverContext();
RowPagesBuilder buildPages = rowPagesBuilder(ImmutableList.of(GEOMETRY, VARCHAR, DOUBLE))
.row(stPoint(0, 0), "0_0", 1.5)
.row(null, "null", 1.5)
.row(stPoint(1, 0), "1_0", 1.5)
.pageBreak()
.row(stPoint(3, 0), "3_0", 1.5)
.pageBreak()
.row(stPoint(10, 0), "10_0", 1.5);
PagesSpatialIndexFactory pagesSpatialIndexFactory = buildIndex(driverContext, (build, probe, r) -> build.distance(probe) <= r.getAsDouble(), Optional.of(2), Optional.empty(), buildPages);
RowPagesBuilder probePages = rowPagesBuilder(ImmutableList.of(GEOMETRY, VARCHAR))
.row(stPoint(0, 1), "0_1")
.row(null, "null")
.row(stPoint(1, 1), "1_1")
.pageBreak()
.row(stPoint(3, 1), "3_1")
.pageBreak()
.row(stPoint(10, 1), "10_1");
OperatorFactory joinOperatorFactory = new SpatialJoinOperatorFactory(2, new PlanNodeId("test"), INNER, probePages.getTypes(), Ints.asList(1), 0, Optional.empty(), pagesSpatialIndexFactory);
// Make sure that spatial index reference counting works with duplicate factories
joinOperatorFactory.duplicate().noMoreOperators();
MaterializedResult expected = resultBuilder(taskContext.getSession(), ImmutableList.of(VARCHAR, VARCHAR))
.row("0_1", "0_0")
.row("0_1", "1_0")
.row("1_1", "0_0")
.row("1_1", "1_0")
.row("3_1", "3_0")
.row("10_1", "10_0")
.build();
assertOperatorEqualsIgnoreOrder(joinOperatorFactory, driverContext, probePages.build(), expected);
}
/**
 * Partitioned (distributed) spatial join: both sides carry a KDB-tree partition id in
 * channel 2 (rows replicated once per partition by addGeometryPartitionRows), and the
 * operator is given the KDB-tree JSON plus the partition channel. The expected output
 * contains each matching pair exactly once despite the cross-partition replication.
 */
@Test
public void testDistributedSpatialJoin()
{
TaskContext taskContext = createTaskContext();
DriverContext driverContext = taskContext.addPipelineContext(0, true, true, true).addDriverContext();
RowPagesBuilder buildPages = rowPagesBuilder(ImmutableList.of(GEOMETRY, VARCHAR, INTEGER));
addGeometryPartitionRows(buildPages, POLYGON_A, "A");
buildPages.row(null, "null", null);
buildPages.pageBreak();
addGeometryPartitionRows(buildPages, POLYGON_B, "B");
addGeometryPartitionRows(buildPages, POLYGON_C, "C");
addGeometryPartitionRows(buildPages, POLYGON_D, "D");
RowPagesBuilder probePages = rowPagesBuilder(ImmutableList.of(GEOMETRY, VARCHAR, INTEGER));
addGeometryPartitionRows(probePages, POINT_X, "x");
probePages.row(null, "null", null);
addGeometryPartitionRows(probePages, POINT_Y, "y");
probePages.pageBreak();
addGeometryPartitionRows(probePages, POINT_Z, "z");
addGeometryPartitionRows(probePages, POINT_W, "w");
addGeometryPartitionRows(probePages, POINT_V, "v");
addGeometryPartitionRows(probePages, MULTIPOINT_U, "u");
probePages.pageBreak();
addGeometryPartitionRows(probePages, MULTIPOINT_T, "t");
addGeometryPartitionRows(probePages, POINT_S, "s");
addGeometryPartitionRows(probePages, MULTIPOINT_R, "r");
addGeometryPartitionRows(probePages, POINT_Q, "q");
MaterializedResult expected = resultBuilder(taskContext.getSession(), ImmutableList.of(VARCHAR, VARCHAR))
.row("x", "A")
.row("y", "A")
.row("y", "B")
.row("z", "B")
.row("v", "C")
.row("u", "C")
.row("t", "C")
.row("s", "D")
.row("r", "C")
.row("r", "D")
.build();
PagesSpatialIndexFactory pagesSpatialIndexFactory = buildIndex(driverContext, (build, probe, r) -> build.intersects(probe), Optional.empty(), Optional.of(2), Optional.of(KDB_TREE_JSON), Optional.empty(), buildPages);
OperatorFactory joinOperatorFactory = new SpatialJoinOperatorFactory(2, new PlanNodeId("test"), INNER, probePages.getTypes(), Ints.asList(1), 0, Optional.of(2), pagesSpatialIndexFactory);
assertOperatorEqualsIgnoreOrder(joinOperatorFactory, driverContext, probePages.build(), expected);
}
/**
 * Partitioned self-join: the same partition-annotated pages feed both build and probe
 * sides. Polygons A and B intersect each other (and themselves), giving the full 2x2
 * cross of ("A","B") pairs; the null-geometry row contributes nothing.
 */
@Test
public void testDistributedSpatialSelfJoin()
{
TaskContext taskContext = createTaskContext();
DriverContext driverContext = taskContext.addPipelineContext(0, true, true, true).addDriverContext();
RowPagesBuilder pages = rowPagesBuilder(ImmutableList.of(GEOMETRY, VARCHAR, INTEGER));
addGeometryPartitionRows(pages, POLYGON_A, "A");
pages.row(null, "null", null);
pages.pageBreak();
addGeometryPartitionRows(pages, POLYGON_B, "B");
MaterializedResult expected = resultBuilder(taskContext.getSession(), ImmutableList.of(VARCHAR, VARCHAR))
.row("A", "A")
.row("A", "B")
.row("B", "A")
.row("B", "B")
.build();
PagesSpatialIndexFactory pagesSpatialIndexFactory = buildIndex(driverContext, (build, probe, r) -> build.intersects(probe), Optional.empty(), Optional.of(2), Optional.of(KDB_TREE_JSON), Optional.empty(), pages);
OperatorFactory joinOperatorFactory = new SpatialJoinOperatorFactory(2, new PlanNodeId("test"), INNER, pages.getTypes(), Ints.asList(1), 0, Optional.of(2), pagesSpatialIndexFactory);
assertOperatorEqualsIgnoreOrder(joinOperatorFactory, driverContext, pages.build(), expected);
}
/**
 * Appends one row per KDB-tree partition the geometry falls into, replicating the
 * geometry/name pair with each partition id — this is how partitioned-join inputs
 * are prepared for the distributed tests.
 */
private void addGeometryPartitionRows(RowPagesBuilder pageBuilder, Slice geometry, String geometryName)
{
    Block partitions = spatialPartitions(KDB_TREE, geometry);
    int partitionCount = partitions.getPositionCount();
    for (int index = 0; index < partitionCount; index++) {
        pageBuilder.row(geometry, geometryName, partitions.getInt(index));
    }
}
/** Convenience overload for non-partitioned indexes: no partition channel, no KDB tree. */
private PagesSpatialIndexFactory buildIndex(DriverContext driverContext, SpatialPredicate spatialRelationshipTest, Optional<Integer> radiusChannel, Optional<InternalJoinFilterFunction> filterFunction, RowPagesBuilder buildPages)
{
return buildIndex(driverContext, spatialRelationshipTest, radiusChannel, Optional.empty(), Optional.empty(), filterFunction, buildPages);
}
/**
 * Builds a {@link PagesSpatialIndexFactory} by wiring a values operator into a
 * spatial-index-builder operator and driving that pipeline on the current thread until
 * the index future completes; remaining driver work then continues on the background
 * executor. The probe-side reference taken here is released before returning.
 */
private PagesSpatialIndexFactory buildIndex(DriverContext driverContext, SpatialPredicate spatialRelationshipTest, Optional<Integer> radiusChannel, Optional<Integer> partitionChannel, Optional<String> kdbTreeJson, Optional<InternalJoinFilterFunction> filterFunction, RowPagesBuilder buildPages)
{
// adapt the optional raw filter function into the factory shape the builder expects
Optional<JoinFilterFunctionCompiler.JoinFilterFunctionFactory> filterFunctionFactory = filterFunction
.map(function -> (session, addresses, pages) -> new StandardJoinFilterFunction(function, addresses, pages));
ValuesOperator.ValuesOperatorFactory valuesOperatorFactory = new ValuesOperator.ValuesOperatorFactory(0, new PlanNodeId("test"), buildPages.build());
SpatialIndexBuilderOperatorFactory buildOperatorFactory = new SpatialIndexBuilderOperatorFactory(
1,
new PlanNodeId("test"),
buildPages.getTypes(),
Ints.asList(1),
0,
radiusChannel,
partitionChannel,
spatialRelationshipTest,
kdbTreeJson,
filterFunctionFactory,
10_000,
new TestingFactory(false));
Driver driver = Driver.createDriver(
driverContext,
valuesOperatorFactory.createOperator(driverContext),
buildOperatorFactory.createOperator(driverContext));
PagesSpatialIndexFactory pagesSpatialIndexFactory = buildOperatorFactory.getPagesSpatialIndexFactory();
ListenableFuture<PagesSpatialIndex> pagesSpatialIndex = pagesSpatialIndexFactory.createPagesSpatialIndex();
// drive the build pipeline synchronously until the spatial index is fully built
while (!pagesSpatialIndex.isDone()) {
driver.process();
}
// Release the spatial index reference
pagesSpatialIndexFactory.probeOperatorFinished();
// finish any outstanding driver work asynchronously so this method can return
runDriverInThread(executor, driver);
return pagesSpatialIndexFactory;
}
/**
 * Runs Driver in another thread until it is finished.
 *
 * <p>Rather than looping inside one task, each step re-submits itself to the executor,
 * so a single long-running driver never monopolizes an executor thread.
 */
private static void runDriverInThread(ExecutorService executor, Driver driver)
{
executor.execute(() -> {
if (!driver.isFinished()) {
try {
driver.process();
}
catch (PrestoException e) {
// record the failure on the driver context before propagating it
driver.getDriverContext().failed(e);
throw e;
}
runDriverInThread(executor, driver);
}
});
}
/** Creates a task context bound to this test's executors and the shared test session. */
private TaskContext createTaskContext()
{
return TestingTaskContext.createTaskContext(executor, scheduledExecutor, TEST_SESSION);
}
/**
 * {@link InternalJoinFilterFunction} backed by a caller-supplied lambda; lets tests
 * observe or influence every probe/build match (e.g. forcing yields in testYield).
 */
private static class TestInternalJoinFilterFunction
        implements InternalJoinFilterFunction
{
    public interface Lambda
    {
        boolean filter(int leftPosition, Page leftPage, int rightPosition, Page rightPage);
    }

    private final Lambda delegate;

    private TestInternalJoinFilterFunction(Lambda delegate)
    {
        this.delegate = delegate;
    }

    @Override
    public boolean filter(int leftPosition, Page leftPage, int rightPosition, Page rightPage)
    {
        // forward the match straight to the injected lambda
        return delegate.filter(leftPosition, leftPage, rightPosition, rightPage);
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.web.server;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import javax.servlet.DispatcherType;
import javax.servlet.ServletContext;
import org.apache.nifi.NiFiServer;
import org.apache.nifi.controller.FlowSerializationException;
import org.apache.nifi.controller.FlowSynchronizationException;
import org.apache.nifi.controller.UninheritableFlowException;
import org.apache.nifi.lifecycle.LifeCycleStartException;
import org.apache.nifi.nar.ExtensionMapping;
import org.apache.nifi.nar.NarClassLoaders;
import org.apache.nifi.services.FlowService;
import org.apache.nifi.util.NiFiProperties;
import org.apache.nifi.web.NiFiWebContext;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.HttpConfiguration;
import org.eclipse.jetty.server.HttpConnectionFactory;
import org.eclipse.jetty.server.SecureRequestCustomizer;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.server.SslConnectionFactory;
import org.eclipse.jetty.server.handler.ContextHandler;
import org.eclipse.jetty.server.handler.ContextHandlerCollection;
import org.eclipse.jetty.server.handler.HandlerCollection;
import org.eclipse.jetty.server.handler.ResourceHandler;
import org.eclipse.jetty.servlet.FilterHolder;
import org.eclipse.jetty.util.resource.Resource;
import org.eclipse.jetty.util.resource.ResourceCollection;
import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.eclipse.jetty.util.thread.QueuedThreadPool;
import org.eclipse.jetty.webapp.WebAppClassLoader;
import org.eclipse.jetty.webapp.WebAppContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.context.support.WebApplicationContextUtils;
/**
* Encapsulates the Jetty instance.
*/
public class JettyServer implements NiFiServer {
private static final Logger logger = LoggerFactory.getLogger(JettyServer.class);
private static final String WEB_DEFAULTS_XML = "org/apache/nifi/web/webdefault.xml";
private static final int HEADER_BUFFER_SIZE = 16 * 1024; // 16kb
// accepts only regular files whose name (case-insensitively) ends with ".war"
private static final FileFilter WAR_FILTER = new FileFilter() {
@Override
public boolean accept(File pathname) {
final String nameToTest = pathname.getName().toLowerCase();
return nameToTest.endsWith(".war") && pathname.isFile();
}
};
private final Server server;
private ExtensionMapping extensionMapping;
private WebAppContext webApiContext;
private WebAppContext webDocsContext;
private Collection<WebAppContext> customUiWebContexts;
private final NiFiProperties props;
/**
 * Creates and configures a new Jetty instance. Connectors are configured and all WARs
 * from the NAR working directories are deployed at construction time.
 *
 * @param props the configuration
 */
public JettyServer(final NiFiProperties props) {
final QueuedThreadPool threadPool = new QueuedThreadPool();
threadPool.setName("NiFi Web Server");
// create the server
this.server = new Server(threadPool);
this.props = props;
// configure server
configureConnectors(server);
// load wars from the nar working directories
loadWars(locateNarWorkingDirectories());
}
/**
 * Locates the working directory for each NAR.
 *
 * @return the framework working directory contents plus every extension working
 *         directory's contents, combined into a single set
 * @throws IllegalStateException if either working directory cannot be listed
 */
private Set<File> locateNarWorkingDirectories() {
    final File frameworkWorkingDir = props.getFrameworkWorkingDirectory();
    final File extensionsWorkingDir = props.getExtensionsWorkingDirectory();

    // listFiles() returns null when the path is not a readable directory
    final File[] frameworkContents = frameworkWorkingDir.listFiles();
    if (frameworkContents == null) {
        throw new IllegalStateException(String.format("Unable to access framework working directory: %s", frameworkWorkingDir.getAbsolutePath()));
    }
    final File[] extensionContents = extensionsWorkingDir.listFiles();
    if (extensionContents == null) {
        throw new IllegalStateException(String.format("Unable to access extensions working directory: %s", extensionsWorkingDir.getAbsolutePath()));
    }

    // we want to consider the framework and all extension NARs
    final Set<File> narWorkingDirectories = new HashSet<>();
    Collections.addAll(narWorkingDirectories, frameworkContents);
    Collections.addAll(narWorkingDirectories, extensionContents);
    return narWorkingDirectories;
}
/**
 * Loads the WARs in the specified NAR working directories. A WAR file must
 * have a ".war" extension.
 *
 * @param narWorkingDirectories the NAR working directories containing WARs to load
 */
private void loadWars(final Set<File> narWorkingDirectories) {
// load WARs
Map<File, File> warToNarWorkingDirectoryLookup = findWars(narWorkingDirectories);
// locate each war being deployed
File webUiWar = null;
File webApiWar = null;
File webErrorWar = null;
File webDocsWar = null;
List<File> otherWars = new ArrayList<>();
// note: the plain "nifi-web" check must stay last since the other names share its prefix
for (File war : warToNarWorkingDirectoryLookup.keySet()) {
if (war.getName().toLowerCase().startsWith("nifi-web-api")) {
webApiWar = war;
} else if (war.getName().toLowerCase().startsWith("nifi-web-error")) {
webErrorWar = war;
} else if (war.getName().toLowerCase().startsWith("nifi-web-docs")) {
webDocsWar = war;
} else if (war.getName().toLowerCase().startsWith("nifi-web")) {
webUiWar = war;
} else {
otherWars.add(war);
}
}
// ensure the required wars were found
if (webUiWar == null) {
throw new RuntimeException("Unable to load nifi-web WAR");
} else if (webApiWar == null) {
throw new RuntimeException("Unable to load nifi-web-api WAR");
} else if (webDocsWar == null) {
throw new RuntimeException("Unable to load nifi-web-docs WAR");
} else if (webErrorWar == null) {
throw new RuntimeException("Unable to load nifi-web-error WAR");
}
// handlers for each war and init params for the web api
final HandlerCollection handlers = new HandlerCollection();
final Map<String, String> initParams = new HashMap<>();
final ClassLoader frameworkClassLoader = getClass().getClassLoader();
final ClassLoader jettyClassLoader = frameworkClassLoader.getParent();
// deploy the other wars
if (CollectionUtils.isNotEmpty(otherWars)) {
customUiWebContexts = new ArrayList<>();
for (File war : otherWars) {
// see if this war is a custom processor ui
List<String> customUiProcessorTypes = getCustomUiProcessorTypes(war);
// only include wars that are for custom processor ui's
if (CollectionUtils.isNotEmpty(customUiProcessorTypes)) {
String warName = StringUtils.substringBeforeLast(war.getName(), ".");
String warContextPath = String.format("/%s", warName);
// attempt to locate the nar class loader for this war
ClassLoader narClassLoaderForWar = NarClassLoaders.getExtensionClassLoader(warToNarWorkingDirectoryLookup.get(war));
// this should never be null
if (narClassLoaderForWar == null) {
narClassLoaderForWar = jettyClassLoader;
}
// create the custom ui web app context
WebAppContext customUiContext = loadWar(war, warContextPath, narClassLoaderForWar);
// hold on to a reference to all custom ui web app contexts
customUiWebContexts.add(customUiContext);
// include custom ui web context in the handlers
handlers.addHandler(customUiContext);
// add the initialization paramters
for (String customUiProcessorType : customUiProcessorTypes) {
// map the processor type to the custom ui path
initParams.put(customUiProcessorType, warContextPath);
}
}
}
}
// load the web ui app
handlers.addHandler(loadWar(webUiWar, "/nifi", frameworkClassLoader));
// load the web api app
webApiContext = loadWar(webApiWar, "/nifi-api", frameworkClassLoader);
Map<String, String> webApiInitParams = webApiContext.getInitParams();
webApiInitParams.putAll(initParams);
handlers.addHandler(webApiContext);
// create a web app for the docs
final String docsContextPath = "/nifi-docs";
// load the documentation war
webDocsContext = loadWar(webDocsWar, docsContextPath, frameworkClassLoader);
// overlay the actual documentation
final ContextHandlerCollection documentationHandlers = new ContextHandlerCollection();
documentationHandlers.addHandler(createDocsWebApp(docsContextPath));
documentationHandlers.addHandler(webDocsContext);
handlers.addHandler(documentationHandlers);
// load the web error app
handlers.addHandler(loadWar(webErrorWar, "/", frameworkClassLoader));
// deploy the web apps
server.setHandler(handlers);
}
/**
 * Finds WAR files in the specified NAR working directories. Each discovered WAR
 * (under {@code META-INF/dependencies}) is mapped back to the NAR working directory
 * that contains it.
 *
 * @param narWorkingDirectories the NAR working directories to scan
 * @return a map from WAR file to its owning NAR working directory
 */
private Map<File, File> findWars(final Set<File> narWorkingDirectories) {
    final Map<File, File> warToNarDirectory = new HashMap<>();
    // consider each nar working directory
    for (final File narWorkingDirectory : narWorkingDirectories) {
        final File dependencyDir = new File(narWorkingDirectory, "META-INF/dependencies");
        if (!dependencyDir.isDirectory()) {
            continue;
        }
        // list the wars from this nar
        final File[] warFiles = dependencyDir.listFiles(WAR_FILTER);
        if (warFiles == null) {
            throw new IllegalStateException(String.format("Unable to access working directory for NAR dependencies in: %s", dependencyDir.getAbsolutePath()));
        }
        // add each war
        for (final File warFile : warFiles) {
            warToNarDirectory.put(warFile, narWorkingDirectory);
        }
    }
    return warToNarDirectory;
}
/**
 * Loads the processor types that the specified war file is a custom UI for.
 *
 * <p>Reads the WAR's {@code META-INF/nifi-processor} entry, one processor type
 * per non-blank line. Returns an empty list when the entry is absent or the
 * WAR cannot be inspected.</p>
 *
 * @param warFile the WAR file to inspect
 * @return the declared processor types, never {@code null}
 */
private List<String> getCustomUiProcessorTypes(final File warFile) {
    final List<String> processorTypes = new ArrayList<>();
    // try-with-resources closes the jar file - which closes all input streams obtained
    // via getInputStream below (replaces a manual finally block with an empty catch)
    try (final JarFile jarFile = new JarFile(warFile)) {
        // attempt to find the nifi-processor entry
        final JarEntry jarEntry = jarFile.getJarEntry("META-INF/nifi-processor");
        // ensure the nifi-processor entry was found
        if (jarEntry != null) {
            // get an input stream for the nifi-processor configuration file
            final BufferedReader in = new BufferedReader(new InputStreamReader(jarFile.getInputStream(jarEntry)));
            // read in each configured type
            String processorType;
            while ((processorType = in.readLine()) != null) {
                // ensure the line isn't blank
                if (StringUtils.isNotBlank(processorType)) {
                    processorTypes.add(processorType);
                }
            }
        }
    } catch (final IOException ioe) {
        logger.warn(String.format("Unable to inspect %s for a custom processor UI.", warFile));
    }
    return processorTypes;
}
/**
 * Creates and configures (but does not start) a web app context for the given WAR.
 *
 * @param warFile the WAR file to deploy
 * @param contextPath the context path to mount the web app at
 * @param parentClassLoader the parent of the web app's class loader
 * @return the configured web app context
 */
private WebAppContext loadWar(final File warFile, final String contextPath, final ClassLoader parentClassLoader) {
    final WebAppContext webApp = new WebAppContext(warFile.getPath(), contextPath);
    webApp.setContextPath(contextPath);
    webApp.setDisplayName(contextPath);
    // remove slf4j server class to allow WAR files to have slf4j dependencies in WEB-INF/lib
    final List<String> hiddenClasses = new ArrayList<>(Arrays.asList(webApp.getServerClasses()));
    hiddenClasses.remove("org.slf4j.");
    webApp.setServerClasses(hiddenClasses.toArray(new String[0]));
    webApp.setDefaultsDescriptor(WEB_DEFAULTS_XML);
    // resolve the temp directory for this webapp and ensure it is usable
    final File webAppTempDir = new File(props.getWebWorkingDirectory(), warFile.getName());
    if (webAppTempDir.exists()) {
        if (!webAppTempDir.isDirectory()) {
            throw new RuntimeException(webAppTempDir.getAbsolutePath() + " is not a directory");
        }
    } else if (!webAppTempDir.mkdirs()) {
        throw new RuntimeException(webAppTempDir.getAbsolutePath() + " could not be created");
    }
    if (!(webAppTempDir.canRead() && webAppTempDir.canWrite())) {
        throw new RuntimeException(webAppTempDir.getAbsolutePath() + " directory does not have read/write privilege");
    }
    // configure the temp dir
    webApp.setTempDirectory(webAppTempDir);
    // configure the max form size (3x the default)
    webApp.setMaxFormContentSize(600000);
    try {
        // configure the class loader - webappClassLoader -> jetty nar -> web app's nar -> ...
        webApp.setClassLoader(new WebAppClassLoader(parentClassLoader, webApp));
    } catch (final IOException ioe) {
        startUpFailure(ioe);
    }
    logger.info("Loading WAR: " + warFile.getAbsolutePath() + " with context path set to " + contextPath);
    return webApp;
}
/**
 * Creates the context handler that serves the static documentation content
 * from both the bundled docs directory and the component docs working directory.
 *
 * @param contextPath the context path to serve the documentation from
 * @return the configured context handler
 * @throws IllegalStateException if either documentation directory cannot be resolved
 */
private ContextHandler createDocsWebApp(final String contextPath) {
    try {
        final ResourceHandler resourceHandler = new ResourceHandler();
        resourceHandler.setDirectoriesListed(false);
        // load the docs directory
        final File docsDir = Paths.get("docs").toRealPath().toFile();
        final Resource docsResource = Resource.newResource(docsDir);
        // load the component documentation working directory
        final String componentDocsDirPath = props.getProperty(NiFiProperties.COMPONENT_DOCS_DIRECTORY, "work/docs/components");
        final File workingDocsDirectory = Paths.get(componentDocsDirPath).toRealPath().getParent().toFile();
        final Resource workingDocsResource = Resource.newResource(workingDocsDirectory);
        // create resources for both docs locations
        final ResourceCollection resources = new ResourceCollection(docsResource, workingDocsResource);
        resourceHandler.setBaseResource(resources);
        // create the context handler
        final ContextHandler handler = new ContextHandler(contextPath);
        handler.setHandler(resourceHandler);
        logger.info("Loading documents web app with context path set to " + contextPath);
        return handler;
    } catch (Exception ex) {
        // pass the original exception along as the cause so the full stack trace is preserved
        throw new IllegalStateException("Resource directory paths are malformed: " + ex.getMessage(), ex);
    }
}
/**
 * Registers the HTTP and/or HTTPS connectors on the given server, based on
 * which ports are configured in the NiFi properties.
 *
 * @param server the server to add connectors to
 * @throws ServerConfigurationException if a configured port is out of range
 */
private void configureConnectors(final Server server) throws ServerConfigurationException {
    // http configuration shared by both connectors
    final HttpConfiguration httpConfiguration = new HttpConfiguration();
    httpConfiguration.setRequestHeaderSize(HEADER_BUFFER_SIZE);
    httpConfiguration.setResponseHeaderSize(HEADER_BUFFER_SIZE);
    // plain http connector, when a port is configured
    final Integer httpPort = props.getPort();
    if (httpPort != null) {
        if (httpPort < 0 || httpPort >= 65536) {
            throw new ServerConfigurationException("Invalid HTTP port: " + httpPort);
        }
        logger.info("Configuring Jetty for HTTP on port: " + httpPort);
        final ServerConnector http = new ServerConnector(server, new HttpConnectionFactory(httpConfiguration));
        // bind to a specific host only when one is configured
        if (StringUtils.isNotBlank(props.getProperty(NiFiProperties.WEB_HTTP_HOST))) {
            http.setHost(props.getProperty(NiFiProperties.WEB_HTTP_HOST));
        }
        http.setPort(httpPort);
        server.addConnector(http);
    }
    // tls connector, when an ssl port is configured
    final Integer httpsPort = props.getSslPort();
    if (httpsPort != null) {
        if (httpsPort < 0 || httpsPort >= 65536) {
            throw new ServerConfigurationException("Invalid HTTPs port: " + httpsPort);
        }
        logger.info("Configuring Jetty for HTTPs on port: " + httpsPort);
        // derive a secure configuration from the shared http configuration
        final HttpConfiguration httpsConfiguration = new HttpConfiguration(httpConfiguration);
        httpsConfiguration.setSecureScheme("https");
        httpsConfiguration.setSecurePort(httpsPort);
        httpsConfiguration.addCustomizer(new SecureRequestCustomizer());
        final ServerConnector https = new ServerConnector(server,
                new SslConnectionFactory(createSslContextFactory(), "http/1.1"),
                new HttpConnectionFactory(httpsConfiguration));
        // bind to a specific host only when one is configured
        if (StringUtils.isNotBlank(props.getProperty(NiFiProperties.WEB_HTTPS_HOST))) {
            https.setHost(props.getProperty(NiFiProperties.WEB_HTTPS_HOST));
        }
        https.setPort(httpsPort);
        server.addConnector(https);
    }
}
/**
 * Builds the Jetty SSL context factory from the security properties
 * (keystore/truststore paths, types and passwords).
 *
 * @return the configured SSL context factory
 */
private SslContextFactory createSslContextFactory() {
    final SslContextFactory contextFactory = new SslContextFactory();
    // need client auth
    contextFactory.setNeedClientAuth(props.getNeedClientAuth());
    /* below code sets JSSE system properties when values are provided */
    // keystore properties
    if (StringUtils.isNotBlank(props.getProperty(NiFiProperties.SECURITY_KEYSTORE))) {
        contextFactory.setKeyStorePath(props.getProperty(NiFiProperties.SECURITY_KEYSTORE));
    }
    if (StringUtils.isNotBlank(props.getProperty(NiFiProperties.SECURITY_KEYSTORE_TYPE))) {
        contextFactory.setKeyStoreType(props.getProperty(NiFiProperties.SECURITY_KEYSTORE_TYPE));
    }
    final String keystorePassword = props.getProperty(NiFiProperties.SECURITY_KEYSTORE_PASSWD);
    final String keyPassword = props.getProperty(NiFiProperties.SECURITY_KEY_PASSWD);
    if (StringUtils.isNotBlank(keystorePassword)) {
        // if no key password was provided, then assume the keystore password is the same as the key password.
        final String defaultKeyPassword = (StringUtils.isBlank(keyPassword)) ? keystorePassword : keyPassword;
        // BUGFIX: the keystore password belongs on setKeyStorePassword and the (possibly
        // defaulted) key password on setKeyManagerPassword - these two calls were previously
        // swapped, which broke any configuration where the key password differs from the
        // keystore password
        contextFactory.setKeyStorePassword(keystorePassword);
        contextFactory.setKeyManagerPassword(defaultKeyPassword);
    } else if (StringUtils.isNotBlank(keyPassword)) {
        // since no keystore password was provided, there will be no keystore integrity check
        // NOTE(review): using the key password as the keystore password here looks intentional
        // (legacy behavior) - confirm against the Jetty SslContextFactory docs before changing
        contextFactory.setKeyStorePassword(keyPassword);
    }
    // truststore properties
    if (StringUtils.isNotBlank(props.getProperty(NiFiProperties.SECURITY_TRUSTSTORE))) {
        contextFactory.setTrustStorePath(props.getProperty(NiFiProperties.SECURITY_TRUSTSTORE));
    }
    if (StringUtils.isNotBlank(props.getProperty(NiFiProperties.SECURITY_TRUSTSTORE_TYPE))) {
        contextFactory.setTrustStoreType(props.getProperty(NiFiProperties.SECURITY_TRUSTSTORE_TYPE));
    }
    if (StringUtils.isNotBlank(props.getProperty(NiFiProperties.SECURITY_TRUSTSTORE_PASSWD))) {
        contextFactory.setTrustStorePassword(props.getProperty(NiFiProperties.SECURITY_TRUSTSTORE_PASSWD));
    }
    return contextFactory;
}
/**
 * Starts the web server.
 *
 * <p>After Jetty starts, this method verifies that every deployed web app
 * became available, wires the NiFi web context and security filter into any
 * custom UI wars, exposes the extension mapping to the docs war and, when
 * this instance is a cluster node, starts the flow service and loads the
 * flow. Any failure is routed to {@link #startUpFailure(Throwable)}, which
 * terminates the JVM.</p>
 */
@Override
public void start() {
    try {
        // start the server
        server.start();
        // ensure everything started successfully
        for (Handler handler : server.getChildHandlers()) {
            // see if the handler is a web app
            if (handler instanceof WebAppContext) {
                WebAppContext context = (WebAppContext) handler;
                // see if this webapp had any exceptions that would
                // cause it to be unavailable
                if (context.getUnavailableException() != null) {
                    // startUpFailure logs the problem and exits the JVM
                    startUpFailure(context.getUnavailableException());
                }
            }
        }
        // ensure the appropriate wars deployed successfully before injecting the NiFi context and security filters -
        // this must be done after starting the server (and ensuring there were no start up failures)
        if (webApiContext != null && CollectionUtils.isNotEmpty(customUiWebContexts)) {
            final ServletContext webApiServletContext = webApiContext.getServletHandler().getServletContext();
            final WebApplicationContext webApplicationContext = WebApplicationContextUtils.getRequiredWebApplicationContext(webApiServletContext);
            // NOTE(review): this local variable shadows its own type name (NiFiWebContext);
            // renaming it (e.g. to nifiWebContext) would aid readability
            final NiFiWebContext NiFiWebContext = webApplicationContext.getBean("nifiWebContext", NiFiWebContext.class);
            for (final WebAppContext customUiContext : customUiWebContexts) {
                // set the NiFi context in each custom ui servlet context
                final ServletContext customUiServletContext = customUiContext.getServletHandler().getServletContext();
                customUiServletContext.setAttribute("nifi-web-context", NiFiWebContext);
                // add the security filter to any custom ui wars
                final FilterHolder securityFilter = webApiContext.getServletHandler().getFilter("springSecurityFilterChain");
                if (securityFilter != null) {
                    customUiContext.addFilter(securityFilter, "/*", EnumSet.of(DispatcherType.REQUEST));
                }
            }
        }
        // ensure the web document war was loaded and provide the extension mapping
        if (webDocsContext != null) {
            final ServletContext webDocsServletContext = webDocsContext.getServletHandler().getServletContext();
            webDocsServletContext.setAttribute("nifi-extension-mapping", extensionMapping);
        }
        // if this nifi is a node in a cluster, start the flow service and load the flow - the
        // flow service is loaded here for clustered nodes because the loading of the flow will
        // initialize the connection between the node and the NCM. if the node connects (starts
        // heartbeating, etc), the NCM may issue web requests before the application (wars) have
        // finished loading. this results in the node being disconnected since its unable to
        // successfully respond to the requests. to resolve this, flow loading was moved to here
        // (after the wars have been successfully deployed) when this nifi instance is a node
        // in a cluster
        if (props.isNode()) {
            FlowService flowService = null;
            try {
                logger.info("Loading Flow...");
                ApplicationContext ctx = WebApplicationContextUtils.getWebApplicationContext(webApiContext.getServletContext());
                flowService = ctx.getBean("flowService", FlowService.class);
                // start and load the flow
                flowService.start();
                flowService.load(null);
                logger.info("Flow loaded successfully.");
            } catch (BeansException | LifeCycleStartException | IOException | FlowSerializationException | FlowSynchronizationException | UninheritableFlowException e) {
                // ensure the flow service is terminated
                if (flowService != null && flowService.isRunning()) {
                    flowService.stop(false);
                }
                throw new Exception("Unable to load flow due to: " + e, e);
            }
        }
        // dump the application url after confirming everything started successfully
        dumpUrls();
    } catch (Exception ex) {
        startUpFailure(ex);
    }
}
/**
 * Logs every URL the UI and REST API are reachable at, derived from the
 * configured server connectors.
 *
 * @throws SocketException if the local network interfaces cannot be enumerated
 */
private void dumpUrls() throws SocketException {
    final List<String> urls = new ArrayList<>();
    for (final Connector connector : server.getConnectors()) {
        if (!(connector instanceof ServerConnector)) {
            continue;
        }
        final ServerConnector serverConnector = (ServerConnector) connector;
        // collect the hosts this connector is reachable at
        final Set<String> hosts = new HashSet<>();
        final String configuredHost = serverConnector.getHost();
        if (StringUtils.isNotBlank(configuredHost)) {
            hosts.add(configuredHost);
        } else {
            // no host configured - advertise the address of every local network interface
            final Enumeration<NetworkInterface> networkInterfaces = NetworkInterface.getNetworkInterfaces();
            if (networkInterfaces != null) {
                for (final NetworkInterface networkInterface : Collections.list(networkInterfaces)) {
                    for (final InetAddress inetAddress : Collections.list(networkInterface.getInetAddresses())) {
                        hosts.add(inetAddress.getHostAddress());
                    }
                }
            }
        }
        // ensure some hosts were found
        if (hosts.isEmpty()) {
            continue;
        }
        // https when this connector uses the configured ssl port, plain http otherwise
        final String scheme = (props.getSslPort() != null && serverConnector.getPort() == props.getSslPort()) ? "https" : "http";
        for (final String host : hosts) {
            urls.add(String.format("%s://%s:%s", scheme, host, serverConnector.getPort()));
        }
    }
    if (urls.isEmpty()) {
        logger.warn("NiFi has started, but the UI is not available on any hosts. Please verify the host properties.");
    } else {
        // log the ui location
        logger.info("NiFi has started. The UI is available at the following URLs:");
        for (final String url : urls) {
            logger.info(String.format("%s/nifi", url));
        }
        // log the rest api location
        logger.info("The REST API documentation is available at the following URLs:");
        for (final String url : urls) {
            logger.info(String.format("%s/nifi-api/docs", url));
        }
    }
}
/**
 * Handles when a start up failure occurs: reports the problem to the console
 * and the log, then terminates the JVM.
 *
 * @param t the failure that prevented start up
 */
private void startUpFailure(Throwable t) {
    // report to the console for operators watching the process
    System.err.println("Failed to start web server: " + t.getMessage());
    System.err.println("Shutting down...");
    // record the full stack trace in the application log
    logger.warn("Failed to start web server... shutting down.", t);
    // the failure is unrecoverable - terminate the process
    System.exit(1);
}
/**
 * Sets the extension mapping that is later exposed to the documentation web app
 * (see the handling of webDocsContext in start()).
 *
 * @param extensionMapping the extension mapping to expose
 */
@Override
public void setExtensionMapping(ExtensionMapping extensionMapping) {
    this.extensionMapping = extensionMapping;
}
/**
 * Stops the web server, logging (rather than propagating) any failure to do so.
 */
@Override
public void stop() {
    try {
        server.stop();
    } catch (final Exception e) {
        // shutdown is best-effort; record the problem but do not rethrow
        logger.warn("Failed to stop web server", e);
    }
}
}
| |
package pl.pronux.sokker.ui.configure;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import org.eclipse.swt.SWT;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableColumn;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.TreeItem;
import pl.pronux.sokker.data.properties.SVProperties;
import pl.pronux.sokker.model.SokkerViewerSettings;
import pl.pronux.sokker.resources.Messages;
import pl.pronux.sokker.ui.beans.ConfigBean;
import pl.pronux.sokker.ui.interfaces.IViewConfigure;
import pl.pronux.sokker.ui.widgets.shells.BugReporter;
/**
 * Configuration page that lets the user enable/disable plugins and reorder them.
 * The state is persisted to settings/plugins.properties under the base directory.
 */
public class ViewPlugins implements IViewConfigure {

    private TreeItem treeItem;

    private Composite composite;

    // properties backing the plugin configuration (settings/plugins.properties)
    private SVProperties pluginsProperties;

    private Table pluginsTable;

    // fully-qualified plugin class names, in display order
    private String[] plugins;

    private SokkerViewerSettings settings;

    public void clear() {
        // nothing to reset for this configuration page
    }

    public void dispose() {
        // no resources to release
    }

    public Composite getComposite() {
        return composite;
    }

    public TreeItem getTreeItem() {
        return treeItem;
    }

    /**
     * Builds the page: up/down reorder buttons and the checkable plugin table.
     *
     * @param composite the parent composite to build the controls in
     */
    public void init(Composite composite) {
        this.composite = composite;
        composite.setLayout(new FormLayout());
        pluginsProperties = new SVProperties();
        loadPluginsProperties();
        // guard against a missing "plugins" property (previously caused a NullPointerException)
        String pluginsProperty = pluginsProperties.getProperty("plugins");
        plugins = (pluginsProperty == null) ? new String[0] : pluginsProperty.split(";");
        FormData formData = new FormData(50, 25);
        formData.left = new FormAttachment(50, -formData.width - 10);
        formData.bottom = new FormAttachment(100, -10);
        Button upButton = new Button(composite, SWT.ARROW | SWT.UP);
        upButton.setLayoutData(formData);
        upButton.setFont(ConfigBean.getFontTable());
        upButton.addListener(SWT.Selection, new Listener() {
            public void handleEvent(Event arg0) {
                moveSelectedItem(true);
            }
        });
        formData = new FormData(50, 25);
        formData.left = new FormAttachment(upButton, 20);
        formData.bottom = new FormAttachment(100, -10);
        Button downButton = new Button(composite, SWT.ARROW | SWT.DOWN);
        downButton.setLayoutData(formData);
        downButton.setFont(ConfigBean.getFontTable());
        downButton.addListener(SWT.Selection, new Listener() {
            public void handleEvent(Event arg0) {
                moveSelectedItem(false);
            }
        });
        formData = new FormData();
        formData.top = new FormAttachment(0, 0);
        formData.left = new FormAttachment(0, 0);
        formData.right = new FormAttachment(100, 0);
        formData.bottom = new FormAttachment(downButton, -10);
        pluginsTable = new Table(composite, SWT.BORDER | SWT.CHECK | SWT.SINGLE | SWT.FULL_SELECTION);
        pluginsTable.setHeaderVisible(true);
        pluginsTable.setLinesVisible(true);
        pluginsTable.setFont(ConfigBean.getFontMain());
        pluginsTable.setLayoutData(formData);
        String[] columns = {
            Messages.getString("configure.plugins.table.name"),
            Messages.getString("configure.plugins.table.description")
        };
        for (int i = 0; i < columns.length; i++) {
            TableColumn column = new TableColumn(pluginsTable, SWT.NONE);
            column.setText(columns[i]);
            column.setResizable(false);
            column.setMoveable(false);
        }
        treeItem.setText(Messages.getString("configure.plugins"));
        fillTable(pluginsTable, plugins);
        composite.layout(true);
    }

    // Loads plugins.properties, reporting failures through the BugReporter dialog.
    private void loadPluginsProperties() {
        FileInputStream input = null;
        try {
            input = new FileInputStream(settings.getBaseDirectory() + File.separator + "settings" + File.separator + "plugins.properties");
            pluginsProperties.load(input);
        } catch (FileNotFoundException e) {
            new BugReporter(composite.getDisplay()).openErrorMessage("ViewComposite plugins", e);
        } catch (IOException e) {
            new BugReporter(composite.getDisplay()).openErrorMessage("ViewComposite plugins", e);
        } finally {
            // previously the stream was never closed (file handle leak)
            if (input != null) {
                try {
                    input.close();
                } catch (IOException ignored) {
                    // best-effort close; the read error (if any) was already reported
                }
            }
        }
    }

    // Moves the single selected table row one position up or down by inserting a
    // copy at the target slot and disposing the original.
    private void moveSelectedItem(boolean up) {
        TableItem[] selection = pluginsTable.getSelection();
        if (selection.length != 1) {
            return;
        }
        TableItem selected = selection[0];
        int index = pluginsTable.indexOf(selected);
        // nothing to do at the top (moving up) or at the bottom (moving down)
        if (up ? index <= 0 : index >= pluginsTable.getItemCount() - 1) {
            return;
        }
        // when moving down, the original still occupies its slot, so the copy
        // must be inserted two rows below the current position
        int insertIndex = up ? index - 1 : index + 2;
        TableItem copy = new TableItem(pluginsTable, SWT.NONE, insertIndex);
        // copy every column's text (previously iterated over getItemCount() by mistake)
        for (int i = 0; i < pluginsTable.getColumnCount(); i++) {
            copy.setText(i, selected.getText(i));
        }
        copy.setData("plugin", selected.getData("plugin"));
        copy.setChecked(selected.getChecked());
        selected.dispose();
        pluginsTable.setSelection(up ? index - 1 : index + 1);
    }

    // Repopulates the table from the given plugin list and packs the columns.
    private void fillTable(Table table, String[] plugins) {
        table.remove(0, table.getItemCount() - 1);
        for (int i = 0; i < plugins.length; i++) {
            String viewName = plugins[i];
            String[] nameParts = viewName.split("\\.");
            String viewSimpleName = nameParts[nameParts.length - 1];
            // "<plugin>.turn" is "1" when the plugin is enabled
            int pluginTurn = Integer.parseInt(pluginsProperties.getProperty(viewName + ".turn"));
            TableItem item = new TableItem(table, SWT.NONE);
            item.setText(0, Messages.getString("tree." + viewSimpleName));
            item.setText(1, Messages.getString(viewName + ".description"));
            item.setData("plugin", viewName);
            item.setChecked(pluginTurn == 1);
        }
        for (int i = 0; i < table.getColumnCount(); i++) {
            table.getColumn(i).pack();
        }
    }

    public void setSettings(SokkerViewerSettings sokkerViewerSettings) {
        this.settings = sokkerViewerSettings;
    }

    public void setTreeItem(TreeItem treeItem) {
        this.treeItem = treeItem;
    }

    public void set() {
    }

    /**
     * Persists the current table state (order and enabled flags) back to
     * plugins.properties.
     */
    public void applyChanges() {
        StringBuilder pluginOrder = new StringBuilder();
        for (int i = 0; i < pluginsTable.getItemCount(); i++) {
            TableItem item = pluginsTable.getItem(i);
            String plugin = (String) item.getData("plugin");
            pluginOrder.append(plugin).append(";");
            pluginsProperties.setProperty(plugin + ".turn", item.getChecked() ? "1" : "0");
        }
        pluginsProperties.setProperty("plugins", pluginOrder.toString());
        FileOutputStream output = null;
        try {
            output = new FileOutputStream(new File(settings.getBaseDirectory() + File.separator + "settings" + File.separator + "plugins.properties"));
            pluginsProperties.store(output, "");
        } catch (FileNotFoundException e) {
            new BugReporter(composite.getDisplay()).openErrorMessage("ViewComposite plugins", e);
        } catch (IOException e) {
            new BugReporter(composite.getDisplay()).openErrorMessage("ViewComposite plugins", e);
        } finally {
            // previously the stream was never closed (file handle leak)
            if (output != null) {
                try {
                    output.close();
                } catch (IOException ignored) {
                    // best-effort close; the write error (if any) was already reported
                }
            }
        }
    }

    public void restoreDefaultChanges() {
        fillTable(pluginsTable, plugins);
    }
}
| |
package com.khorn.terraincontrol.generator.biome.layers;
import com.khorn.terraincontrol.LocalBiome;
import com.khorn.terraincontrol.LocalWorld;
import com.khorn.terraincontrol.TerrainControl;
import com.khorn.terraincontrol.configuration.*;
import com.khorn.terraincontrol.configuration.standard.WorldStandardValues;
import com.khorn.terraincontrol.util.minecraftTypes.DefaultBiome;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Class responsible for creating instances of {@link Layer} tailored towards
 * biome modes.
 *
 * <p>Each factory method builds a stack of layers (zooms, land/river/ice
 * placement, isle and border biomes, and a final mix/smooth pass) and returns
 * the unzoomed main layer together with a voronoi-zoomed variant.</p>
 */
public final class LayerFactory
{
    private LayerFactory()
    {
        // No instances
    }

    /**
     * Creates a pair of layers for use with the normal biome mode.
     * @param world World to create layers for.
     * @return The pair of layers: [0] the main layer, [1] the voronoi-zoomed layer.
     */
    public static Layer[] createNormal(LocalWorld world)
    {
        Layer mainLayer = initMainLayer(world);
        Layer zoomedLayer = new LayerZoomVoronoi(10L, mainLayer);
        zoomedLayer.initWorldGenSeed(world.getSeed());
        return new Layer[] {mainLayer, zoomedLayer};
    }

    /**
     * Creates a pair of layers for use with FromImage biome mode.
     * @param world The world to create layers for.
     * @return The pair: [0] the main layer, [1] the voronoi-zoomed layer.
     */
    public static Layer[] createFromImage(LocalWorld world)
    {
        WorldConfig worldConfig = world.getConfigs().getWorldConfig();
        Layer mainLayer = initMainLayer(world);
        if (worldConfig.imageMode == WorldConfig.ImageMode.ContinueNormal)
        {
            // the image continues into normally generated terrain, so keep the
            // generated stack as the parent layer
            mainLayer = new LayerFromImage(1L, mainLayer, worldConfig, world);
        } else
        {
            // the image fully determines the biomes - no parent layer needed
            mainLayer = new LayerFromImage(1L, null, worldConfig, world);
        }
        Layer zoomedLayer = new LayerZoomVoronoi(10L, mainLayer);
        zoomedLayer.initWorldGenSeed(world.getSeed());
        return new Layer[] {mainLayer, zoomedLayer};
    }

    /**
     * Creates a pair of layers for use with the BeforeGroups biome mode.
     * This is the legacy mode that only distinguishes the NormalBiomes and
     * IceBiomes groups, and that selects isles/borders by biomeSize rather
     * than by the dedicated "when isle/border" settings.
     * @param world The world to create layers for.
     * @return The pair: [0] the main layer, [1] the voronoi-zoomed layer.
     */
    public static Layer[] createBeforeGroups(LocalWorld world)
    {
        /*
         * int BigLandSize = 2; //default 0, more - smaller int
         * ChanceToIncreaseLand = 6; //default 4 int MaxDepth = 10;
         */
        ConfigProvider configs = world.getConfigs();
        WorldConfig worldConfig = configs.getWorldConfig();
        BiomeGroupManager worldGroupManager = worldConfig.biomeGroupManager;
        BiomeGroup normalGroup = worldGroupManager.getGroupByName(WorldStandardValues.NORMAL_BIOMES.getName());
        BiomeGroup iceGroup = worldGroupManager.getGroupByName(WorldStandardValues.ICE_BIOMES.getName());
        if (normalGroup == null)
        {
            // Create an empty group to avoid having to check for null
            // everywhere
            normalGroup = new BiomeGroup(worldConfig, "", 0, 0, Collections.<String> emptyList());
            normalGroup.processBiomeData(world);
        }
        if (iceGroup == null)
        {
            iceGroup = new BiomeGroup(worldConfig, "", 0, 0, Collections.<String> emptyList());
            iceGroup.processBiomeData(world);
        }
        // per-depth biome choices for the normal and ice groups; each biome is
        // repeated biomeRarity times so a uniform pick respects the rarity weights
        LocalBiome[][] normalBiomeMap = new LocalBiome[worldConfig.GenerationDepth + 1][];
        LocalBiome[][] iceBiomeMap = new LocalBiome[worldConfig.GenerationDepth + 1][];
        for (int i = 0; i < worldConfig.GenerationDepth + 1; i++)
        {
            List<LocalBiome> normalBiomes = new ArrayList<LocalBiome>();
            List<LocalBiome> iceBiomes = new ArrayList<LocalBiome>();
            for (LocalBiome biome : configs.getBiomeArray())
            {
                if (biome == null)
                    continue;
                BiomeConfig biomeConfig = biome.getBiomeConfig();
                if (biomeConfig.biomeSize != i)
                    continue;
                if (normalGroup.containsBiome(biomeConfig.getName()))
                {
                    for (int t = 0; t < biomeConfig.biomeRarity; t++)
                        normalBiomes.add(biome);
                    normalGroup.totalGroupRarity -= biomeConfig.biomeRarity;
                }
                if (iceGroup.containsBiome(biomeConfig.getName()))
                {
                    for (int t = 0; t < biomeConfig.biomeRarity; t++)
                        iceBiomes.add(biome);
                    iceGroup.totalGroupRarity -= biomeConfig.biomeRarity;
                }
            }
            // NOTE(review): the target array is deliberately oversized by the group's
            // remaining rarity, so toArray pads the tail with nulls - presumably the
            // null slots act as "no pick" entries during weighted selection; confirm
            // against LayerBiomeBeforeGroups before changing
            if (!normalBiomes.isEmpty())
                normalBiomeMap[i] = normalBiomes.toArray(new LocalBiome[normalBiomes.size() + normalGroup.totalGroupRarity]);
            else
                normalBiomeMap[i] = new LocalBiome[0];
            if (!iceBiomes.isEmpty())
                iceBiomeMap[i] = iceBiomes.toArray(new LocalBiome[iceBiomes.size() + iceGroup.totalGroupRarity]);
            else
                iceBiomeMap[i] = new LocalBiome[0];
        }
        Layer mainLayer = new LayerEmpty(1L);
        Layer RiverLayer = new LayerEmpty(1L);
        boolean riversStarted = false;
        for (int depth = 0; depth <= worldConfig.GenerationDepth; depth++)
        {
            // each depth doubles the resolution of the biome map
            mainLayer = new LayerZoom(2001 + depth, mainLayer);
            if (worldConfig.randomRivers && riversStarted)
                RiverLayer = new LayerZoom(2001 + depth, RiverLayer);
            if (worldConfig.LandSize == depth)
            {
                mainLayer = new LayerLand(1L, mainLayer, worldConfig.LandRarity);
                mainLayer = new LayerZoomFuzzy(2000L, mainLayer);
            }
            if (depth < (worldConfig.LandSize + worldConfig.LandFuzzy))
                mainLayer = new LayerLandRandom(depth, mainLayer);
            if (normalBiomeMap[depth].length != 0 || iceBiomeMap[depth].length != 0)
                mainLayer = new LayerBiomeBeforeGroups(200, mainLayer, normalBiomeMap[depth], iceBiomeMap[depth]);
            if (iceGroup.getGenerationDepth() == depth)
                mainLayer = new LayerIce(depth, mainLayer, iceGroup.getGroupRarity());
            if (worldConfig.riverRarity == depth)
                if (worldConfig.randomRivers)
                {
                    // rivers get their own layer stack, zoomed in lockstep from here on
                    RiverLayer = new LayerRiverInit(155, RiverLayer);
                    riversStarted = true;
                } else
                    mainLayer = new LayerRiverInit(155, mainLayer);
            if ((worldConfig.GenerationDepth - worldConfig.riverSize) == depth)
            {
                if (worldConfig.randomRivers)
                    RiverLayer = new LayerRiver(5 + depth, RiverLayer);
                else
                    mainLayer = new LayerRiver(5 + depth, mainLayer);
            }
            // collect isle/border biome rules configured for this depth; the layers
            // are only chained in when at least one rule applies
            LayerBiomeBorder layerBiomeBorder = new LayerBiomeBorder(3000 + depth, world);
            LayerBiomeInBiome layerBiomeIsle = new LayerBiomeInBiome(mainLayer, world.getSeed());
            boolean haveBorder = false;
            boolean haveIsle = false;
            for (LocalBiome biome : configs.getBiomeArray())
            {
                if (biome == null)
                    continue;
                BiomeConfig biomeConfig = biome.getBiomeConfig();
                if (biomeConfig.biomeSize == depth
                        && worldConfig.IsleBiomes.contains(biomeConfig.getName())
                        && biomeConfig.isleInBiome != null)
                {
                    haveIsle = true;
                    boolean[] biomeCanSpawnIn = new boolean[TerrainControl.SUPPORTED_BIOME_IDS];
                    boolean inOcean = false;
                    for (String islandInName : biomeConfig.isleInBiome)
                    {
                        final LocalBiome localBiome = world.getBiomeByName(islandInName);
                        if (localBiome == null) {
                            // unknown biome name in the config - silently skipped
                            continue;
                        }
                        int islandIn = localBiome.getIds().getGenerationId();
                        if (islandIn == DefaultBiome.OCEAN.Id)
                        {
                            inOcean = true;
                        } else {
                            biomeCanSpawnIn[islandIn] = true;
                        }
                    }
                    // higher rarity -> higher chance (inverted against the rarity scale)
                    int chance = (worldConfig.BiomeRarityScale + 1) - biomeConfig.biomeRarity;
                    layerBiomeIsle.addIsle(biome, chance, biomeCanSpawnIn, inOcean);
                }
                if (biomeConfig.biomeSize == depth
                        && worldConfig.BorderBiomes.contains(biomeConfig.getName())
                        && biomeConfig.biomeIsBorder != null)
                {
                    haveBorder = true;
                    for (String replaceFromName : biomeConfig.biomeIsBorder)
                    {
                        final LocalBiome localBiome = world.getBiomeByName(replaceFromName);
                        if (localBiome == null) {
                            continue;
                        }
                        int replaceFrom = localBiome.getIds().getGenerationId();
                        layerBiomeBorder.addBiome(biome, replaceFrom, world);
                    }
                }
            }
            if (haveIsle)
            {
                layerBiomeIsle.child = mainLayer;
                mainLayer = layerBiomeIsle;
            }
            if (haveBorder)
            {
                layerBiomeBorder.child = mainLayer;
                mainLayer = layerBiomeBorder;
            }
        }
        if (worldConfig.randomRivers)
            mainLayer = new LayerMixWithRiver(1L, mainLayer, RiverLayer, configs, world);
        else
            mainLayer = new LayerMix(1L, mainLayer, configs, world);
        mainLayer = new LayerSmooth(400L, mainLayer);
        if (worldConfig.biomeMode == TerrainControl.getBiomeModeManager().FROM_IMAGE)
        {
            if (worldConfig.imageMode == WorldConfig.ImageMode.ContinueNormal)
                mainLayer = new LayerFromImage(1L, mainLayer, worldConfig, world);
            else
                mainLayer = new LayerFromImage(1L, null, worldConfig, world);
        }
        Layer zoomedLayer = new LayerZoomVoronoi(10L, mainLayer);
        zoomedLayer.initWorldGenSeed(world.getSeed());
        return new Layer[] {mainLayer, zoomedLayer};
    }

    /**
     * Builds the main layer stack shared by the Normal and FromImage biome
     * modes: per-depth zooms, land placement, biome-group and biome selection,
     * ice, rivers, isle/border biomes, and a final mix + smooth pass.
     *
     * @param world The world to create the layer stack for.
     * @return The fully chained main layer (not voronoi-zoomed).
     */
    private static Layer initMainLayer(LocalWorld world)
    {
        ConfigProvider configs = world.getConfigs();
        WorldConfig worldConfig = configs.getWorldConfig();
        BiomeGroupManager groupManager = worldConfig.biomeGroupManager;
        Layer mainLayer = new LayerEmpty(1L);
        Layer RiverLayer = new LayerEmpty(1L);
        boolean riversStarted = false;
        for (int depth = 0; depth <= worldConfig.GenerationDepth; depth++)
        {
            // each depth doubles the resolution of the biome map
            mainLayer = new LayerZoom(2001 + depth, mainLayer);
            if (worldConfig.randomRivers && riversStarted)
                RiverLayer = new LayerZoom(2001 + depth, RiverLayer);
            if (worldConfig.LandSize == depth)
            {
                mainLayer = new LayerLand(1L, mainLayer, worldConfig.LandRarity);
                mainLayer = new LayerZoomFuzzy(2000L, mainLayer);
            }
            if (depth < (worldConfig.LandSize + worldConfig.LandFuzzy))
            {
                mainLayer = new LayerLandRandom(depth, mainLayer);
            }
            if (!groupManager.isGroupDepthMapEmpty(depth))
            {
                // place whole biome groups configured for this depth
                mainLayer = new LayerBiomeGroups(mainLayer, groupManager, depth, worldConfig.FreezeAllColdGroupBiomes);
            }
            if (!groupManager.isBiomeDepthMapEmpty(depth))
            {
                // place the individual biomes configured for this depth
                mainLayer = new LayerBiome(200, mainLayer, groupManager, depth, worldConfig.FrozenOceanTemperature);
            }
            if (depth == 3)
            {
                // NOTE(review): ice is always applied at depth 3 - this magic constant
                // appears to be a fixed design choice of this biome mode; confirm before
                // making it configurable
                mainLayer = new LayerIce(depth, mainLayer);
            }
            if (worldConfig.riverRarity == depth)
            {
                if (worldConfig.randomRivers)
                {
                    // rivers get their own layer stack, zoomed in lockstep from here on
                    RiverLayer = new LayerRiverInit(155, RiverLayer);
                    riversStarted = true;
                } else
                    mainLayer = new LayerRiverInit(155, mainLayer);
            }
            if ((worldConfig.GenerationDepth - worldConfig.riverSize) == depth)
            {
                if (worldConfig.randomRivers)
                    RiverLayer = new LayerRiver(5 + depth, RiverLayer);
                else
                    mainLayer = new LayerRiver(5 + depth, mainLayer);
            }
            // collect isle/border biome rules configured for this depth; the layers
            // are only chained in when at least one rule applies
            LayerBiomeBorder layerBiomeBorder = new LayerBiomeBorder(3000 + depth, world);
            LayerBiomeInBiome layerBiomeIsle = new LayerBiomeInBiome(mainLayer, world.getSeed());
            boolean haveBorder = false;
            boolean haveIsle = false;
            for (LocalBiome biome : configs.getBiomeArray())
            {
                if (biome == null)
                    continue;
                BiomeConfig biomeConfig = biome.getBiomeConfig();
                if (biomeConfig.biomeSizeWhenIsle == depth
                        && worldConfig.IsleBiomes.contains(biomeConfig.getName())
                        && biomeConfig.isleInBiome != null)
                {
                    haveIsle = true;
                    boolean[] biomeCanSpawnIn = new boolean[TerrainControl.SUPPORTED_BIOME_IDS];
                    boolean inOcean = false;
                    for (String islandInName : biomeConfig.isleInBiome)
                    {
                        final LocalBiome localBiome = world.getBiomeByName(islandInName);
                        if (localBiome == null) {
                            // unknown biome name in the config - silently skipped
                            continue;
                        }
                        int islandIn = localBiome.getIds().getGenerationId();
                        if (islandIn == DefaultBiome.OCEAN.Id)
                        {
                            inOcean = true;
                        } else {
                            biomeCanSpawnIn[islandIn] = true;
                        }
                    }
                    // higher rarity -> higher chance (inverted against the rarity scale)
                    int chance = (worldConfig.BiomeRarityScale + 1) - biomeConfig.biomeRarityWhenIsle;
                    layerBiomeIsle.addIsle(biome, chance, biomeCanSpawnIn, inOcean);
                }
                if (biomeConfig.biomeSizeWhenBorder == depth
                        && worldConfig.BorderBiomes.contains(biomeConfig.getName())
                        && biomeConfig.biomeIsBorder != null)
                {
                    haveBorder = true;
                    for (String replaceFromName : biomeConfig.biomeIsBorder)
                    {
                        final LocalBiome localBiome = world.getBiomeByName(replaceFromName);
                        if (localBiome == null) {
                            continue;
                        }
                        int replaceFrom = localBiome.getIds().getGenerationId();
                        layerBiomeBorder.addBiome(biome, replaceFrom, world);
                    }
                }
            }
            if (haveIsle)
            {
                layerBiomeIsle.child = mainLayer;
                mainLayer = layerBiomeIsle;
            }
            if (haveBorder)
            {
                layerBiomeBorder.child = mainLayer;
                mainLayer = layerBiomeBorder;
            }
        }
        if (worldConfig.randomRivers)
            mainLayer = new LayerMixWithRiver(1L, mainLayer, RiverLayer, configs, world);
        else
            mainLayer = new LayerMix(1L, mainLayer, configs, world);
        mainLayer = new LayerSmooth(400L, mainLayer);
        return mainLayer;
    }
}
| |
// Copyright 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.appmenu;
import android.animation.Animator;
import android.animation.Animator.AnimatorListener;
import android.animation.AnimatorSet;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Surface;
import android.view.View;
import android.view.View.MeasureSpec;
import android.view.View.OnKeyListener;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ImageButton;
import android.widget.ListPopupWindow;
import android.widget.ListView;
import android.widget.PopupWindow;
import android.widget.PopupWindow.OnDismissListener;
import org.chromium.base.AnimationFrameTimeHistogram;
import org.chromium.base.ApiCompatibilityUtils;
import org.chromium.base.SysUtils;
import org.chromium.base.VisibleForTesting;
import org.chromium.chrome.R;
import java.util.ArrayList;
import java.util.List;
/**
* Shows a popup of menuitems anchored to a host view. When a item is selected we call
* Activity.onOptionsItemSelected with the appropriate MenuItem.
* - Only visible MenuItems are shown.
* - Disabled items are grayed out.
*/
public class AppMenu implements OnItemClickListener, OnKeyListener {
    /**
     * Fraction of the last menu row left visible when not all items fit on screen,
     * hinting to the user that the list scrolls.
     */
    private static final float LAST_ITEM_SHOW_FRACTION = 0.5f;

    // Menu created by the framework; only items reported visible are shown in the popup.
    private final Menu mMenu;
    // Fixed pixel height of one menu row (asserted > 0 in the constructor).
    private final int mItemRowHeight;
    // Pixel height of the divider drawn between rows (asserted >= 0).
    private final int mItemDividerHeight;
    // Length of the vertical fading edge; 0 disables fading.
    private final int mVerticalFadeDistance;
    // Upward shift applied when the menu is triggered by a software button.
    private final int mNegativeSoftwareVerticalOffset;

    // Popup window backing the menu; recreated on every show() call.
    private ListPopupWindow mPopup;
    private AppMenuAdapter mAdapter;
    private AppMenuHandler mHandler;
    // Screen rotation captured at show() time; used to position the popup horizontally.
    private int mCurrentScreenRotation = -1;
    // Whether the menu was opened via a permanent hardware menu button.
    private boolean mIsByPermanentButton;
    // Entering animation for menu rows; cancelled when the popup is dismissed.
    private AnimatorSet mMenuItemEnterAnimator;
    // Records per-frame times of the opening animation into a UMA histogram.
    private AnimatorListener mAnimationHistogramRecorder = AnimationFrameTimeHistogram
            .getAnimatorRecorder("WrenchMenu.OpeningAnimationFrameTimes");

    /**
     * Creates and sets up the App Menu.
     * @param menu Original menu created by the framework.
     * @param itemRowHeight Desired height for each app menu row.
     * @param itemDividerHeight Desired height for the divider between app menu items.
     * @param handler AppMenuHandler receives callbacks from AppMenu.
     * @param res Resources object used to get dimensions and style attributes.
     */
    AppMenu(Menu menu, int itemRowHeight, int itemDividerHeight, AppMenuHandler handler,
            Resources res) {
        mMenu = menu;
        mItemRowHeight = itemRowHeight;
        assert mItemRowHeight > 0;
        mHandler = handler;
        mItemDividerHeight = itemDividerHeight;
        assert mItemDividerHeight >= 0;
        mNegativeSoftwareVerticalOffset =
                res.getDimensionPixelSize(R.dimen.menu_negative_software_vertical_offset);
        mVerticalFadeDistance = res.getDimensionPixelSize(R.dimen.menu_vertical_fade_distance);
    }

    /**
     * Notifies the menu that the contents of the menu item specified by {@code menuRowId} have
     * changed.  This should be called if icons, titles, etc. are changing for a particular menu
     * item while the menu is open.
     * @param menuRowId The id of the menu item to change.  This must be a row id and not a child
     *                  id.
     */
    public void menuItemContentChanged(int menuRowId) {
        // Make sure we have all the valid state objects we need.
        if (mAdapter == null || mMenu == null || mPopup == null || mPopup.getListView() == null) {
            return;
        }

        // Calculate the item index by scanning the menu for a matching item id.
        int index = -1;
        int menuSize = mMenu.size();
        for (int i = 0; i < menuSize; i++) {
            if (mMenu.getItem(i).getItemId() == menuRowId) {
                index = i;
                break;
            }
        }
        if (index == -1) return;

        // Check if the item is currently visible in the list; off-screen rows are
        // rebuilt by the adapter when they scroll back in, so nothing to do here.
        ListView list = mPopup.getListView();
        int startIndex = list.getFirstVisiblePosition();
        int endIndex = list.getLastVisiblePosition();
        if (index < startIndex || index > endIndex) return;

        // Grab the correct View.
        View view = list.getChildAt(index - startIndex);
        if (view == null) return;

        // Cause the Adapter to re-populate the View in place.
        list.getAdapter().getView(index, view, list);
    }

    /**
     * Creates and shows the app menu anchored to the specified view.
     *
     * @param context             The context of the AppMenu (ensure the proper theme is set on
     *                            this context).
     * @param anchorView          The anchor {@link View} of the {@link ListPopupWindow}.
     * @param isByPermanentButton Whether or not permanent hardware button triggered it. (oppose to
     *                            software button or keyboard).
     * @param screenRotation      Current device screen rotation.
     * @param visibleDisplayFrame The display area rect in which AppMenu is supposed to fit in.
     * @param screenHeight        Current device screen height.
     * @param footerResourceId    The resource id for a view to add to the end of the menu list.
     *                            Can be 0 if no such view is required.
     */
    void show(Context context, View anchorView, boolean isByPermanentButton, int screenRotation,
            Rect visibleDisplayFrame, int screenHeight, int footerResourceId) {
        mPopup = new ListPopupWindow(context, null, android.R.attr.popupMenuStyle);
        mPopup.setModal(true);
        mPopup.setAnchorView(anchorView);
        mPopup.setInputMethodMode(PopupWindow.INPUT_METHOD_NOT_NEEDED);

        // Inflate and measure the optional footer so its height can be subtracted
        // from the space available to menu rows in setMenuHeight().
        int footerHeight = 0;
        if (footerResourceId != 0) {
            mPopup.setPromptPosition(ListPopupWindow.POSITION_PROMPT_BELOW);
            View promptView = LayoutInflater.from(context).inflate(footerResourceId, null);
            mPopup.setPromptView(promptView);
            int measureSpec = MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED);
            promptView.measure(measureSpec, measureSpec);
            footerHeight = promptView.getMeasuredHeight();
        }

        mPopup.setOnDismissListener(new OnDismissListener() {
            @Override
            public void onDismiss() {
                // Deselect the anchor button (if any) and stop any in-flight row animation.
                if (mPopup.getAnchorView() instanceof ImageButton) {
                    ((ImageButton) mPopup.getAnchorView()).setSelected(false);
                }
                if (mMenuItemEnterAnimator != null) mMenuItemEnterAnimator.cancel();
                mHandler.appMenuDismissed();
                mHandler.onMenuVisibilityChanged(false);
            }
        });

        // Some OEMs don't actually let us change the background... but they still return the
        // padding of the new background, which breaks the menu height. If we still have a
        // drawable here even though our style says @null we should use this padding instead...
        Drawable originalBgDrawable = mPopup.getBackground();

        // Need to explicitly set the background here.  Relying on it being set in the style caused
        // an incorrectly drawn background.
        if (isByPermanentButton) {
            mPopup.setBackgroundDrawable(
                    ApiCompatibilityUtils.getDrawable(context.getResources(), R.drawable.menu_bg));
        } else {
            mPopup.setBackgroundDrawable(ApiCompatibilityUtils.getDrawable(
                    context.getResources(), R.drawable.edge_menu_bg));
            mPopup.setAnimationStyle(R.style.OverflowMenuAnim);
        }

        // Turn off window animations for low end devices.
        if (SysUtils.isLowEndDevice()) mPopup.setAnimationStyle(0);

        // Background padding contributes to the popup width on both sides.
        Rect bgPadding = new Rect();
        mPopup.getBackground().getPadding(bgPadding);
        int popupWidth = context.getResources().getDimensionPixelSize(R.dimen.menu_width)
                + bgPadding.left + bgPadding.right;
        mPopup.setWidth(popupWidth);

        mCurrentScreenRotation = screenRotation;
        mIsByPermanentButton = isByPermanentButton;

        // Extract visible items from the Menu.
        int numItems = mMenu.size();
        List<MenuItem> menuItems = new ArrayList<MenuItem>();
        for (int i = 0; i < numItems; ++i) {
            MenuItem item = mMenu.getItem(i);
            if (item.isVisible()) {
                menuItems.add(item);
            }
        }

        // For the hardware-button case, fall back to the original background's vertical
        // padding for height computations (see OEM note above).
        Rect sizingPadding = new Rect(bgPadding);
        if (isByPermanentButton && originalBgDrawable != null) {
            Rect originalPadding = new Rect();
            originalBgDrawable.getPadding(originalPadding);
            sizingPadding.top = originalPadding.top;
            sizingPadding.bottom = originalPadding.bottom;
        }

        // A List adapter for visible items in the Menu. The first row is added as a header to the
        // list view.
        mAdapter = new AppMenuAdapter(this, menuItems, LayoutInflater.from(context));
        mPopup.setAdapter(mAdapter);

        setMenuHeight(
                menuItems.size(), visibleDisplayFrame, screenHeight, sizingPadding, footerHeight);
        setPopupOffset(mPopup, mCurrentScreenRotation, visibleDisplayFrame, sizingPadding);
        mPopup.setOnItemClickListener(this);
        mPopup.show();
        mPopup.getListView().setItemsCanFocus(true);
        mPopup.getListView().setOnKeyListener(this);

        mHandler.onMenuVisibilityChanged(true);

        if (mVerticalFadeDistance > 0) {
            mPopup.getListView().setVerticalFadingEdgeEnabled(true);
            mPopup.getListView().setFadingEdgeLength(mVerticalFadeDistance);
        }

        // Don't animate the menu items for low end devices.
        if (!SysUtils.isLowEndDevice()) {
            // Wait until the list has been laid out so each row's animator tag exists.
            mPopup.getListView().addOnLayoutChangeListener(new View.OnLayoutChangeListener() {
                @Override
                public void onLayoutChange(View v, int left, int top, int right, int bottom,
                        int oldLeft, int oldTop, int oldRight, int oldBottom) {
                    mPopup.getListView().removeOnLayoutChangeListener(this);
                    runMenuItemEnterAnimations();
                }
            });
        }
    }

    /**
     * Positions the popup relative to its anchor: centered/right-aligned near the estimated
     * hardware menu button location when triggered by a permanent button, otherwise shifted
     * up to overlap the software anchor view.
     */
    private void setPopupOffset(
            ListPopupWindow popup, int screenRotation, Rect appRect, Rect padding) {
        int[] anchorLocation = new int[2];
        popup.getAnchorView().getLocationInWindow(anchorLocation);
        int anchorHeight = popup.getAnchorView().getHeight();

        // If we have a hardware menu button, locate the app menu closer to the estimated
        // hardware menu button location.
        if (mIsByPermanentButton) {
            int horizontalOffset = -anchorLocation[0];
            switch (screenRotation) {
                case Surface.ROTATION_0:
                case Surface.ROTATION_180:
                    // Assumed button position: bottom center of the screen.
                    horizontalOffset += (appRect.width() - mPopup.getWidth()) / 2;
                    break;
                case Surface.ROTATION_90:
                    // Assumed button position: right edge.
                    horizontalOffset += appRect.width() - mPopup.getWidth();
                    break;
                case Surface.ROTATION_270:
                    // Assumed button position: left edge; no extra offset needed.
                    break;
                default:
                    assert false;
                    break;
            }
            popup.setHorizontalOffset(horizontalOffset);
            // The menu is displayed above the anchored view, so shift the menu up by the bottom
            // padding of the background.
            popup.setVerticalOffset(-padding.bottom);
        } else {
            // The menu is displayed over and below the anchored view, so shift the menu up by the
            // height of the anchor view.
            popup.setVerticalOffset(-mNegativeSoftwareVerticalOffset - anchorHeight);
        }
    }

    /**
     * Handles clicks on the AppMenu popup.
     * @param menuItem The menu item in the popup that was clicked.
     */
    void onItemClick(MenuItem menuItem) {
        if (menuItem.isEnabled()) {
            // Dismiss first so the menu is gone before the action (e.g. a navigation) runs.
            dismiss();
            mHandler.onOptionsItemSelected(menuItem);
        }
    }

    @Override
    public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
        onItemClick(mAdapter.getItem(position));
    }

    @Override
    public boolean onKey(View v, int keyCode, KeyEvent event) {
        if (mPopup == null || mPopup.getListView() == null) return false;

        // Track MENU key presses so a full down/up pair toggles (closes) the open menu,
        // mirroring the framework's long-press handling via KeyDispatcherState.
        if (event.getKeyCode() == KeyEvent.KEYCODE_MENU) {
            if (event.getAction() == KeyEvent.ACTION_DOWN && event.getRepeatCount() == 0) {
                event.startTracking();
                v.getKeyDispatcherState().startTracking(event, this);
                return true;
            } else if (event.getAction() == KeyEvent.ACTION_UP) {
                v.getKeyDispatcherState().handleUpEvent(event);
                if (event.isTracking() && !event.isCanceled()) {
                    dismiss();
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Dismisses the app menu and cancels the drag-to-scroll if it is taking place.
     */
    void dismiss() {
        if (isShowing()) {
            mPopup.dismiss();
        }
    }

    /**
     * @return Whether the app menu is currently showing.
     */
    boolean isShowing() {
        if (mPopup == null) {
            return false;
        }
        return mPopup.isShowing();
    }

    /**
     * @return ListPopupWindow that displays all the menu options.
     */
    ListPopupWindow getPopup() {
        return mPopup;
    }

    /**
     * @return The menu instance inside of this class.
     */
    @VisibleForTesting
    public Menu getMenuForTest() {
        return mMenu;
    }

    /**
     * Computes and sets the popup height: WRAP_CONTENT when every row fits in the space
     * around the anchor, otherwise a height that shows a whole number of rows plus a
     * {@link #LAST_ITEM_SHOW_FRACTION} sliver of the next row as a scroll affordance.
     */
    private void setMenuHeight(int numMenuItems, Rect appDimensions,
            int screenHeight, Rect padding, int footerHeight) {
        assert mPopup.getAnchorView() != null;
        View anchorView = mPopup.getAnchorView();
        int[] anchorViewLocation = new int[2];
        anchorView.getLocationOnScreen(anchorViewLocation);
        // Convert to coordinates relative to the visible display frame.
        anchorViewLocation[1] -= appDimensions.top;
        int anchorViewImpactHeight = mIsByPermanentButton ? anchorView.getHeight() : 0;

        // Set appDimensions.height() for abnormal anchorViewLocation.
        if (anchorViewLocation[1] > screenHeight) {
            anchorViewLocation[1] = appDimensions.height();
        }
        // Use whichever side of the anchor (above or below) offers more room.
        int availableScreenSpace = Math.max(anchorViewLocation[1],
                appDimensions.height() - anchorViewLocation[1] - anchorViewImpactHeight);

        availableScreenSpace -= padding.bottom + footerHeight;
        if (mIsByPermanentButton) availableScreenSpace -= padding.top;

        int numCanFit = availableScreenSpace / (mItemRowHeight + mItemDividerHeight);

        // Fade out the last item if we cannot fit all items.
        if (numCanFit < numMenuItems) {
            int spaceForFullItems = numCanFit * (mItemRowHeight + mItemDividerHeight);
            int spaceForPartialItem = (int) (LAST_ITEM_SHOW_FRACTION * mItemRowHeight);
            // Determine which item needs hiding: keep all full rows plus the sliver if it
            // fits, otherwise drop one full row to make room for the sliver.
            if (spaceForFullItems + spaceForPartialItem < availableScreenSpace) {
                mPopup.setHeight(spaceForFullItems + spaceForPartialItem
                        + padding.top + padding.bottom);
            } else {
                mPopup.setHeight(spaceForFullItems - mItemRowHeight + spaceForPartialItem
                        + padding.top + padding.bottom);
            }
        } else {
            mPopup.setHeight(ViewGroup.LayoutParams.WRAP_CONTENT);
        }
    }

    /**
     * Collects the enter animator each visible row stored in its
     * {@code R.id.menu_item_enter_anim_id} tag and plays them together, recording
     * frame times into the histogram listener.
     */
    private void runMenuItemEnterAnimations() {
        mMenuItemEnterAnimator = new AnimatorSet();
        AnimatorSet.Builder builder = null;

        ViewGroup list = mPopup.getListView();
        for (int i = 0; i < list.getChildCount(); i++) {
            View view = list.getChildAt(i);
            Object animatorObject = view.getTag(R.id.menu_item_enter_anim_id);
            if (animatorObject != null) {
                if (builder == null) {
                    builder = mMenuItemEnterAnimator.play((Animator) animatorObject);
                } else {
                    builder.with((Animator) animatorObject);
                }
            }
        }

        mMenuItemEnterAnimator.addListener(mAnimationHistogramRecorder);
        mMenuItemEnterAnimator.start();
    }
}
| |
package keywhiz.service.resources.automation.v2;
import com.codahale.metrics.annotation.ExceptionMetered;
import com.codahale.metrics.annotation.Timed;
import com.google.common.collect.Sets;
import io.dropwizard.auth.Auth;
import java.net.URI;
import java.net.URISyntaxException;
import java.time.Instant;
import java.util.HashMap;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Stream;
import javax.inject.Inject;
import javax.validation.Valid;
import javax.ws.rs.BadRequestException;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.NotFoundException;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import keywhiz.api.automation.v2.ClientDetailResponseV2;
import keywhiz.api.automation.v2.CreateClientRequestV2;
import keywhiz.api.automation.v2.ModifyClientRequestV2;
import keywhiz.api.automation.v2.ModifyGroupsRequestV2;
import keywhiz.api.model.AutomationClient;
import keywhiz.api.model.Client;
import keywhiz.api.model.Group;
import keywhiz.api.model.SanitizedSecret;
import keywhiz.log.AuditLog;
import keywhiz.log.Event;
import keywhiz.log.EventTag;
import keywhiz.service.daos.AclDAO;
import keywhiz.service.daos.AclDAO.AclDAOFactory;
import keywhiz.service.daos.ClientDAO;
import keywhiz.service.daos.ClientDAO.ClientDAOFactory;
import keywhiz.service.daos.GroupDAO;
import keywhiz.service.daos.GroupDAO.GroupDAOFactory;
import keywhiz.service.exceptions.ConflictException;
import org.apache.commons.lang3.NotImplementedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.lang.String.format;
import static java.util.stream.Collectors.toSet;
import static javax.ws.rs.core.MediaType.APPLICATION_JSON;
import static keywhiz.Tracing.setTag;
import static keywhiz.Tracing.tagErrors;
/**
* parentEndpointName automation/v2-client-management
* <p>
* resourceDescription Automation endpoints to manage clients
*/
@Path("/automation/v2/clients")
public class ClientResource {
  private static final Logger logger = LoggerFactory.getLogger(ClientResource.class);

  // Read-only DAOs serve GET endpoints; read-write DAOs serve mutating endpoints.
  private final AclDAO aclDAOReadOnly;
  private final AclDAO aclDAOReadWrite;
  private final ClientDAO clientDAOReadOnly;
  private final ClientDAO clientDAOReadWrite;
  private final GroupDAO groupDAOReadWrite;
  // Records create/delete and membership-change events for auditing.
  private final AuditLog auditLog;

  @Inject public ClientResource(AclDAOFactory aclDAOFactory, ClientDAOFactory clientDAOFactory,
      GroupDAOFactory groupDAOFactory, AuditLog auditLog) {
    this.aclDAOReadOnly = aclDAOFactory.readonly();
    this.aclDAOReadWrite = aclDAOFactory.readwrite();
    this.clientDAOReadOnly = clientDAOFactory.readonly();
    this.clientDAOReadWrite = clientDAOFactory.readwrite();
    this.groupDAOReadWrite = groupDAOFactory.readwrite();
    this.auditLog = auditLog;
  }

  /**
   * Creates a client and assigns to given groups
   *
   * @param request JSON request to create a client
   * @return 200 if the client is created successfully, 409 if it already exists
   * <p>
   * responseMessage 201 Created client and assigned to given groups
   * <p>
   * responseMessage 409 Client already exists
   */
  @Timed @ExceptionMetered
  @POST
  @Consumes(APPLICATION_JSON)
  public Response createClient(@Auth AutomationClient automationClient,
      @Valid CreateClientRequestV2 request) {
    // tagErrors wraps the handler so failures are tagged on the active trace.
    return tagErrors(() -> doCreateClient(automationClient, request));
  }

  private Response doCreateClient(AutomationClient automationClient,
      CreateClientRequestV2 request) {
    String creator = automationClient.getName();
    String client = request.name();
    setTag("client", client);

    // Reject duplicates up front with a 409 before attempting the insert.
    clientDAOReadWrite.getClientByName(client).ifPresent((c) -> {
      logger.info("Automation ({}) - Client {} already exists", creator, client);
      throw new ConflictException("Client name already exists.");
    });

    // Creates new client record
    long clientId;
    try {
      // The SPIFFE ID must parse as a URI; otherwise the request is malformed.
      clientId = clientDAOReadWrite.createClient(client, creator, request.description(),
          new URI(request.spiffeId()));
    } catch (URISyntaxException e) {
      logger.info(format("Automation (%s) - Client %s could not be created because of invalid SPIFFE ID %s",
          creator, client, request.spiffeId()), e);
      throw new BadRequestException("Invalid SPIFFE ID provided (not a URI)");
    }
    auditLog.recordEvent(new Event(Instant.now(), EventTag.CLIENT_CREATE, creator, client));

    // Enrolls client in any requested groups; unknown group names are silently skipped
    // (groupsToGroupIds yields an empty Optional for them).
    groupsToGroupIds(request.groups())
        .forEach((maybeGroupId) -> maybeGroupId.ifPresent(
            (groupId) -> aclDAOReadWrite.findAndEnrollClient(clientId, groupId, auditLog, creator,
                new HashMap<>())));

    URI uri = UriBuilder.fromResource(ClientResource.class).path(client).build();
    return Response.created(uri).build();
  }

  /**
   * Retrieve listing of client names
   * <p>
   * responseMessage 200 List of client names
   */
  @Timed @ExceptionMetered
  @GET
  @Produces(APPLICATION_JSON)
  public Iterable<String> clientListing(@Auth AutomationClient automationClient) {
    return clientDAOReadOnly.getClients().stream()
        .map(Client::getName)
        .collect(toSet());
  }

  /**
   * Retrieve information on a client
   *
   * @param name Client name
   * @return the retrieved client
   * <p>
   * responseMessage 200 Client information retrieved
   * <p>
   * responseMessage 404 Client not found
   */
  @Timed @ExceptionMetered
  @GET
  @Path("{name}")
  @Produces(APPLICATION_JSON)
  public ClientDetailResponseV2 clientInfo(@Auth AutomationClient automationClient,
      @PathParam("name") String name) {
    Client client = clientDAOReadOnly.getClientByName(name)
        .orElseThrow(NotFoundException::new);
    return ClientDetailResponseV2.fromClient(client);
  }

  /**
   * Listing of groups accessible to a client
   *
   * @param name Client name
   * @return Listing of groups the client has membership to
   * <p>
   * responseMessage 200 Listing succeeded
   * <p>
   * responseMessage 404 Client not found
   */
  @Timed @ExceptionMetered
  @GET
  @Path("{name}/groups")
  @Produces(APPLICATION_JSON)
  public Iterable<String> clientGroupsListing(@Auth AutomationClient automationClient,
      @PathParam("name") String name) {
    Client client = clientDAOReadOnly.getClientByName(name)
        .orElseThrow(NotFoundException::new);
    return aclDAOReadOnly.getGroupsFor(client).stream()
        .map(Group::getName)
        .collect(toSet());
  }

  /**
   * Modify groups a client has membership in
   *
   * @param name Client name
   * @param request JSON request specifying which groups to add or remove
   * @return Listing of groups client has membership in
   * <p>
   * responseMessage 201 Client modified successfully
   * <p>
   * responseMessage 404 Client not found
   */
  @Timed @ExceptionMetered
  @PUT
  @Path("{name}/groups")
  @Produces(APPLICATION_JSON)
  public Iterable<String> modifyClientGroups(@Auth AutomationClient automationClient,
      @PathParam("name") String name, @Valid ModifyGroupsRequestV2 request) {
    Client client = clientDAOReadWrite.getClientByName(name)
        .orElseThrow(NotFoundException::new);
    String user = automationClient.getName();

    long clientId = client.getId();
    Set<String> oldGroups = aclDAOReadWrite.getGroupsFor(client).stream()
        .map(Group::getName)
        .collect(toSet());
    // Only enroll in groups the client is not already in, and only evict from
    // groups it actually belongs to, so audit events reflect real changes.
    Set<String> groupsToAdd = Sets.difference(request.addGroups(), oldGroups);
    Set<String> groupsToRemove = Sets.intersection(request.removeGroups(), oldGroups);

    // TODO: should optimize AclDAO to use names and return only name column
    groupsToGroupIds(groupsToAdd)
        .forEach((maybeGroupId) -> maybeGroupId.ifPresent(
            (groupId) -> aclDAOReadWrite.findAndEnrollClient(clientId, groupId, auditLog, user,
                new HashMap<>())));

    groupsToGroupIds(groupsToRemove)
        .forEach((maybeGroupId) -> maybeGroupId.ifPresent(
            (groupId) -> aclDAOReadWrite.findAndEvictClient(clientId, groupId, auditLog, user,
                new HashMap<>())));

    // Return the post-modification membership, re-read from the DAO.
    return aclDAOReadWrite.getGroupsFor(client).stream()
        .map(Group::getName)
        .collect(toSet());
  }

  /**
   * Listing of secrets accessible to a client
   *
   * @param name Client name
   * @return Listing of secrets accessible to client
   * <p>
   * responseMessage 200 Client lookup succeeded
   * <p>
   * responseMessage 404 Client not found
   */
  @Timed @ExceptionMetered
  @GET
  @Path("{name}/secrets")
  @Produces(APPLICATION_JSON)
  public Iterable<String> clientSecretsListing(@Auth AutomationClient automationClient,
      @PathParam("name") String name) {
    Client client = clientDAOReadOnly.getClientByName(name)
        .orElseThrow(NotFoundException::new);
    return aclDAOReadOnly.getSanitizedSecretsFor(client).stream()
        .map(SanitizedSecret::name)
        .collect(toSet());
  }

  /**
   * Delete a client
   *
   * @param name Client name
   * @return 200 if the deletion was successful, 404 if the client was not found
   * <p>
   * responseMessage 204 Client deleted
   * <p>
   * responseMessage 404 Client not found
   */
  @Timed @ExceptionMetered
  @DELETE
  @Path("{name}")
  public Response deleteClient(@Auth AutomationClient automationClient,
      @PathParam("name") String name) {
    Client client = clientDAOReadWrite.getClientByName(name)
        .orElseThrow(NotFoundException::new);

    // Group memberships are deleted automatically by DB cascading.
    clientDAOReadWrite.deleteClient(client);
    auditLog.recordEvent(
        new Event(Instant.now(), EventTag.CLIENT_DELETE, automationClient.getName(),
            client.getName()));
    return Response.noContent().build();
  }

  /**
   * Modify a client
   *
   * @param currentName Client name
   * @param request JSON request to modify the client
   * @return the updated client
   * <p>
   * responseMessage 201 Client updated
   * <p>
   * responseMessage 404 Client not found
   */
  @Timed @ExceptionMetered
  @POST
  @Path("{name}")
  @Consumes(APPLICATION_JSON)
  @Produces(APPLICATION_JSON)
  public ClientDetailResponseV2 modifyClient(@Auth AutomationClient automationClient,
      @PathParam("name") String currentName, @Valid ModifyClientRequestV2 request) {
    Client client = clientDAOReadWrite.getClientByName(currentName)
        .orElseThrow(NotFoundException::new);
    String newName = request.name();

    // TODO: implement change client (name, updatedAt, updatedBy)
    throw new NotImplementedException(format(
        "Need to implement mutation methods in DAO to rename %s to %s", client.getName(), newName));
  }

  /**
   * Maps group names to their ids via the group DAO; a name with no matching
   * group yields an empty Optional rather than failing the whole request.
   */
  private Stream<Optional<Long>> groupsToGroupIds(Set<String> groupNames) {
    return groupNames.stream()
        .map(groupDAOReadWrite::getGroup)
        .map((group) -> group.map(Group::getId));
  }
}
| |
/*
* Copyright (c) 2007, 2016, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
/*
* Copyright 2001-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Id: Param.java,v 1.2.4.1 2005/09/02 11:03:42 pvedula Exp $
*/
package com.sun.org.apache.xalan.internal.xsltc.compiler;
import com.sun.org.apache.bcel.internal.classfile.Field;
import com.sun.org.apache.bcel.internal.generic.BranchHandle;
import com.sun.org.apache.bcel.internal.generic.CHECKCAST;
import com.sun.org.apache.bcel.internal.generic.IFNONNULL;
import com.sun.org.apache.bcel.internal.generic.ConstantPoolGen;
import com.sun.org.apache.bcel.internal.generic.INVOKEVIRTUAL;
import com.sun.org.apache.bcel.internal.generic.Instruction;
import com.sun.org.apache.bcel.internal.generic.InstructionList;
import com.sun.org.apache.bcel.internal.generic.PUSH;
import com.sun.org.apache.bcel.internal.generic.PUTFIELD;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.ClassGenerator;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.ErrorMsg;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.MethodGenerator;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.ReferenceType;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.Type;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.ObjectType;
import com.sun.org.apache.xalan.internal.xsltc.compiler.util.TypeCheckError;
import com.sun.org.apache.xalan.internal.xsltc.runtime.BasisLibrary;
/**
* @author Jacek Ambroziak
* @author Santiago Pericas-Geertsen
* @author Morten Jorgensen
* @author Erwin Bolwidt <ejb@klomp.org>
* @author John Howard <JohnH@schemasoft.com>
*/
final class Param extends VariableBase {

    /**
     * True if this Param is declared in a simple named template.
     * This is used to optimize codegen for parameter passing
     * in named templates.
     */
    private boolean _isInSimpleNamedTemplate = false;

    /**
     * Display variable as single string
     */
    public String toString() {
        return "param(" + _name + ")";
    }

    /**
     * Set the instruction for loading the value of this variable onto the
     * JVM stack and returns the old instruction.
     */
    public Instruction setLoadInstruction(Instruction instruction) {
        Instruction tmp = _loadInstruction;
        _loadInstruction = instruction;
        return tmp;
    }

    /**
     * Set the instruction for storing a value from the stack into this
     * variable and returns the old instruction.
     */
    public Instruction setStoreInstruction(Instruction instruction) {
        Instruction tmp = _storeInstruction;
        _storeInstruction = instruction;
        return tmp;
    }

    /**
     * Display variable in a full AST dump
     */
    public void display(int indent) {
        indent(indent);
        System.out.println("param " + _name);
        if (_select != null) {
            indent(indent + IndentIncrement);
            System.out.println("select " + _select.toString());
        }
        displayContents(indent + IndentIncrement);
    }

    /**
     * Parse the contents of the <xsl:param> element. This method must read
     * the 'name' (required) and 'select' (optional) attributes.
     */
    public void parseContents(Parser parser) {

        // Parse 'name' and 'select' attributes plus parameter contents
        super.parseContents(parser);

        // Add a ref to this param to its enclosing construct
        final SyntaxTreeNode parent = getParent();
        if (parent instanceof Stylesheet) {
            // Mark this as a global parameter
            _isLocal = false;
            // Check if a global variable with this name already exists...
            Param param = parser.getSymbolTable().lookupParam(_name);
            // ...and if it does we need to check import precedence
            if (param != null) {
                final int us = this.getImportPrecedence();
                final int them = param.getImportPrecedence();
                // It is an error if the two have the same import precedence
                if (us == them) {
                    final String name = _name.toString();
                    reportError(this, parser, ErrorMsg.VARIABLE_REDEF_ERR,name);
                }
                // Ignore this if previous definition has higher precedence
                else if (them > us) {
                    _ignore = true;
                    copyReferences(param);
                    return;
                }
                // Our precedence is higher: take over the earlier param's
                // references and disable it.
                else {
                    param.copyReferences(this);
                    param.disable();
                }
            }
            // Add this variable if we have higher precedence
            ((Stylesheet)parent).addParam(this);
            parser.getSymbolTable().addParam(this);
        }
        else if (parent instanceof Template) {
            Template template = (Template) parent;
            _isLocal = true;
            template.addParameter(this);
            // Remember the simple-named-template case for the optimized
            // codegen path in translate().
            if (template.isSimpleNamedTemplate()) {
                _isInSimpleNamedTemplate = true;
            }
        }
    }

    /**
     * Type-checks the parameter. The parameter type is determined by the
     * 'select' expression (if present) or is a result tree if the parameter
     * element has a body and no 'select' expression.
     */
    public Type typeCheck(SymbolTable stable) throws TypeCheckError {
        if (_select != null) {
            _type = _select.typeCheck(stable);
            // Wrap non-reference, non-object selects in a cast so the runtime
            // always sees a reference value for the parameter.
            if (_type instanceof ReferenceType == false && !(_type instanceof ObjectType)) {
                _select = new CastExpr(_select, Type.Reference);
            }
        }
        else if (hasContents()) {
            typeCheckContents(stable);
        }
        // The parameter value itself is always treated as a reference.
        _type = Type.Reference;

        // This element has no type (the parameter does, but the parameter
        // element itself does not).
        return Type.Void;
    }

    /**
     * Emits bytecode that registers/initializes this parameter at runtime.
     * Local params become JVM locals (or a conditional default-init in simple
     * named templates); global params become a translet field.
     * NOTE: instruction emission order here defines the behavior — do not reorder.
     */
    public void translate(ClassGenerator classGen, MethodGenerator methodGen) {
        final ConstantPoolGen cpg = classGen.getConstantPool();
        final InstructionList il = methodGen.getInstructionList();

        // Emit at most once; subsequent calls are no-ops.
        if (_ignore) return;
        _ignore = true;

        /*
         * To fix bug 24518 related to setting parameters of the form
         * {namespaceuri}localName which will get mapped to an instance
         * variable in the class.
         */
        final String name = BasisLibrary.mapQNameToJavaName(_name.toString());
        final String signature = _type.toSignature();
        final String className = _type.getClassName();

        if (isLocal()) {
            /*
              If simple named template then generate a conditional init of the
              param using its default value:
                if (param == null) param = <default-value>
            */
            if (_isInSimpleNamedTemplate) {
                il.append(loadInstruction());
                BranchHandle ifBlock = il.append(new IFNONNULL(null));
                translateValue(classGen, methodGen);
                il.append(storeInstruction());
                ifBlock.setTarget(il.append(NOP));
                return;
            }

            il.append(classGen.loadTranslet());
            il.append(new PUSH(cpg, name));
            translateValue(classGen, methodGen);
            il.append(new PUSH(cpg, true));

            // Call addParameter() from this class
            il.append(new INVOKEVIRTUAL(cpg.addMethodref(TRANSLET_CLASS,
                                                         ADD_PARAMETER,
                                                         ADD_PARAMETER_SIG)));
            // NOTE(review): identity comparison against the shared EMPTYSTRING
            // constant — presumably className is always one of xsltc's interned
            // constants; confirm before changing to equals().
            if (className != EMPTYSTRING) {
                il.append(new CHECKCAST(cpg.addClass(className)));
            }

            _type.translateUnBox(classGen, methodGen);

            if (_refs.isEmpty()) { // nobody uses the value
                il.append(_type.POP());
                _local = null;
            }
            else {              // normal case
                _local = methodGen.addLocalVariable2(name,
                                                     _type.toJCType(),
                                                     il.getEnd());
                // Cache the result of addParameter() in a local variable
                il.append(_type.STORE(_local.getIndex()));
            }
        }
        else {
            // Global parameter: create the backing field only once per class.
            if (classGen.containsField(name) == null) {
                classGen.addField(new Field(ACC_PUBLIC, cpg.addUtf8(name),
                                            cpg.addUtf8(signature),
                                            null, cpg.getConstantPool()));
                il.append(classGen.loadTranslet());
                il.append(DUP);
                il.append(new PUSH(cpg, name));
                translateValue(classGen, methodGen);
                il.append(new PUSH(cpg, true));

                // Call addParameter() from this class
                il.append(new INVOKEVIRTUAL(cpg.addMethodref(TRANSLET_CLASS,
                                                             ADD_PARAMETER,
                                                             ADD_PARAMETER_SIG)));

                _type.translateUnBox(classGen, methodGen);

                // Cache the result of addParameter() in a field
                // (same interned-constant identity comparison as above)
                if (className != EMPTYSTRING) {
                    il.append(new CHECKCAST(cpg.addClass(className)));
                }
                il.append(new PUTFIELD(cpg.addFieldref(classGen.getClassName(),
                                                       name, signature)));
            }
        }
    }
}
| |
/*
* (c) Copyright 2022 Micro Focus, L.P.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License v2.0 which accompany this distribution.
*
* The Apache License is available at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.cloudslang.content.hashicorp.terraform.utils;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.cloudslang.content.utils.NumberUtilities;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.List;
import static io.cloudslang.content.hashicorp.terraform.utils.Constants.Common.*;
import static io.cloudslang.content.hashicorp.terraform.utils.Inputs.CommonInputs.ORGANIZATION_NAME;
import static io.cloudslang.content.hashicorp.terraform.utils.Inputs.CreateOrganizationInputs.EMAIL;
import static io.cloudslang.content.hashicorp.terraform.utils.Inputs.CreateVariableInputs.*;
import static io.cloudslang.content.hashicorp.terraform.utils.Inputs.CreateWorkspaceVariableInputs.*;
import static io.cloudslang.content.hashicorp.terraform.utils.Inputs.CreateWorkspaceInputs.VCS_REPO_ID;
import static io.cloudslang.content.hashicorp.terraform.utils.Inputs.CreateWorkspaceInputs.WORKSPACE_NAME;
import static io.cloudslang.content.hashicorp.terraform.utils.Outputs.CreateVariableOutputs.VARIABLE_ID;
import static io.cloudslang.content.hashicorp.terraform.utils.Outputs.CreateWorkspaceOutputs.WORKSPACE_ID;
import static io.cloudslang.content.hashicorp.terraform.utils.Outputs.ListOAuthClientOutputs.OAUTH_TOKEN_ID;
import static io.cloudslang.content.httpclient.entities.HttpClientInputs.*;
import static io.cloudslang.content.utils.BooleanUtilities.isValid;
import static io.cloudslang.content.utils.OtherUtilities.isValidIpPort;
import static org.apache.commons.lang3.StringUtils.isEmpty;
public final class InputsValidation {

    /**
     * Pattern accepted for workspace / organization / variable names:
     * letters, digits, '*', '_' and '-'.
     */
    private static final String PROPERTY_NAME_REGEX = "[*a-zA-Z0-9_-]+";

    /**
     * Jackson ObjectMapper is thread-safe once configured; reuse a single
     * instance instead of allocating one per request-body validation.
     */
    private static final ObjectMapper REQUEST_BODY_MAPPER = new ObjectMapper();

    /**
     * Validates the HTTP client inputs shared by all Terraform operations.
     *
     * @param proxyPort              proxy port; must be a valid IP port
     * @param trustAllRoots          must be a valid boolean
     * @param connectTimeout         must be a valid integer
     * @param socketTimeout          must be a valid integer
     * @param keepAlive              must be a valid boolean
     * @param connectionsMaxPerRoute must be a valid integer
     * @param connectionsMaxTotal    must be a valid integer
     * @return list of validation error messages; empty when all inputs are valid
     */
    @NotNull
    public static List<String> verifyCommonInputs(@Nullable final String proxyPort,
                                                  @Nullable final String trustAllRoots,
                                                  @Nullable final String connectTimeout,
                                                  @Nullable final String socketTimeout,
                                                  @Nullable final String keepAlive,
                                                  @Nullable final String connectionsMaxPerRoute,
                                                  @Nullable final String connectionsMaxTotal) {
        final List<String> exceptionMessages = new ArrayList<>();
        addVerifyProxy(exceptionMessages, proxyPort, PROXY_PORT);
        addVerifyBoolean(exceptionMessages, trustAllRoots, TRUST_ALL_ROOTS);
        addVerifyNumber(exceptionMessages, connectTimeout, CONNECT_TIMEOUT);
        addVerifyNumber(exceptionMessages, socketTimeout, SOCKET_TIMEOUT);
        addVerifyBoolean(exceptionMessages, keepAlive, KEEP_ALIVE);
        addVerifyNumber(exceptionMessages, connectionsMaxPerRoute, CONNECTIONS_MAX_PER_ROUTE);
        addVerifyNumber(exceptionMessages, connectionsMaxTotal, CONNECTIONS_MAX_TOTAL);
        return exceptionMessages;
    }

    /**
     * Validates the inputs of the "create variable" operation. If a variable
     * name is supplied, the individual fields are validated; otherwise the
     * plain or sensitive request body is validated as JSON.
     * Null parameters are treated as empty (previously they caused an NPE).
     *
     * @return list of validation error messages; empty when valid
     */
    @NotNull
    public static List<String> verifyCreateVariableInputs(@Nullable final String workspaceId,
                                                          @Nullable final String variableCategory, @Nullable final String variableName,
                                                          @Nullable final String requestBody, String sensitiveRequestBody) {
        final List<String> exceptionMessages = new ArrayList<>();
        // isEmpty() is the null-safe StringUtils helper, not String#isEmpty().
        if (!isEmpty(variableName)) {
            addVerifyString(exceptionMessages, workspaceId, WORKSPACE_ID);
            addVerifyString(exceptionMessages, variableName, VARIABLE_NAME);
            validateInputPropertyName(exceptionMessages, variableName, VARIABLE_NAME);
            addVerifyString(exceptionMessages, variableCategory, VARIABLE_CATEGORY);
        } else if (!isEmpty(requestBody)) {
            addVerifyRequestBody(exceptionMessages, requestBody);
        } else {
            addVerifyRequestBody(exceptionMessages, sensitiveRequestBody);
        }
        return exceptionMessages;
    }

    /**
     * Validates the inputs of the "create workspace variable" operation.
     * Mirrors {@link #verifyCreateVariableInputs} for the workspace-scoped API.
     * Null parameters are treated as empty (previously they caused an NPE).
     *
     * @return list of validation error messages; empty when valid
     */
    @NotNull
    public static List<String> verifyCreateWorkspaceVariableInputs(@Nullable final String workspaceId,
                                                                   @Nullable final String workspaceVariableCategory, @Nullable final String workspaceVariableName,
                                                                   @Nullable final String requestBody, String sensitiveRequestBody) {
        final List<String> exceptionMessages = new ArrayList<>();
        if (!isEmpty(workspaceVariableName)) {
            addVerifyString(exceptionMessages, workspaceId, WORKSPACE_ID);
            addVerifyString(exceptionMessages, workspaceVariableName, WORKSPACE_VARIABLE_NAME);
            validateInputPropertyName(exceptionMessages, workspaceVariableName, WORKSPACE_VARIABLE_NAME);
            addVerifyString(exceptionMessages, workspaceVariableCategory, WORKSPACE_VARIABLE_CATEGORY);
        } else if (!isEmpty(requestBody)) {
            addVerifyRequestBody(exceptionMessages, requestBody);
        } else {
            addVerifyRequestBody(exceptionMessages, sensitiveRequestBody);
        }
        return exceptionMessages;
    }

    /**
     * Validates the JSON bodies of the "create variables" (bulk) operation.
     * Whichever of the plain / sensitive JSON inputs is present is validated;
     * if both are empty, both are reported.
     * Null parameters are treated as empty (previously they caused an NPE).
     *
     * @return list of validation error messages; empty when valid
     */
    @NotNull
    public static List<String> verifyCreateVariablesInput(@Nullable final String variableJson, @Nullable final String sensitiveVariableJson) {
        final List<String> exceptionMessages = new ArrayList<>();
        if (!isEmpty(variableJson)) {
            addVerifyRequestBody(exceptionMessages, variableJson);
        } else if (!isEmpty(sensitiveVariableJson)) {
            addVerifyRequestBody(exceptionMessages, sensitiveVariableJson);
        } else {
            addVerifyRequestBody(exceptionMessages, variableJson);
            addVerifyRequestBody(exceptionMessages, sensitiveVariableJson);
        }
        return exceptionMessages;
    }

    /**
     * Validates the JSON bodies of the "create workspace variables" (bulk)
     * operation. Mirrors {@link #verifyCreateVariablesInput}.
     * Null parameters are treated as empty (previously they caused an NPE).
     *
     * @return list of validation error messages; empty when valid
     */
    @NotNull
    public static List<String> verifyCreateWorkspaceVariablesInput(@Nullable final String workspaceVariableJson, @Nullable final String sensitiveWorkspaceVariableJson) {
        final List<String> exceptionMessages = new ArrayList<>();
        if (!isEmpty(workspaceVariableJson)) {
            addVerifyRequestBody(exceptionMessages, workspaceVariableJson);
        } else if (!isEmpty(sensitiveWorkspaceVariableJson)) {
            addVerifyRequestBody(exceptionMessages, sensitiveWorkspaceVariableJson);
        } else {
            addVerifyRequestBody(exceptionMessages, workspaceVariableJson);
            addVerifyRequestBody(exceptionMessages, sensitiveWorkspaceVariableJson);
        }
        return exceptionMessages;
    }

    /**
     * Validates the inputs of the "create run" operation: either a workspace
     * id (when no request body is given) or a JSON request body.
     * A null request body is treated as empty (previously it caused an NPE).
     *
     * @return list of validation error messages; empty when valid
     */
    @NotNull
    public static List<String> verifyCreateRunInputs(@Nullable final String workspaceId,
                                                     @Nullable final String requestBody) {
        final List<String> exceptionMessages = new ArrayList<>();
        if (isEmpty(requestBody)) {
            addVerifyString(exceptionMessages, workspaceId, WORKSPACE_ID);
        } else {
            addVerifyRequestBody(exceptionMessages, requestBody);
        }
        return exceptionMessages;
    }

    /**
     * Validates the inputs of the "create workspace" operation: either the
     * individual fields (when no request body is given) or a JSON request body.
     * A null request body is treated as empty (previously it caused an NPE).
     *
     * @return list of validation error messages; empty when valid
     */
    @NotNull
    public static List<String> verifyCreateWorkspaceInputs(@Nullable final String workspaceName, @Nullable final
    String vcsRepoId, @Nullable final String oauthTokenId, @Nullable final String requestBody) {
        final List<String> exceptionMessages = new ArrayList<>();
        if (isEmpty(requestBody)) {
            addVerifyString(exceptionMessages, workspaceName, WORKSPACE_NAME);
            validateInputPropertyName(exceptionMessages, workspaceName, WORKSPACE_NAME);
            addVerifyString(exceptionMessages, vcsRepoId, VCS_REPO_ID);
            addVerifyString(exceptionMessages, oauthTokenId, OAUTH_TOKEN_ID);
        } else {
            addVerifyRequestBody(exceptionMessages, requestBody);
        }
        return exceptionMessages;
    }

    /**
     * Validates the inputs of the "create organization" operation: either the
     * organization name and email (when no request body is given) or a JSON
     * request body. A null request body is treated as empty.
     *
     * @return list of validation error messages; empty when valid
     */
    @NotNull
    public static List<String> verifyCreateOrganizationInputs(@Nullable final String organizationName, @Nullable final String email, @Nullable final String requestBody) {
        final List<String> exceptionMessages = new ArrayList<>();
        if (isEmpty(requestBody)) {
            addVerifyString(exceptionMessages, organizationName, ORGANIZATION_NAME);
            validateInputPropertyName(exceptionMessages, organizationName, ORGANIZATION_NAME);
            addVerifyString(exceptionMessages, email, EMAIL);
        } else {
            addVerifyRequestBody(exceptionMessages, requestBody);
        }
        return exceptionMessages;
    }

    /**
     * Validates the inputs of the "update variable" operation: either a
     * variable id (when no request body is given) or a JSON request body.
     * A null request body is treated as empty (previously it caused an NPE).
     *
     * @return list of validation error messages; empty when valid
     */
    @NotNull
    public static List<String> verifyUpdateVariableInputs(@Nullable final String variableId,
                                                          @Nullable final String requestBody) {
        final List<String> exceptionMessages = new ArrayList<>();
        if (isEmpty(requestBody)) {
            addVerifyString(exceptionMessages, variableId, VARIABLE_ID);
        } else {
            addVerifyRequestBody(exceptionMessages, requestBody);
        }
        return exceptionMessages;
    }

    /**
     * Overload kept for API compatibility. {@code variableName} and
     * {@code variableValue} are accepted but not validated here — they are
     * free-form values passed through to the request.
     *
     * @return list of validation error messages; empty when valid
     */
    @NotNull
    public static List<String> verifyUpdateVariableInputs(@Nullable final String variableId, @Nullable final String variableName, @Nullable final String variableValue, @Nullable final String variableCategory,
                                                          @Nullable final String requestBody) {
        final List<String> exceptionMessages = new ArrayList<>();
        if (isEmpty(requestBody)) {
            addVerifyString(exceptionMessages, variableId, VARIABLE_ID);
            addVerifyString(exceptionMessages, variableCategory, VARIABLE_CATEGORY);
        } else {
            addVerifyRequestBody(exceptionMessages, requestBody);
        }
        return exceptionMessages;
    }

    /**
     * Validates the workspace name input of the "get workspace details"
     * operation (non-empty and matching the allowed name pattern).
     *
     * @return list of validation error messages; empty when valid
     */
    @NotNull
    public static List<String> verifyGetWorkspaceDetailsInputs(@Nullable final String workspaceName) {
        final List<String> exceptionMessages = new ArrayList<>();
        addVerifyString(exceptionMessages, workspaceName, WORKSPACE_NAME);
        validateInputPropertyName(exceptionMessages, workspaceName, WORKSPACE_NAME);
        return exceptionMessages;
    }

    /**
     * Validates the organization name input of the "get organization details"
     * operation (non-empty and matching the allowed name pattern).
     *
     * @return list of validation error messages; empty when valid
     */
    @NotNull
    public static List<String> verifyGetOrganizationDetailsInputs(@Nullable final String organizationName) {
        final List<String> exceptionMessages = new ArrayList<>();
        addVerifyString(exceptionMessages, organizationName, ORGANIZATION_NAME);
        validateInputPropertyName(exceptionMessages, organizationName, ORGANIZATION_NAME);
        return exceptionMessages;
    }

    /**
     * Validates the workspace id input of the "get current state version"
     * operation (non-empty).
     *
     * @return list of validation error messages; empty when valid
     */
    @NotNull
    public static List<String> verifyGetCurrentStateVersionInputs(@Nullable final String workspaceId) {
        final List<String> exceptionMessages = new ArrayList<>();
        addVerifyString(exceptionMessages, workspaceId, WORKSPACE_ID);
        return exceptionMessages;
    }

    /**
     * Appends an "invalid name" message to {@code exceptions} when
     * {@code input} does not match {@link #PROPERTY_NAME_REGEX}. A null input
     * is reported as invalid (previously it caused an NPE).
     *
     * @return the same {@code exceptions} list, for chaining
     */
    @NotNull
    public static List<String> validateInputPropertyName(@NotNull List<String> exceptions, @Nullable final String input,
                                                         @NotNull final String inputName) {
        final boolean isNameValid = input != null && input.matches(PROPERTY_NAME_REGEX);
        if (!isNameValid) {
            exceptions.add(String.format(EXCEPTION_INVALID_NAME, input, inputName));
        }
        return exceptions;
    }

    /**
     * Validates the optional JSON body of the "apply run" operation. An empty
     * (or null) body is allowed and produces no messages.
     *
     * @return list of validation error messages; empty when valid
     */
    @NotNull
    public static List<String> verifyApplyRunRequestBody(@Nullable final String requestBody) {
        final List<String> exceptionMessages = new ArrayList<>();
        if (!isEmpty(requestBody)) {
            addVerifyRequestBody(exceptionMessages, requestBody);
        }
        return exceptionMessages;
    }

    /** Appends a message when the proxy port is empty or not a valid IP port. */
    @NotNull
    private static List<String> addVerifyProxy(@NotNull List<String> exceptions, @Nullable final String input,
                                               @NotNull final String inputName) {
        if (isEmpty(input)) {
            exceptions.add(String.format(EXCEPTION_NULL_EMPTY, inputName));
        } else if (!isValidIpPort(input)) {
            exceptions.add(String.format(EXCEPTION_INVALID_PROXY, inputName));
        }
        return exceptions;
    }

    /** Appends a message when the input is null or empty. */
    @NotNull
    private static List<String> addVerifyString(@NotNull List<String> exceptions, @Nullable final String input,
                                                @NotNull final String inputName) {
        if (isEmpty(input)) {
            exceptions.add(String.format(EXCEPTION_NULL_EMPTY, inputName));
        }
        return exceptions;
    }

    /** Appends a message when the input is empty or not a valid boolean. */
    @NotNull
    private static List<String> addVerifyBoolean(@NotNull List<String> exceptions, @Nullable final String input,
                                                 @NotNull final String inputName) {
        if (isEmpty(input)) {
            exceptions.add(String.format(EXCEPTION_NULL_EMPTY, inputName));
        } else if (!isValid(input)) {
            exceptions.add(String.format(EXCEPTION_INVALID_BOOLEAN, input, inputName));
        }
        return exceptions;
    }

    /** Appends the parse error message when the input is not well-formed JSON. */
    @NotNull
    private static List<String> addVerifyRequestBody(@NotNull List<String> exceptions, @Nullable final String input) {
        try {
            REQUEST_BODY_MAPPER.readTree(input);
        } catch (Exception exception) {
            exceptions.add(exception.getMessage());
        }
        return exceptions;
    }

    /** Appends a message when the input is empty or not a valid integer. */
    @NotNull
    private static List<String> addVerifyNumber(@NotNull List<String> exceptions, @Nullable final String input,
                                                @NotNull final String inputName) {
        if (isEmpty(input)) {
            exceptions.add(String.format(EXCEPTION_NULL_EMPTY, inputName));
        } else if (!NumberUtilities.isValidInt(input)) {
            exceptions.add(String.format(EXCEPTION_INVALID_NUMBER, input, inputName));
        }
        return exceptions;
    }
}
| |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.speech.v1.stub;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
import com.google.api.gax.grpc.GrpcStubCallableFactory;
import com.google.api.gax.rpc.BidiStreamingCallable;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.speech.v1.LongRunningRecognizeMetadata;
import com.google.cloud.speech.v1.LongRunningRecognizeRequest;
import com.google.cloud.speech.v1.LongRunningRecognizeResponse;
import com.google.cloud.speech.v1.RecognizeRequest;
import com.google.cloud.speech.v1.RecognizeResponse;
import com.google.cloud.speech.v1.StreamingRecognizeRequest;
import com.google.cloud.speech.v1.StreamingRecognizeResponse;
import com.google.longrunning.Operation;
import com.google.longrunning.stub.GrpcOperationsStub;
import io.grpc.MethodDescriptor;
import io.grpc.protobuf.ProtoUtils;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* gRPC stub implementation for the Speech service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@Generated("by gapic-generator-java")
public class GrpcSpeechStub extends SpeechStub {
  // Static gRPC method descriptors: full method name plus protobuf
  // (un)marshallers for each Speech RPC.
  private static final MethodDescriptor<RecognizeRequest, RecognizeResponse>
      recognizeMethodDescriptor =
          MethodDescriptor.<RecognizeRequest, RecognizeResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.speech.v1.Speech/Recognize")
              .setRequestMarshaller(ProtoUtils.marshaller(RecognizeRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(RecognizeResponse.getDefaultInstance()))
              .build();
  private static final MethodDescriptor<LongRunningRecognizeRequest, Operation>
      longRunningRecognizeMethodDescriptor =
          MethodDescriptor.<LongRunningRecognizeRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.speech.v1.Speech/LongRunningRecognize")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(LongRunningRecognizeRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .build();
  private static final MethodDescriptor<StreamingRecognizeRequest, StreamingRecognizeResponse>
      streamingRecognizeMethodDescriptor =
          MethodDescriptor.<StreamingRecognizeRequest, StreamingRecognizeResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.BIDI_STREAMING)
              .setFullMethodName("google.cloud.speech.v1.Speech/StreamingRecognize")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(StreamingRecognizeRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(StreamingRecognizeResponse.getDefaultInstance()))
              .build();
  // Callables wired up in the constructor; exposed via the *Callable() accessors.
  private final UnaryCallable<RecognizeRequest, RecognizeResponse> recognizeCallable;
  private final UnaryCallable<LongRunningRecognizeRequest, Operation> longRunningRecognizeCallable;
  private final OperationCallable<
          LongRunningRecognizeRequest, LongRunningRecognizeResponse, LongRunningRecognizeMetadata>
      longRunningRecognizeOperationCallable;
  private final BidiStreamingCallable<StreamingRecognizeRequest, StreamingRecognizeResponse>
      streamingRecognizeCallable;
  // Aggregates all background resources from the client context so the stub's
  // lifecycle methods (close/shutdown/awaitTermination) manage them together.
  private final BackgroundResource backgroundResources;
  private final GrpcOperationsStub operationsStub;
  private final GrpcStubCallableFactory callableFactory;

  /** Creates a stub from the given settings, building a fresh client context. */
  public static final GrpcSpeechStub create(SpeechStubSettings settings) throws IOException {
    return new GrpcSpeechStub(settings, ClientContext.create(settings));
  }

  /** Creates a stub from an existing client context with default settings. */
  public static final GrpcSpeechStub create(ClientContext clientContext) throws IOException {
    return new GrpcSpeechStub(SpeechStubSettings.newBuilder().build(), clientContext);
  }

  /** Creates a stub with default settings and an explicit callable factory. */
  public static final GrpcSpeechStub create(
      ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException {
    return new GrpcSpeechStub(
        SpeechStubSettings.newBuilder().build(), clientContext, callableFactory);
  }

  /**
   * Constructs an instance of GrpcSpeechStub, using the given settings. This is protected so that
   * it is easy to make a subclass, but otherwise, the static factory methods should be preferred.
   */
  protected GrpcSpeechStub(SpeechStubSettings settings, ClientContext clientContext)
      throws IOException {
    this(settings, clientContext, new GrpcSpeechCallableFactory());
  }

  /**
   * Constructs an instance of GrpcSpeechStub, using the given settings. This is protected so that
   * it is easy to make a subclass, but otherwise, the static factory methods should be preferred.
   */
  protected GrpcSpeechStub(
      SpeechStubSettings settings,
      ClientContext clientContext,
      GrpcStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory);
    // Pair each method descriptor with gRPC transport settings, then let the
    // callable factory combine transport + retry/timeout settings + context.
    GrpcCallSettings<RecognizeRequest, RecognizeResponse> recognizeTransportSettings =
        GrpcCallSettings.<RecognizeRequest, RecognizeResponse>newBuilder()
            .setMethodDescriptor(recognizeMethodDescriptor)
            .build();
    GrpcCallSettings<LongRunningRecognizeRequest, Operation> longRunningRecognizeTransportSettings =
        GrpcCallSettings.<LongRunningRecognizeRequest, Operation>newBuilder()
            .setMethodDescriptor(longRunningRecognizeMethodDescriptor)
            .build();
    GrpcCallSettings<StreamingRecognizeRequest, StreamingRecognizeResponse>
        streamingRecognizeTransportSettings =
            GrpcCallSettings.<StreamingRecognizeRequest, StreamingRecognizeResponse>newBuilder()
                .setMethodDescriptor(streamingRecognizeMethodDescriptor)
                .build();
    this.recognizeCallable =
        callableFactory.createUnaryCallable(
            recognizeTransportSettings, settings.recognizeSettings(), clientContext);
    this.longRunningRecognizeCallable =
        callableFactory.createUnaryCallable(
            longRunningRecognizeTransportSettings,
            settings.longRunningRecognizeSettings(),
            clientContext);
    this.longRunningRecognizeOperationCallable =
        callableFactory.createOperationCallable(
            longRunningRecognizeTransportSettings,
            settings.longRunningRecognizeOperationSettings(),
            clientContext,
            operationsStub);
    this.streamingRecognizeCallable =
        callableFactory.createBidiStreamingCallable(
            streamingRecognizeTransportSettings,
            settings.streamingRecognizeSettings(),
            clientContext);
    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }

  /** Returns the operations stub used to poll long-running operations. */
  public GrpcOperationsStub getOperationsStub() {
    return operationsStub;
  }

  @Override
  public UnaryCallable<RecognizeRequest, RecognizeResponse> recognizeCallable() {
    return recognizeCallable;
  }

  @Override
  public UnaryCallable<LongRunningRecognizeRequest, Operation> longRunningRecognizeCallable() {
    return longRunningRecognizeCallable;
  }

  @Override
  public OperationCallable<
          LongRunningRecognizeRequest, LongRunningRecognizeResponse, LongRunningRecognizeMetadata>
      longRunningRecognizeOperationCallable() {
    return longRunningRecognizeOperationCallable;
  }

  @Override
  public BidiStreamingCallable<StreamingRecognizeRequest, StreamingRecognizeResponse>
      streamingRecognizeCallable() {
    return streamingRecognizeCallable;
  }

  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      throw e;
    } catch (Exception e) {
      // Wrap checked close failures; callers of close() expect unchecked.
      throw new IllegalStateException("Failed to close resource", e);
    }
  }

  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }

  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
| |
package org.sunflow.core.accel;
import java.io.FileWriter;
import java.io.IOException;
import org.sunflow.core.AccelerationStructure;
import org.sunflow.core.IntersectionState;
import org.sunflow.core.PrimitiveList;
import org.sunflow.core.Ray;
import org.sunflow.image.Color;
import org.sunflow.math.BoundingBox;
import org.sunflow.math.Point3;
import org.sunflow.system.Memory;
import org.sunflow.system.Timer;
import org.sunflow.system.UI;
import org.sunflow.system.UI.Module;
import org.sunflow.util.IntArray;
public class KDTree implements AccelerationStructure {
    // Flattened node array; the top two bits of a node's first int encode the
    // split axis / leaf flag (see dumpObj for how they are decoded).
    private int[] tree;
    // Primitive index lists referenced by leaf nodes (built from tempList in build()).
    private int[] primitives;
    // Source geometry this tree was built over.
    private PrimitiveList primitiveList;
    // World-space bounds of primitiveList, computed in build().
    private BoundingBox bounds;
    // Maximum number of primitives per leaf before a node is split (see buildTree).
    private int maxPrims;
    // SAH cost constants used when evaluating candidate splits.
    private static final float INTERSECT_COST = 0.5f;
    private static final float TRAVERSAL_COST = 1;
    private static final float EMPTY_BONUS = 0.2f;
    // Hard recursion limit for tree construction.
    private static final int MAX_DEPTH = 64;
    // When enabled via setDumpMode(), build() writes the tree out as
    // <dumpPrefix>.obj / <dumpPrefix>.mtl for visual inspection.
    private static boolean dump = false;
    private static String dumpPrefix = "kdtree";
    /**
     * Creates a tree with maxPrims = 0, i.e. nodes are split whenever they
     * contain any primitives (subject to MAX_DEPTH — see buildTree's
     * {@code numObjects > maxPrims} test).
     */
    public KDTree() {
        this(0);
    }
    /**
     * Creates a tree whose leaves hold at most {@code maxPrims} primitives
     * (best-effort; the MAX_DEPTH limit can force larger leaves).
     *
     * @param maxPrims maximum leaf size used by the build heuristic
     */
    public KDTree(int maxPrims) {
        this.maxPrims = maxPrims;
    }
private static class BuildStats {
private int numNodes;
private int numLeaves;
private int sumObjects;
private int minObjects;
private int maxObjects;
private int sumDepth;
private int minDepth;
private int maxDepth;
private int numLeaves0;
private int numLeaves1;
private int numLeaves2;
private int numLeaves3;
private int numLeaves4;
private int numLeaves4p;
BuildStats() {
numNodes = numLeaves = 0;
sumObjects = 0;
minObjects = Integer.MAX_VALUE;
maxObjects = Integer.MIN_VALUE;
sumDepth = 0;
minDepth = Integer.MAX_VALUE;
maxDepth = Integer.MIN_VALUE;
numLeaves0 = 0;
numLeaves1 = 0;
numLeaves2 = 0;
numLeaves3 = 0;
numLeaves4 = 0;
numLeaves4p = 0;
}
void updateInner() {
numNodes++;
}
void updateLeaf(int depth, int n) {
numLeaves++;
minDepth = Math.min(depth, minDepth);
maxDepth = Math.max(depth, maxDepth);
sumDepth += depth;
minObjects = Math.min(n, minObjects);
maxObjects = Math.max(n, maxObjects);
sumObjects += n;
switch (n) {
case 0:
numLeaves0++;
break;
case 1:
numLeaves1++;
break;
case 2:
numLeaves2++;
break;
case 3:
numLeaves3++;
break;
case 4:
numLeaves4++;
break;
default:
numLeaves4p++;
break;
}
}
void printStats() {
UI.printDetailed(Module.ACCEL, "KDTree stats:");
UI.printDetailed(Module.ACCEL, " * Nodes: %d", numNodes);
UI.printDetailed(Module.ACCEL, " * Leaves: %d", numLeaves);
UI.printDetailed(Module.ACCEL, " * Objects: min %d", minObjects);
UI.printDetailed(Module.ACCEL, " avg %.2f", (float) sumObjects / numLeaves);
UI.printDetailed(Module.ACCEL, " avg(n>0) %.2f", (float) sumObjects / (numLeaves - numLeaves0));
UI.printDetailed(Module.ACCEL, " max %d", maxObjects);
UI.printDetailed(Module.ACCEL, " * Depth: min %d", minDepth);
UI.printDetailed(Module.ACCEL, " avg %.2f", (float) sumDepth / numLeaves);
UI.printDetailed(Module.ACCEL, " max %d", maxDepth);
UI.printDetailed(Module.ACCEL, " * Leaves w/: N=0 %3d%%", 100 * numLeaves0 / numLeaves);
UI.printDetailed(Module.ACCEL, " N=1 %3d%%", 100 * numLeaves1 / numLeaves);
UI.printDetailed(Module.ACCEL, " N=2 %3d%%", 100 * numLeaves2 / numLeaves);
UI.printDetailed(Module.ACCEL, " N=3 %3d%%", 100 * numLeaves3 / numLeaves);
UI.printDetailed(Module.ACCEL, " N=4 %3d%%", 100 * numLeaves4 / numLeaves);
UI.printDetailed(Module.ACCEL, " N>4 %3d%%", 100 * numLeaves4p / numLeaves);
}
}
    /**
     * Enables or disables dumping of the built tree as Wavefront .obj/.mtl
     * files at the end of build(); {@code prefix} names the output files.
     * Note: these are static settings shared by all KDTree instances.
     */
    public static void setDumpMode(boolean dump, String prefix) {
        KDTree.dump = dump;
        KDTree.dumpPrefix = prefix;
    }
    /**
     * Builds the kd-tree over the given primitives: generates per-axis split
     * candidate events, radix-sorts them, recursively constructs the tree via
     * buildTree(), and stores the flattened node and primitive-index arrays.
     * Timing and statistics are reported at detailed verbosity; if dump mode
     * is enabled the resulting tree is also written out as .obj/.mtl files.
     *
     * @param primitives geometry to build the acceleration structure over
     */
    public void build(PrimitiveList primitives) {
        UI.printDetailed(Module.ACCEL, "KDTree settings");
        UI.printDetailed(Module.ACCEL, " * Max Leaf Size: %d", maxPrims);
        UI.printDetailed(Module.ACCEL, " * Max Depth: %d", MAX_DEPTH);
        UI.printDetailed(Module.ACCEL, " * Traversal cost: %.2f", TRAVERSAL_COST);
        UI.printDetailed(Module.ACCEL, " * Intersect cost: %.2f", INTERSECT_COST);
        UI.printDetailed(Module.ACCEL, " * Empty bonus: %.2f", EMPTY_BONUS);
        UI.printDetailed(Module.ACCEL, " * Dump leaves: %s", dump ? "enabled" : "disabled");
        Timer total = new Timer();
        total.start();
        this.primitiveList = primitives;
        // get the object space bounds
        bounds = primitives.getWorldBounds(null);
        int nPrim = primitiveList.getNumPrimitives(), nSplits = 0;
        // task.splits is sized for the worst case of 6 events per primitive
        // (an open and a close event on each of the 3 axes).
        BuildTask task = new BuildTask(nPrim);
        Timer prepare = new Timer();
        prepare.start();
        for (int i = 0; i < nPrim; i++) {
            for (int axis = 0; axis < 3; axis++) {
                float ls = primitiveList.getPrimitiveBound(i, 2 * axis + 0);
                float rs = primitiveList.getPrimitiveBound(i, 2 * axis + 1);
                if (ls == rs) {
                    // flat in this dimension: one PLANAR event instead of an
                    // OPENED/CLOSED pair
                    task.splits[nSplits] = pack(ls, PLANAR, axis, i);
                    nSplits++;
                } else {
                    task.splits[nSplits + 0] = pack(ls, OPENED, axis, i);
                    task.splits[nSplits + 1] = pack(rs, CLOSED, axis, i);
                    nSplits += 2;
                }
            }
        }
        task.n = nSplits;
        prepare.end();
        Timer t = new Timer();
        IntArray tempTree = new IntArray();
        IntArray tempList = new IntArray();
        // reserve two ints for the root node; buildTree fills them in at offset 0
        tempTree.add(0);
        tempTree.add(1);
        t.start();
        // sort it
        Timer sorting = new Timer();
        sorting.start();
        radix12(task.splits, task.n);
        sorting.end();
        // build the actual tree
        BuildStats stats = new BuildStats();
        buildTree(bounds.getMinimum().x, bounds.getMaximum().x, bounds.getMinimum().y, bounds.getMaximum().y, bounds.getMinimum().z, bounds.getMaximum().z, task, 1, tempTree, 0, tempList, stats);
        t.end();
        // write out final arrays
        // free some memory
        task = null;
        tree = tempTree.trim();
        tempTree = null;
        this.primitives = tempList.trim();
        tempList = null;
        total.end();
        // display some extra info
        stats.printStats();
        UI.printDetailed(Module.ACCEL, " * Node memory: %s", Memory.sizeof(tree));
        UI.printDetailed(Module.ACCEL, " * Object memory: %s", Memory.sizeof(this.primitives));
        UI.printDetailed(Module.ACCEL, " * Prepare time: %s", prepare);
        UI.printDetailed(Module.ACCEL, " * Sorting time: %s", sorting);
        UI.printDetailed(Module.ACCEL, " * Tree creation: %s", t);
        UI.printDetailed(Module.ACCEL, " * Build time: %s", total);
        if (dump) {
            try {
                // Write one material per possible leaf occupancy, colored from
                // blue (empty) through green/yellow/red to magenta (fullest).
                UI.printInfo(Module.ACCEL, "Dumping mtls to %s.mtl ...", dumpPrefix);
                FileWriter mtlFile = new FileWriter(dumpPrefix + ".mtl");
                int maxN = stats.maxObjects;
                for (int n = 0; n <= maxN; n++) {
                    float blend = (float) n / (float) maxN;
                    Color nc;
                    if (blend < 0.25)
                        nc = Color.blend(Color.BLUE, Color.GREEN, blend / 0.25f);
                    else if (blend < 0.5)
                        nc = Color.blend(Color.GREEN, Color.YELLOW, (blend - 0.25f) / 0.25f);
                    else if (blend < 0.75)
                        nc = Color.blend(Color.YELLOW, Color.RED, (blend - 0.50f) / 0.25f);
                    else
                        nc = Color.MAGENTA;
                    mtlFile.write(String.format("newmtl mtl%d\n", n));
                    float[] rgb = nc.getRGB();
                    mtlFile.write("Ka 0.1 0.1 0.1\n");
                    mtlFile.write(String.format("Kd %.12g %.12g %.12g\n", rgb[0], rgb[1], rgb[2]));
                    mtlFile.write("illum 1\n\n");
                }
                FileWriter objFile = new FileWriter(dumpPrefix + ".obj");
                UI.printInfo(Module.ACCEL, "Dumping tree to %s.obj ...", dumpPrefix);
                dumpObj(0, 0, maxN, new BoundingBox(bounds), objFile, mtlFile);
                objFile.close();
                mtlFile.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
    /**
     * Recursively writes the tree's leaf voxels as Wavefront .obj boxes.
     * Non-empty leaves are emitted as 8 vertices + 6 quad faces, using the
     * material matching their object count. Inner nodes recurse into both
     * children, temporarily narrowing {@code bounds} along the split axis and
     * restoring it afterwards (the same BoundingBox instance is reused).
     *
     * @param offset     index of the current node in {@code tree}
     * @param vertOffset number of vertices written so far (obj indices are 1-based)
     * @param maxN       largest leaf occupancy (unused here; kept for the call shape)
     * @return the updated vertex count after this subtree
     */
    private int dumpObj(int offset, int vertOffset, int maxN, BoundingBox bounds, FileWriter file, FileWriter mtlFile) throws IOException {
        if (offset == 0)
            file.write(String.format("mtllib %s.mtl\n", dumpPrefix));
        int nextOffset = tree[offset];
        // Top two bits of the node word: 0/1/2 << 30 = split axis, 3 << 30 = leaf.
        if ((nextOffset & (3 << 30)) == (3 << 30)) {
            // leaf
            int n = tree[offset + 1];
            if (n > 0) {
                // output the current voxel to the file
                Point3 min = bounds.getMinimum();
                Point3 max = bounds.getMaximum();
                file.write(String.format("o node%d\n", offset));
                file.write(String.format("v %g %g %g\n", max.x, max.y, min.z));
                file.write(String.format("v %g %g %g\n", max.x, min.y, min.z));
                file.write(String.format("v %g %g %g\n", min.x, min.y, min.z));
                file.write(String.format("v %g %g %g\n", min.x, max.y, min.z));
                file.write(String.format("v %g %g %g\n", max.x, max.y, max.z));
                file.write(String.format("v %g %g %g\n", max.x, min.y, max.z));
                file.write(String.format("v %g %g %g\n", min.x, min.y, max.z));
                file.write(String.format("v %g %g %g\n", min.x, max.y, max.z));
                int v0 = vertOffset;
                file.write(String.format("usemtl mtl%d\n", n));
                file.write("s off\n");
                file.write(String.format("f %d %d %d %d\n", v0 + 1, v0 + 2, v0 + 3, v0 + 4));
                file.write(String.format("f %d %d %d %d\n", v0 + 5, v0 + 8, v0 + 7, v0 + 6));
                file.write(String.format("f %d %d %d %d\n", v0 + 1, v0 + 5, v0 + 6, v0 + 2));
                file.write(String.format("f %d %d %d %d\n", v0 + 2, v0 + 6, v0 + 7, v0 + 3));
                file.write(String.format("f %d %d %d %d\n", v0 + 3, v0 + 7, v0 + 8, v0 + 4));
                file.write(String.format("f %d %d %d %d\n", v0 + 5, v0 + 1, v0 + 4, v0 + 8));
                vertOffset += 8;
            }
            return vertOffset;
        } else {
            // node, recurse
            int axis = nextOffset & (3 << 30), v0;
            float split = Float.intBitsToFloat(tree[offset + 1]), min, max;
            // strip the axis bits to get the left child's offset; the right
            // child follows immediately (nodes are two ints wide)
            nextOffset &= ~(3 << 30);
            switch (axis) {
                case 0:
                    max = bounds.getMaximum().x;
                    bounds.getMaximum().x = split;
                    v0 = dumpObj(nextOffset, vertOffset, maxN, bounds, file, mtlFile);
                    // restore and go to other side
                    bounds.getMaximum().x = max;
                    min = bounds.getMinimum().x;
                    bounds.getMinimum().x = split;
                    v0 = dumpObj(nextOffset + 2, v0, maxN, bounds, file, mtlFile);
                    bounds.getMinimum().x = min;
                    break;
                case 1 << 30:
                    max = bounds.getMaximum().y;
                    bounds.getMaximum().y = split;
                    v0 = dumpObj(nextOffset, vertOffset, maxN, bounds, file, mtlFile);
                    // restore and go to other side
                    bounds.getMaximum().y = max;
                    min = bounds.getMinimum().y;
                    bounds.getMinimum().y = split;
                    v0 = dumpObj(nextOffset + 2, v0, maxN, bounds, file, mtlFile);
                    bounds.getMinimum().y = min;
                    break;
                case 2 << 30:
                    max = bounds.getMaximum().z;
                    bounds.getMaximum().z = split;
                    v0 = dumpObj(nextOffset, vertOffset, maxN, bounds, file, mtlFile);
                    // restore and go to other side
                    bounds.getMaximum().z = max;
                    min = bounds.getMinimum().z;
                    bounds.getMinimum().z = split;
                    v0 = dumpObj(nextOffset + 2, v0, maxN, bounds, file, mtlFile);
                    // restore and go to other side
                    bounds.getMinimum().z = min;
                    break;
                default:
                    v0 = vertOffset;
                    break;
            }
            return v0;
        }
    }
    // type is encoded as 2 shifted bits (bits 31..30 of a packed split key);
    // the numeric order CLOSED < PLANAR < OPENED determines how coincident
    // events sort relative to each other in radix12().
    private static final long CLOSED = 0L << 30;
    private static final long PLANAR = 1L << 30;
    private static final long OPENED = 2L << 30;
    private static final long TYPE_MASK = 3L << 30;
// pack split values into a 64bit integer
private static long pack(float split, long type, int axis, int object) {
// pack float in sortable form
int f = Float.floatToRawIntBits(split);
int top = f ^ ((f >> 31) | 0x80000000);
long p = ((long) top & 0xFFFFFFFFL) << 32;
p |= type; // encode type as 2 bits
p |= ((long) axis) << 28; // encode axis as 2 bits
p |= (object & 0xFFFFFFFL); // pack object number
return p;
}
private static int unpackObject(long p) {
return (int) (p & 0xFFFFFFFL);
}
private static int unpackAxis(long p) {
return (int) (p >>> 28) & 3;
}
private static long unpackSplitType(long p) {
return p & TYPE_MASK;
}
private static float unpackSplit(long p) {
int f = (int) ((p >>> 32) & 0xFFFFFFFFL);
int m = ((f >>> 31) - 1) | 0x80000000;
return Float.intBitsToFloat(f ^ m);
}
// radix sort on top 36 bits - returns sorted result
private static void radix12(long[] splits, int n) {
// allocate working memory
final int[] hist = new int[2048];
final long[] sorted = new long[n];
// parallel histogramming pass
for (int i = 0; i < n; i++) {
long pi = splits[i];
hist[0x000 + ((int) (pi >>> 28) & 0x1FF)]++;
hist[0x200 + ((int) (pi >>> 37) & 0x1FF)]++;
hist[0x400 + ((int) (pi >>> 46) & 0x1FF)]++;
hist[0x600 + ((int) (pi >>> 55))]++;
}
// sum the histograms - each histogram entry records the number of
// values preceding itself.
{
int sum0 = 0, sum1 = 0, sum2 = 0, sum3 = 0;
int tsum;
for (int i = 0; i < 512; i++) {
tsum = hist[0x000 + i] + sum0;
hist[0x000 + i] = sum0 - 1;
sum0 = tsum;
tsum = hist[0x200 + i] + sum1;
hist[0x200 + i] = sum1 - 1;
sum1 = tsum;
tsum = hist[0x400 + i] + sum2;
hist[0x400 + i] = sum2 - 1;
sum2 = tsum;
tsum = hist[0x600 + i] + sum3;
hist[0x600 + i] = sum3 - 1;
sum3 = tsum;
}
}
// read/write histogram passes
for (int i = 0; i < n; i++) {
long pi = splits[i];
int pos = (int) (pi >>> 28) & 0x1FF;
sorted[++hist[0x000 + pos]] = pi;
}
for (int i = 0; i < n; i++) {
long pi = sorted[i];
int pos = (int) (pi >>> 37) & 0x1FF;
splits[++hist[0x200 + pos]] = pi;
}
for (int i = 0; i < n; i++) {
long pi = splits[i];
int pos = (int) (pi >>> 46) & 0x1FF;
sorted[++hist[0x400 + pos]] = pi;
}
for (int i = 0; i < n; i++) {
long pi = sorted[i];
int pos = (int) (pi >>> 55);
splits[++hist[0x600 + pos]] = pi;
}
}
/**
 * Per-node working set for the kd-tree build: the packed split events for
 * the node's objects plus a shared table holding each object's left/right
 * classification (2 bits per object).
 */
private static class BuildTask {
    long[] splits; // packed split events (up to 6 per object: 2 per axis)
    int numObjects; // number of objects covered by this node
    int n; // number of valid entries in splits
    byte[] leftRightTable; // 2 bits per object, shared across the whole build

    BuildTask(int numObjects) {
        this.numObjects = numObjects;
        this.n = 0;
        this.splits = new long[6 * numObjects];
        // 2 bits per object, so 4 objects per byte (rounded up)
        this.leftRightTable = new byte[(numObjects + 3) / 4];
    }

    BuildTask(int numObjects, BuildTask parent) {
        this.numObjects = numObjects;
        this.n = 0;
        this.splits = new long[6 * numObjects];
        // child tasks reuse the parent's classification table
        this.leftRightTable = parent.leftRightTable;
    }
}
/**
 * Recursively builds a kd-tree node for the axis-aligned box
 * [minx,maxx] x [miny,maxy] x [minz,maxz].
 *
 * <p>The split events in {@code task.splits} are assumed to be sorted
 * (see radix12). Every candidate plane is evaluated with a surface-area
 * based cost; if no split beats the cost of intersecting all primitives
 * directly, or the node is small/deep enough, a leaf is emitted instead.
 *
 * @param task     split events plus the shared left/right classification table
 * @param depth    current recursion depth (splitting stops at MAX_DEPTH)
 * @param tempTree flat node array; 2 ints per node are written at {@code offset}
 * @param offset   index in {@code tempTree} of this node's 2 ints
 * @param tempList accumulates object indices referenced by leaf nodes
 * @param stats    build statistics (inner/leaf node counters)
 */
private void buildTree(float minx, float maxx, float miny, float maxy, float minz, float maxz, BuildTask task, int depth, IntArray tempTree, int offset, IntArray tempList, BuildStats stats) {
    // get node bounding box extents
    if (task.numObjects > maxPrims && depth < MAX_DEPTH) {
        float dx = maxx - minx;
        float dy = maxy - miny;
        float dz = maxz - minz;
        // search for best possible split
        float bestCost = INTERSECT_COST * task.numObjects;
        int bestAxis = -1;
        int bestOffsetStart = -1;
        int bestOffsetEnd = -1;
        float bestSplit = 0;
        boolean bestPlanarLeft = false;
        int bnl = 0, bnr = 0;
        // inverse area of the bounding box (factor of 2 omitted)
        float area = (dx * dy + dy * dz + dz * dx);
        float ISECT_COST = INTERSECT_COST / area;
        // setup counts for each axis (nl/nr = objects left/right of the sweep plane)
        int[] nl = { 0, 0, 0 };
        int[] nr = { task.numObjects, task.numObjects, task.numObjects };
        // setup bounds for each axis
        float[] dp = { dy * dz, dz * dx, dx * dy };
        float[] ds = { dy + dz, dz + dx, dx + dy };
        float[] nodeMin = { minx, miny, minz };
        float[] nodeMax = { maxx, maxy, maxz };
        // search for best cost
        int nSplits = task.n;
        long[] splits = task.splits;
        byte[] lrtable = task.leftRightTable;
        for (int i = 0; i < nSplits;) {
            // extract current split
            long ptr = splits[i];
            float split = unpackSplit(ptr);
            int axis = unpackAxis(ptr);
            // mark current position
            int currentOffset = i;
            // count number of primitives start/stopping/lying on the
            // current plane
            int pClosed = 0, pPlanar = 0, pOpened = 0;
            // keep only the (split, axis) bits so each of the three event
            // types on this exact plane can be matched in turn
            long ptrMasked = ptr & (~TYPE_MASK & 0xFFFFFFFFF0000000L);
            long ptrClosed = ptrMasked | CLOSED;
            long ptrPlanar = ptrMasked | PLANAR;
            long ptrOpened = ptrMasked | OPENED;
            // events are sorted, so for a given plane all CLOSED events come
            // first, then PLANAR, then OPENED; reset the table byte holding
            // each touched object's classification bits while counting
            while (i < nSplits && (splits[i] & 0xFFFFFFFFF0000000L) == ptrClosed) {
                int obj = unpackObject(splits[i]);
                lrtable[obj >>> 2] = 0;
                pClosed++;
                i++;
            }
            while (i < nSplits && (splits[i] & 0xFFFFFFFFF0000000L) == ptrPlanar) {
                int obj = unpackObject(splits[i]);
                lrtable[obj >>> 2] = 0;
                pPlanar++;
                i++;
            }
            while (i < nSplits && (splits[i] & 0xFFFFFFFFF0000000L) == ptrOpened) {
                int obj = unpackObject(splits[i]);
                lrtable[obj >>> 2] = 0;
                pOpened++;
                i++;
            }
            // now we have summed all contributions from this plane
            nr[axis] -= pPlanar + pClosed;
            // compute cost
            if (split >= nodeMin[axis] && split <= nodeMax[axis]) {
                // left and right surface area (factor of 2 omitted)
                float dl = split - nodeMin[axis];
                float dr = nodeMax[axis] - split;
                float lp = dp[axis] + dl * ds[axis];
                float rp = dp[axis] + dr * ds[axis];
                // planar prims go to smallest cell always
                boolean planarLeft = dl < dr;
                int numLeft = nl[axis] + (planarLeft ? pPlanar : 0);
                int numRight = nr[axis] + (planarLeft ? 0 : pPlanar);
                // bonus for splits that carve off an empty, non-degenerate cell
                float eb = ((numLeft == 0 && dl > 0) || (numRight == 0 && dr > 0)) ? EMPTY_BONUS : 0;
                float cost = TRAVERSAL_COST + ISECT_COST * (1 - eb) * (lp * numLeft + rp * numRight);
                if (cost < bestCost) {
                    bestCost = cost;
                    bestAxis = axis;
                    bestSplit = split;
                    bestOffsetStart = currentOffset;
                    bestOffsetEnd = i;
                    bnl = numLeft;
                    bnr = numRight;
                    bestPlanarLeft = planarLeft;
                }
            }
            // move objects left
            nl[axis] += pOpened + pPlanar;
        }
        // debug check for correctness of the scan
        for (int axis = 0; axis < 3; axis++) {
            int numLeft = nl[axis];
            int numRight = nr[axis];
            if (numLeft != task.numObjects || numRight != 0)
                UI.printError(Module.ACCEL, "Didn't scan full range of objects @depth=%d. Left overs for axis %d: [L: %d] [R: %d]", depth, axis, numLeft, numRight);
        }
        // found best split?
        if (bestAxis != -1) {
            // allocate space for child nodes
            BuildTask taskL = new BuildTask(bnl, task);
            BuildTask taskR = new BuildTask(bnr, task);
            int lk = 0, rk = 0;
            // classify each object: bit 0 of its 2-bit table entry = left,
            // bit 1 = right. Events strictly before the chosen plane on the
            // best axis put their object on the left (unless they are
            // CLOSED, i.e. end before the plane on both sides).
            for (int i = 0; i < bestOffsetStart; i++) {
                long ptr = splits[i];
                if (unpackAxis(ptr) == bestAxis) {
                    if (unpackSplitType(ptr) != CLOSED) {
                        int obj = unpackObject(ptr);
                        lrtable[obj >>> 2] |= 1 << ((obj & 3) << 1);
                        lk++;
                    }
                }
            }
            // events on the plane itself: planar objects go to the side
            // chosen during the cost scan
            for (int i = bestOffsetStart; i < bestOffsetEnd; i++) {
                long ptr = splits[i];
                assert unpackAxis(ptr) == bestAxis;
                if (unpackSplitType(ptr) == PLANAR) {
                    if (bestPlanarLeft) {
                        int obj = unpackObject(ptr);
                        lrtable[obj >>> 2] |= 1 << ((obj & 3) << 1);
                        lk++;
                    } else {
                        int obj = unpackObject(ptr);
                        lrtable[obj >>> 2] |= 2 << ((obj & 3) << 1);
                        rk++;
                    }
                }
            }
            // events after the plane put their object on the right (unless
            // they are OPENED, i.e. start after the plane on both sides)
            for (int i = bestOffsetEnd; i < nSplits; i++) {
                long ptr = splits[i];
                if (unpackAxis(ptr) == bestAxis) {
                    if (unpackSplitType(ptr) != OPENED) {
                        int obj = unpackObject(ptr);
                        lrtable[obj >>> 2] |= 2 << ((obj & 3) << 1);
                        rk++;
                    }
                }
            }
            // output new splits while maintaining order
            // (objects straddling the plane get copied to both children)
            long[] splitsL = taskL.splits;
            long[] splitsR = taskR.splits;
            int nsl = 0, nsr = 0;
            for (int i = 0; i < nSplits; i++) {
                long ptr = splits[i];
                int obj = unpackObject(ptr);
                int idx = obj >>> 2;
                int mask = 1 << ((obj & 3) << 1);
                if ((lrtable[idx] & mask) != 0) {
                    splitsL[nsl] = ptr;
                    nsl++;
                }
                if ((lrtable[idx] & (mask << 1)) != 0) {
                    splitsR[nsr] = ptr;
                    nsr++;
                }
            }
            taskL.n = nsl;
            taskR.n = nsr;
            // free more memory (drop all references to this node's event array)
            task.splits = splits = splitsL = splitsR = null;
            task = null;
            // allocate child nodes (2 ints per node, children stored adjacently)
            int nextOffset = tempTree.getSize();
            tempTree.add(0);
            tempTree.add(0);
            tempTree.add(0);
            tempTree.add(0);
            // create current node: axis in the top 2 bits of the first int,
            // child offset in the lower 30 bits, split plane in the second int
            tempTree.set(offset + 0, (bestAxis << 30) | nextOffset);
            tempTree.set(offset + 1, Float.floatToRawIntBits(bestSplit));
            // recurse for child nodes - free object arrays after each step
            stats.updateInner();
            switch (bestAxis) {
                case 0:
                    buildTree(minx, bestSplit, miny, maxy, minz, maxz, taskL, depth + 1, tempTree, nextOffset, tempList, stats);
                    taskL = null;
                    buildTree(bestSplit, maxx, miny, maxy, minz, maxz, taskR, depth + 1, tempTree, nextOffset + 2, tempList, stats);
                    taskR = null;
                    return;
                case 1:
                    buildTree(minx, maxx, miny, bestSplit, minz, maxz, taskL, depth + 1, tempTree, nextOffset, tempList, stats);
                    taskL = null;
                    buildTree(minx, maxx, bestSplit, maxy, minz, maxz, taskR, depth + 1, tempTree, nextOffset + 2, tempList, stats);
                    taskR = null;
                    return;
                case 2:
                    buildTree(minx, maxx, miny, maxy, minz, bestSplit, taskL, depth + 1, tempTree, nextOffset, tempList, stats);
                    taskL = null;
                    buildTree(minx, maxx, miny, maxy, bestSplit, maxz, taskR, depth + 1, tempTree, nextOffset + 2, tempList, stats);
                    taskR = null;
                    return;
                default:
                    assert false;
            }
        }
    }
    // create leaf node: collect each object exactly once by scanning only
    // its axis-0, non-CLOSED event
    int listOffset = tempList.getSize();
    int n = 0;
    for (int i = 0; i < task.n; i++) {
        long ptr = task.splits[i];
        if (unpackAxis(ptr) == 0 && unpackSplitType(ptr) != CLOSED) {
            tempList.add(unpackObject(ptr));
            n++;
        }
    }
    stats.updateLeaf(depth, n);
    if (n != task.numObjects)
        UI.printError(Module.ACCEL, "Error creating leaf node - expecting %d found %d", task.numObjects, n);
    // leaf marker is axis value 3 in the top 2 bits; lower 30 bits index tempList
    tempTree.set(offset + 0, (3 << 30) | listOffset);
    tempTree.set(offset + 1, task.numObjects);
    // free some memory
    task.splits = null;
}
/**
 * Intersects the given ray with the primitives stored in this tree.
 *
 * <p>First clips the ray's [min,max] interval against the tree's bounding
 * box (slab test per axis, early-out when the interval becomes empty),
 * then traverses inner nodes front-to-back using the per-traversal stack
 * held in {@code state}, testing primitives at each visited leaf.
 *
 * @param r     ray to trace; NOTE(review): primitiveList.intersectPrimitive
 *              appears to shrink r's max on a hit — confirm against that API
 * @param state per-thread traversal state providing the node stack
 */
public void intersect(Ray r, IntersectionState state) {
    float intervalMin = r.getMin();
    float intervalMax = r.getMax();
    // clip the interval against the tree bounds along X
    float orgX = r.ox;
    float dirX = r.dx, invDirX = 1 / dirX;
    float t1, t2;
    t1 = (bounds.getMinimum().x - orgX) * invDirX;
    t2 = (bounds.getMaximum().x - orgX) * invDirX;
    if (invDirX > 0) {
        if (t1 > intervalMin)
            intervalMin = t1;
        if (t2 < intervalMax)
            intervalMax = t2;
    } else {
        if (t2 > intervalMin)
            intervalMin = t2;
        if (t1 < intervalMax)
            intervalMax = t1;
    }
    if (intervalMin > intervalMax)
        return;
    // clip the interval against the tree bounds along Y
    float orgY = r.oy;
    float dirY = r.dy, invDirY = 1 / dirY;
    t1 = (bounds.getMinimum().y - orgY) * invDirY;
    t2 = (bounds.getMaximum().y - orgY) * invDirY;
    if (invDirY > 0) {
        if (t1 > intervalMin)
            intervalMin = t1;
        if (t2 < intervalMax)
            intervalMax = t2;
    } else {
        if (t2 > intervalMin)
            intervalMin = t2;
        if (t1 < intervalMax)
            intervalMax = t1;
    }
    if (intervalMin > intervalMax)
        return;
    // clip the interval against the tree bounds along Z
    float orgZ = r.oz;
    float dirZ = r.dz, invDirZ = 1 / dirZ;
    t1 = (bounds.getMinimum().z - orgZ) * invDirZ;
    t2 = (bounds.getMaximum().z - orgZ) * invDirZ;
    if (invDirZ > 0) {
        if (t1 > intervalMin)
            intervalMin = t1;
        if (t2 < intervalMax)
            intervalMax = t2;
    } else {
        if (t2 > intervalMin)
            intervalMin = t2;
        if (t1 < intervalMax)
            intervalMax = t1;
    }
    if (intervalMin > intervalMax)
        return;
    // compute custom offsets from direction sign bit:
    // each value is 0 (positive direction) or 2 (negative direction);
    // since children occupy 2 ints each, front child = offset + xFront,
    // back child = offset + xBack (the other one, obtained via ^ 2)
    int offsetXFront = (Float.floatToRawIntBits(dirX) & (1 << 31)) >>> 30;
    int offsetYFront = (Float.floatToRawIntBits(dirY) & (1 << 31)) >>> 30;
    int offsetZFront = (Float.floatToRawIntBits(dirZ) & (1 << 31)) >>> 30;
    int offsetXBack = offsetXFront ^ 2;
    int offsetYBack = offsetYFront ^ 2;
    int offsetZBack = offsetZFront ^ 2;
    IntersectionState.StackNode[] stack = state.getStack();
    int stackTop = state.getStackTop();
    int stackPos = stackTop;
    int node = 0;
    while (true) {
        // decode node: top 2 bits of the first int select the split axis
        // (3 marks a leaf), lower 30 bits hold the child/list offset
        int tn = tree[node];
        int axis = tn & (3 << 30);
        int offset = tn & ~(3 << 30);
        switch (axis) {
            case 0: {
                // x axis: distance along the ray to the split plane
                float d = (Float.intBitsToFloat(tree[node + 1]) - orgX) * invDirX;
                int back = offset + offsetXBack;
                node = back;
                if (d < intervalMin)
                    continue; // plane behind the interval: only the back child matters
                node = offset + offsetXFront; // front
                if (d > intervalMax)
                    continue; // plane beyond the interval: only the front child matters
                // push back node
                stack[stackPos].node = back;
                stack[stackPos].near = (d >= intervalMin) ? d : intervalMin;
                stack[stackPos].far = intervalMax;
                stackPos++;
                // update ray interval for front node
                intervalMax = (d <= intervalMax) ? d : intervalMax;
                continue;
            }
            case 1 << 30: {
                // y axis
                float d = (Float.intBitsToFloat(tree[node + 1]) - orgY) * invDirY;
                int back = offset + offsetYBack;
                node = back;
                if (d < intervalMin)
                    continue;
                node = offset + offsetYFront; // front
                if (d > intervalMax)
                    continue;
                // push back node
                stack[stackPos].node = back;
                stack[stackPos].near = (d >= intervalMin) ? d : intervalMin;
                stack[stackPos].far = intervalMax;
                stackPos++;
                // update ray interval for front node
                intervalMax = (d <= intervalMax) ? d : intervalMax;
                continue;
            }
            case 2 << 30: {
                // z axis
                float d = (Float.intBitsToFloat(tree[node + 1]) - orgZ) * invDirZ;
                int back = offset + offsetZBack;
                node = back;
                if (d < intervalMin)
                    continue;
                node = offset + offsetZFront; // front
                if (d > intervalMax)
                    continue;
                // push back node
                stack[stackPos].node = back;
                stack[stackPos].near = (d >= intervalMin) ? d : intervalMin;
                stack[stackPos].far = intervalMax;
                stackPos++;
                // update ray interval for front node
                intervalMax = (d <= intervalMax) ? d : intervalMax;
                continue;
            }
            default: {
                // leaf - test some objects
                // offset indexes into primitives[], tree[node + 1] holds the count
                int n = tree[node + 1];
                while (n > 0) {
                    primitiveList.intersectPrimitive(r, primitives[offset], state);
                    n--;
                    offset++;
                }
                // if the ray's max dropped inside this cell, a closer hit
                // cannot exist in any deferred (farther) cell
                if (r.getMax() < intervalMax)
                    return;
                do {
                    // stack is empty?
                    if (stackPos == stackTop)
                        return;
                    // move back up the stack
                    stackPos--;
                    intervalMin = stack[stackPos].near;
                    if (r.getMax() < intervalMin)
                        continue; // deferred cell is entirely beyond the current hit
                    node = stack[stackPos].node;
                    intervalMax = stack[stackPos].far;
                    break;
                } while (true);
            }
        } // switch
    } // traversal loop
}
}
| |
package uk.gov.dvsa.motr.web.component.subscription.service;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import uk.gov.dvsa.motr.notifications.service.NotifyService;
import uk.gov.dvsa.motr.web.component.subscription.exception.InvalidConfirmationIdException;
import uk.gov.dvsa.motr.web.component.subscription.helper.UrlHelper;
import uk.gov.dvsa.motr.web.component.subscription.model.SmsConfirmation;
import uk.gov.dvsa.motr.web.component.subscription.persistence.DynamoDbSmsConfirmationRepository;
import uk.gov.dvsa.motr.web.cookie.MotrSession;
import java.time.LocalDateTime;
import java.util.Optional;
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static uk.gov.dvsa.motr.web.component.subscription.service.SmsConfirmationService.Confirmation.CODE_NOT_VALID;
import static uk.gov.dvsa.motr.web.component.subscription.service.SmsConfirmationService.Confirmation.CODE_NOT_VALID_MAX_ATTEMPTS_REACHED;
import static uk.gov.dvsa.motr.web.component.subscription.service.SmsConfirmationService.Confirmation.CODE_VALID;
public class SmsConfirmationServiceTest {
private DynamoDbSmsConfirmationRepository smsConfirmationRepository;
private final NotifyService notifyService = mock(NotifyService.class);
private final UrlHelper urlHelper = mock(UrlHelper.class);
private final MotrSession motrSession = mock(MotrSession.class);
private static final String TEST_VRM = "TEST-REG";
private static final String INCORRECT_TEST_VRM = "TEST-REG-123";
private static final String MOBILE = "07912345678";
private static final String INCORRECT_MOBILE = "07777777777";
private static final String CONFIRMATION_ID = "Asd";
private static final String CONFIRMATION_CODE = "123456";
private static final String INCORRECT_CONFIRMATION_CODE = "654321";
private static final int INITIAL_ATTEMPTS = 0;
private static final int INITIAL_RESEND_ATTEMPTS = 0;
private static final String PHONE_CONFIRMATION_LINK = "PHONE_CONFIRMATION_LINK";
private SmsConfirmationService smsConfirmationService;
@Before
public void setUp() {
smsConfirmationRepository = mock(DynamoDbSmsConfirmationRepository.class);
this.smsConfirmationService = new SmsConfirmationService(
smsConfirmationRepository,
notifyService,
urlHelper,
motrSession
);
when(urlHelper.phoneConfirmationLink()).thenReturn(PHONE_CONFIRMATION_LINK);
}
@Test
public void handleSmsConfirmationCreationWillCreateSmsConfirmation() throws InvalidConfirmationIdException {
ArgumentCaptor<SmsConfirmation> smsConfirmationArgumentCaptor = ArgumentCaptor.forClass(SmsConfirmation.class);
when(smsConfirmationRepository.findByConfirmationId(CONFIRMATION_ID)).thenReturn(Optional.empty());
String redirectUri = this.smsConfirmationService.handleSmsConfirmationCreation(TEST_VRM, MOBILE, CONFIRMATION_ID);
verify(smsConfirmationRepository, times(1)).saveWithResendTimestampUpdate(smsConfirmationArgumentCaptor.capture());
verify(notifyService, times(1)).sendPhoneNumberConfirmationSms(any(), any());
assertEquals(smsConfirmationArgumentCaptor.getValue().getAttempts(), INITIAL_ATTEMPTS);
assertEquals(smsConfirmationArgumentCaptor.getValue().getPhoneNumber(), MOBILE);
assertEquals(smsConfirmationArgumentCaptor.getValue().getVrm(), TEST_VRM);
assertEquals(smsConfirmationArgumentCaptor.getValue().getConfirmationId(), CONFIRMATION_ID);
assertEquals(smsConfirmationArgumentCaptor.getValue().getResendAttempts(), INITIAL_RESEND_ATTEMPTS);
assertEquals(PHONE_CONFIRMATION_LINK, redirectUri);
}
@Test
public void verifySmsConfirmationCodeWillReturnTrueWhenCodeIsValidForThatRecord() throws Exception {
SmsConfirmation smsConfirmation = new SmsConfirmation()
.setCode(CONFIRMATION_CODE)
.setConfirmationId(CONFIRMATION_ID)
.setPhoneNumber(MOBILE)
.setVrm(TEST_VRM);
withExpectedSmsConfirmation(Optional.of(smsConfirmation));
SmsConfirmationService.Confirmation smsConfirmationCodeVerified =
this.smsConfirmationService.verifySmsConfirmationCode(TEST_VRM, MOBILE, CONFIRMATION_ID, CONFIRMATION_CODE);
assertEquals(CODE_VALID.name(), smsConfirmationCodeVerified.name());
}
@Test(expected = InvalidConfirmationIdException.class)
public void verifySmsConfirmationCodeWillThrowInvalidConfirmationIdExceptionWhenNoRecordIsFound() throws Exception {
withExpectedSmsConfirmation(Optional.empty());
this.smsConfirmationService.verifySmsConfirmationCode(TEST_VRM, MOBILE, CONFIRMATION_ID, INCORRECT_CONFIRMATION_CODE);
}
@Test
public void verifySmsConfirmationCodeWillReturnFalseWhenCodeIsNotValidForThatRecord() throws Exception {
SmsConfirmation smsConfirmation = new SmsConfirmation()
.setCode(CONFIRMATION_CODE)
.setConfirmationId(CONFIRMATION_ID)
.setPhoneNumber(MOBILE)
.setVrm(TEST_VRM);
withExpectedSmsConfirmation(Optional.of(smsConfirmation));
SmsConfirmationService.Confirmation smsConfirmationCodeVerified = this.smsConfirmationService.verifySmsConfirmationCode(
TEST_VRM, MOBILE, CONFIRMATION_ID, INCORRECT_CONFIRMATION_CODE);
assertEquals(CODE_NOT_VALID, smsConfirmationCodeVerified);
}
@Test
public void verifySmsConfirmationCodeWillReturnFalseWhenVrmDoesNotMatchForThatRecord() throws Exception {
SmsConfirmation smsConfirmation = new SmsConfirmation()
.setCode(CONFIRMATION_CODE)
.setConfirmationId(CONFIRMATION_ID)
.setPhoneNumber(MOBILE)
.setVrm(TEST_VRM);
withExpectedSmsConfirmation(Optional.of(smsConfirmation));
SmsConfirmationService.Confirmation smsConfirmationCodeVerified = this.smsConfirmationService.verifySmsConfirmationCode(
INCORRECT_TEST_VRM, MOBILE, CONFIRMATION_ID, CONFIRMATION_CODE);
assertEquals(CODE_NOT_VALID, smsConfirmationCodeVerified);
}
@Test
public void verifySmsConfirmationCodeWillReturnFalseWhenPhoneNumberDoesNotMatchForThatRecord() throws Exception {
SmsConfirmation smsConfirmation = new SmsConfirmation()
.setCode(CONFIRMATION_CODE)
.setConfirmationId(CONFIRMATION_ID)
.setPhoneNumber(MOBILE)
.setVrm(TEST_VRM);
withExpectedSmsConfirmation(Optional.of(smsConfirmation));
SmsConfirmationService.Confirmation smsConfirmationCodeVerified = this.smsConfirmationService.verifySmsConfirmationCode(
TEST_VRM, INCORRECT_MOBILE, CONFIRMATION_ID, CONFIRMATION_CODE);
assertEquals(CODE_NOT_VALID, smsConfirmationCodeVerified);
}
@Test
public void resendSmsWillCorrectlyResendSmsWithTheSameConfirmationCode() throws Exception {
SmsConfirmation smsConfirmation = new SmsConfirmation()
.setCode(CONFIRMATION_CODE);
withExpectedSmsConfirmation(Optional.of(smsConfirmation));
String redirectUri = this.smsConfirmationService.resendSms(MOBILE, CONFIRMATION_ID);
verify(notifyService, times(1)).sendPhoneNumberConfirmationSms(MOBILE, CONFIRMATION_CODE);
assertEquals(PHONE_CONFIRMATION_LINK, redirectUri);
}
@Test(expected = InvalidConfirmationIdException.class)
public void resendSmsWillThrowInvalidConfirmationIdExceptionWhenNoRecordIsFound() throws Exception {
withExpectedSmsConfirmation(Optional.empty());
this.smsConfirmationService.resendSms(MOBILE, CONFIRMATION_ID);
}
@Test
public void whenThereIsAnExistingConfirmation_AndResendNotRestricted_NewConfirmationCreated() throws InvalidConfirmationIdException {
ArgumentCaptor<SmsConfirmation> smsConfirmationArgumentCaptor = ArgumentCaptor.forClass(SmsConfirmation.class);
SmsConfirmation existingConfirmation = new SmsConfirmation();
existingConfirmation.setResendAttempts(0);
existingConfirmation.setLatestResendAttempt(LocalDateTime.now());
existingConfirmation.setCode(CONFIRMATION_CODE);
existingConfirmation.setConfirmationId(CONFIRMATION_ID);
when(smsConfirmationRepository.findByConfirmationId(CONFIRMATION_ID)).thenReturn(Optional.of(existingConfirmation));
String redirectUri = this.smsConfirmationService.handleSmsConfirmationCreation(TEST_VRM, MOBILE, CONFIRMATION_ID);
verify(smsConfirmationRepository, times(1)).saveWithResendTimestampUpdate(smsConfirmationArgumentCaptor.capture());
verify(notifyService, times(1)).sendPhoneNumberConfirmationSms(any(), any());
assertEquals(smsConfirmationArgumentCaptor.getValue().getAttempts(), INITIAL_ATTEMPTS);
assertEquals(smsConfirmationArgumentCaptor.getValue().getConfirmationId(), CONFIRMATION_ID);
assertEquals(smsConfirmationArgumentCaptor.getValue().getResendAttempts(), INITIAL_RESEND_ATTEMPTS + 1);
assertEquals(PHONE_CONFIRMATION_LINK, redirectUri);
}
@Test
public void whenThereIsAnExistingConfirmation_AndResendIsRestricted_NewConfirmationIsNotCreated()
throws InvalidConfirmationIdException {
SmsConfirmation existingConfirmation = new SmsConfirmation();
existingConfirmation.setResendAttempts(4);
existingConfirmation.setLatestResendAttempt(LocalDateTime.now());
existingConfirmation.setCode(CONFIRMATION_CODE);
existingConfirmation.setConfirmationId(CONFIRMATION_ID);
when(smsConfirmationRepository.findByConfirmationId(CONFIRMATION_ID)).thenReturn(Optional.of(existingConfirmation));
this.smsConfirmationService.handleSmsConfirmationCreation(TEST_VRM, MOBILE, CONFIRMATION_ID);
verify(notifyService, times(0)).sendPhoneNumberConfirmationSms(any(), any());
verify(motrSession, times(1)).setSmsConfirmResendLimited(true);
}
@Test
public void whenCodeNotValidAndMaxAttemptsReached_thenCorrectResponseIsReturned() throws InvalidConfirmationIdException {
SmsConfirmation existingConfirmation = new SmsConfirmation();
existingConfirmation.setAttempts(2);
existingConfirmation.setLatestResendAttempt(LocalDateTime.now());
existingConfirmation.setCode(CONFIRMATION_CODE);
existingConfirmation.setConfirmationId(CONFIRMATION_ID);
existingConfirmation.setPhoneNumber(MOBILE);
existingConfirmation.setVrm(TEST_VRM);
when(smsConfirmationRepository.findByConfirmationId(CONFIRMATION_ID)).thenReturn(Optional.of(existingConfirmation));
SmsConfirmationService.Confirmation confirmation =
this.smsConfirmationService.verifySmsConfirmationCode(TEST_VRM, MOBILE, CONFIRMATION_ID, "XXXX");
assertEquals(CODE_NOT_VALID_MAX_ATTEMPTS_REACHED.name(), confirmation.name());
}
@Test
public void whenCodeIsValidButMaxAttemptsPreviouslyReached_thenCodeNotValidMaxAttemptsReachedResponseIsReturned()
throws InvalidConfirmationIdException {
SmsConfirmation existingConfirmation = new SmsConfirmation();
existingConfirmation.setAttempts(3);
existingConfirmation.setLatestResendAttempt(LocalDateTime.now());
existingConfirmation.setCode(CONFIRMATION_CODE);
existingConfirmation.setConfirmationId(CONFIRMATION_ID);
existingConfirmation.setPhoneNumber(MOBILE);
existingConfirmation.setVrm(TEST_VRM);
when(smsConfirmationRepository.findByConfirmationId(CONFIRMATION_ID)).thenReturn(Optional.of(existingConfirmation));
SmsConfirmationService.Confirmation confirmation =
this.smsConfirmationService.verifySmsConfirmationCode(TEST_VRM, MOBILE, CONFIRMATION_ID, CONFIRMATION_CODE);
assertEquals(CODE_NOT_VALID_MAX_ATTEMPTS_REACHED, confirmation);
}
@Test
public void whenCodeIsValidAndMaxAttemptsNotReached_thenCodeValidResponseIsReturned() throws InvalidConfirmationIdException {
SmsConfirmation existingConfirmation = new SmsConfirmation();
existingConfirmation.setAttempts(2);
existingConfirmation.setLatestResendAttempt(LocalDateTime.now());
existingConfirmation.setCode(CONFIRMATION_CODE);
existingConfirmation.setConfirmationId(CONFIRMATION_ID);
existingConfirmation.setPhoneNumber(MOBILE);
existingConfirmation.setVrm(TEST_VRM);
when(smsConfirmationRepository.findByConfirmationId(CONFIRMATION_ID)).thenReturn(Optional.of(existingConfirmation));
SmsConfirmationService.Confirmation confirmation =
this.smsConfirmationService.verifySmsConfirmationCode(TEST_VRM, MOBILE, CONFIRMATION_ID, CONFIRMATION_CODE);
assertEquals(CODE_VALID, confirmation);
}
private void withExpectedSmsConfirmation(Optional<SmsConfirmation> smsConfirmation) {
when(smsConfirmationRepository.findByConfirmationId(CONFIRMATION_ID)).thenReturn(smsConfirmation);
}
}
| |
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// TODO(bleichen):
// - add tests for SHA1WithDSA with wrong key
// - add tests for "alternative" algorithm names
// - convert tests for deterministic DSA variants.
// Deterministic DSA has a few new drawbacks:
// * implementation flaws that generate k incorrectly can leak
// the key if multiple implementations (e.g. one correct and one incorrect)
// are used.
// * timing attacks are more serious if the attacker can ask for the same
// signature multiple times, since this allows them to collect more accurate timings.
package com.google.security.wycheproof;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.google.security.wycheproof.WycheproofRunner.ProviderType;
import com.google.security.wycheproof.WycheproofRunner.SlowTest;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadMXBean;
import java.math.BigInteger;
import java.security.GeneralSecurityException;
import java.security.KeyFactory;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.Signature;
import java.security.interfaces.DSAParams;
import java.security.interfaces.DSAPrivateKey;
import java.security.interfaces.DSAPublicKey;
import java.security.spec.DSAPrivateKeySpec;
import java.util.Arrays;
import javax.crypto.Cipher;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Tests DSA against invalid signatures. The motivation for this test is the DSA implementation in
* gpg4browsers. This implementation accepts signatures with r=1 and s=0 as valid.
*
* @author bleichen@google.com (Daniel Bleichenbacher)
*/
@RunWith(JUnit4.class)
public class DsaTest {
// Extracts the integer r from a DSA signature.
// This method implicitly assumes that the DSA signature is DER encoded
// with short-form lengths, so the length byte of r sits at index 3.
BigInteger extractR(byte[] signature) throws Exception {
    int rLen = signature[3];
    byte[] rBytes = Arrays.copyOfRange(signature, 4, 4 + rLen);
    return new BigInteger(rBytes);
}
// Extracts the integer s from a DER encoded DSA signature.
// Assumes short-form lengths: the header of s directly follows the r integer.
BigInteger extractS(byte[] signature) throws Exception {
    int rLen = signature[3];
    int sHeader = 4 + rLen; // index of the INTEGER tag of s
    int sLen = signature[sHeader + 1];
    byte[] sBytes = Arrays.copyOfRange(signature, sHeader + 2, sHeader + 2 + sLen);
    return new BigInteger(sBytes);
}
/**
 * Recovers the one-time nonce k used to create the signature, given the
 * private key: k = (h + x*r) / s (mod q). If {@code check} is true, the
 * recovered k is validated by recomputing r = (g^k mod p) mod q and
 * comparing it against the r extracted from the signature.
 */
BigInteger extractK(byte[] signature, BigInteger h, DSAPrivateKey priv, boolean check)
    throws Exception {
    BigInteger q = priv.getParams().getQ();
    BigInteger x = priv.getX();
    BigInteger r = extractR(signature);
    BigInteger s = extractS(signature);
    // k = s^{-1} * (x*r + h) mod q
    BigInteger k = s.modInverse(q).multiply(x.multiply(r).add(h)).mod(q);
    if (check) {
        BigInteger g = priv.getParams().getG();
        BigInteger p = priv.getParams().getP();
        BigInteger recomputedR = g.modPow(k, p).mod(q);
        assertEquals(r.toString(), recomputedR.toString());
    }
    return k;
}
/**
 * Providers that implement SHA1WithDSA but not at least SHA256WithDSA are outdated and should be
 * avoided even if DSA is currently not used in a project. Such providers promote using a weak
 * signature scheme. It can also "inspire" developers to use invalid schemes such as SHA1WithDSA
 * together with 2048-bit key. Such invalid use cases are often untested and can have serious
 * flaws. For example the SUN provider leaked the private keys with 3 to 5 signatures in such
 * instances.
 */
@Test
public void testOutdatedProvider() throws Exception {
    Signature sha1Dsa;
    try {
        sha1Dsa = Signature.getInstance("SHA1WithDSA");
    } catch (NoSuchAlgorithmException ex) {
        // no SHA1WithDSA at all: nothing to flag
        System.out.println("SHA1WithDSA is not supported");
        return;
    }
    try {
        Signature.getInstance("SHA256WithDSA");
    } catch (NoSuchAlgorithmException ex) {
        fail("Provider " + sha1Dsa.getProvider().getName() + " is outdated and should not be used.");
    }
}
/**
 * Basic DSA round trip: generate a key pair, sign a message and verify the
 * resulting signature. This test can be slow with some providers, since
 * some of them generate fresh DSA parameters (p and q) for every new key.
 */
@SlowTest(providers = {ProviderType.BOUNCY_CASTLE, ProviderType.SPONGY_CASTLE})
@SuppressWarnings("InsecureCryptoUsage")
@Test
public void testBasic() throws Exception {
    final int keySize = 2048;
    final String algorithm = "SHA256WithDSA";
    byte[] messageBytes = "Hello".getBytes("UTF-8");

    KeyPairGenerator generator = KeyPairGenerator.getInstance("DSA");
    generator.initialize(keySize);
    KeyPair keyPair = generator.generateKeyPair();
    DSAPublicKey pub = (DSAPublicKey) keyPair.getPublic();
    DSAPrivateKey priv = (DSAPrivateKey) keyPair.getPrivate();

    Signature signer = Signature.getInstance(algorithm);
    signer.initSign(priv);
    signer.update(messageBytes);
    byte[] signature = signer.sign();

    Signature verifier = Signature.getInstance(algorithm);
    verifier.initVerify(pub);
    verifier.update(messageBytes);
    assertTrue(verifier.verify(signature));
}
/**
 * Generates a DSA key of the given size and checks that p, q and the
 * private value x all have cryptographically sensible bit lengths.
 */
@SuppressWarnings("InsecureCryptoUsage")
public void testKeyGeneration(int keysize) throws Exception {
    KeyPairGenerator generator = KeyPairGenerator.getInstance("DSA");
    generator.initialize(keysize);
    KeyPair keyPair = generator.generateKeyPair();
    DSAPrivateKey priv = (DSAPrivateKey) keyPair.getPrivate();
    DSAParams params = priv.getParams();
    assertEquals(keysize, params.getP().bitLength());
    // The NIST standard does not fully pin down the size of q for a given
    // key size, so providers differ: e.g. for 2048-bit keys OpenSSL uses
    // 256-bit q's by default while the SUN provider uses 224 bits. Both are
    // acceptable; we only assert that q does not weaken the overall key.
    int qsize = params.getQ().bitLength();
    switch (keysize) {
        case 1024:
            assertTrue("Invalid qsize for 1024 bit key:" + qsize, qsize >= 160);
            break;
        case 2048:
            assertTrue("Invalid qsize for 2048 bit key:" + qsize, qsize >= 224);
            break;
        case 3072:
            assertTrue("Invalid qsize for 3072 bit key:" + qsize, qsize >= 256);
            break;
        default:
            fail("Invalid key size:" + keysize);
    }
    // Check the length of the private key. Buggy implementations (e.g.
    // GPG4Browsers or the KJUR library derived from it) used q.bitCount()
    // instead of q.bitLength() to size the private key and hence generated
    // keys that were much too small.
    assertTrue(priv.getX().bitLength() >= qsize - 32);
}
/**
 * Tests the key generation for DSA.
 *
 * <p>Problems found:
 *
 * <ul>
 *   <li>CVE-2016-1000343 BouncyCastle before v.1.56 always generated DSA keys with a 160-bit q.
 * </ul>
 */
@SlowTest(providers = {ProviderType.BOUNCY_CASTLE, ProviderType.SPONGY_CASTLE})
@Test
public void testKeyGenerationAll() throws Exception {
    for (int keysize : new int[] {1024, 2048}) {
        testKeyGeneration(keysize);
    }
}
/**
 * Checks the default key size used for DSA key generation.
 *
 * <p>This test uses NIST SP 800-57 part1 revision 4
 * http://nvlpubs.nist.gov/nistpubs/SpecialPublications/NIST.SP.800-57pt1r4.pdf . Table 2 on page
 * 53 recommends a minimal key length of 2048 bits for new keys used up to the year 2030.
 *
 * <p>While smaller keys may still be used for legacy cases, we think that such a choice should
 * always be made by providing the desired key length during the initalization of the
 * KeyPairGenerator.
 *
 * <p>This test may fail with old jdk versions. Oracle has changed the default size for DSA keys
 * from 1024 bits to 2048 bits with https://bugs.java.com/bugdatabase/view_bug.do?bug_id=8184341 .
 */
@Test
public void testDefaultKeySize() throws Exception {
    KeyPairGenerator keyGen = KeyPairGenerator.getInstance("DSA");
    KeyPair keypair;
    try {
        keypair = keyGen.genKeyPair();
    } catch (Exception ex) {
        // Changing the default key size from 1024 bits to 2048 bits might be problematic for a
        // provider, since SHA1WithDSA is the default algorithm.
        // Hence, if a provider decides not to implement a default key size and requires that a user
        // has to initialize the KeyPairGenerator then this should be acceptable behaviour.
        System.out.println("Could not generate a key with default key size:" + ex.getMessage());
        return;
    }
    DSAPublicKey pub = (DSAPublicKey) keypair.getPublic();
    int keySizeInBits = pub.getParams().getP().bitLength();
    System.out.println("testDefaultSize: keysize=" + keySizeInBits);
    if (keySizeInBits < 2048) {
        fail("DSA default key size too small:" + keySizeInBits);
    }
}
/**
 * Checks whether the one time key k in DSA is biased. For example the SUN provider fell for this
 * test until April 2016.
 *
 * <p>The test signs the same message many times with a fixed key whose q is close to
 * 2/3 * 2^160, extracts the one time key k from each signature, and verifies that both the least
 * significant bit of k and the event "k &gt; q/2" behave like fair coin flips.
 */
@SuppressWarnings("InsecureCryptoUsage")
@Test
public void testDsaBias() throws Exception {
  // q is close to 2/3 * 2^160.
  BigInteger q = new BigInteger("974317976835659416858874959372334979171063697271");
  BigInteger p =
      new BigInteger(
          "1106803511314772711673172950296693567629309594518393175860816428"
              + "6658764043763662129010863568011543182924292444458455864283745070"
              + "9908516713302345161980412667892373845670780253725557376379049862"
              + "4062950082444499320797079243439689601679418602390654466821968220"
              + "32212146727497041502702331623782703855119908989712161");
  BigInteger g =
      new BigInteger(
          "1057342118316953575810387190942009018497979302261477972033090351"
              + "7561815639397594841480480197745063606756857212792356354588585967"
              + "3837265237205154744016475608524531648654928648461175919672511710"
              + "4878976887505840764543501512668232945506391524642105449699321960"
              + "32410302985148400531470153936516167243072120845392903");
  BigInteger x = new BigInteger("13706102843888006547723575730792302382646994436");
  KeyFactory kf = KeyFactory.getInstance("DSA");
  DSAPrivateKey priv = (DSAPrivateKey) kf.generatePrivate(new DSAPrivateKeySpec(x, p, q, g));
  // If we make TESTS tests with a fair coin then the probability that
  // either heads or tails appears less than MINCOUNT times is less than
  // 2^{-32}.
  // I.e. 2*sum(binomial(tests,i) for i in range(mincount))*2**32 < 2**tests
  // Therefore the test below is not expected to fail unless the generation
  // of the one time keys is indeed biased.
  final int tests = 1024;
  final int mincount = 410;
  String hashAlgorithm = "SHA";
  String message = "Hello";
  byte[] messageBytes = message.getBytes("UTF-8");
  byte[] digest = MessageDigest.getInstance(hashAlgorithm).digest(messageBytes);
  BigInteger h = new BigInteger(1, digest);
  final BigInteger qHalf = q.shiftRight(1);
  Signature signer = Signature.getInstance("SHA1WithDSA");
  signer.initSign(priv);
  // NOTE: the original comments on these two counters were swapped.
  int countLsb = 0; // count the number of k's with lsb set
  int countMsb = 0; // count the number of k's with msb set, i.e. k > q/2
  for (int i = 0; i < tests; i++) {
    signer.update(messageBytes);
    byte[] signature = signer.sign();
    BigInteger k = extractK(signature, h, priv, i < 10);
    if (k.testBit(0)) {
      countLsb++;
    }
    // BigInteger.compareTo is only specified to return a negative, zero, or positive value,
    // so test the sign instead of comparing against the constant 1.
    if (k.compareTo(qHalf) > 0) {
      countMsb++;
    }
  }
  if (countLsb < mincount || countLsb > tests - mincount) {
    fail("Bias detected in the least significant bit of k:" + countLsb);
  }
  if (countMsb < mincount || countMsb > tests - mincount) {
    fail("Bias detected in the most significant bit of k:" + countMsb);
  }
}
/**
 * Checks whether CVE-2016-0695 has been fixed. Before the April 2016 security update, the SUN
 * provider had a serious flaw that leaked the private key with about 3-5 signatures. In
 * particular, "Sha1WithDSA" always generated 160 bit k's independently of q. Unfortunately, it is
 * easily possible to use 2048 and 3072 bit DSA keys together with SHA1WithDSA. All a user has to
 * do is to use the algorithm name "DSA" instead of "SHA256WithDSA" rsp. "SHA224WithDSA".
 *
 * <p>An algorithm to extract the key from the signatures has been described for example in the
 * paper <a href="http://www.hpl.hp.com/techreports/1999/HPL-1999-90.pdf">Lattice Attacks on
 * Digital Signature Schemes</a> by N.A. Howgrave-Graham, N.P. Smart.
 *
 * <p>This bug is the same as US-CERT: VU # 940388: GnuPG generated ElGamal signatures that leaked
 * the private key.
 */
@SlowTest(providers = {ProviderType.BOUNCY_CASTLE, ProviderType.SPONGY_CASTLE})
@SuppressWarnings("InsecureCryptoUsage")
@Test
public void testBiasSha1WithDSA() throws Exception {
  String hashAlgorithm = "SHA";
  String message = "Hello";
  byte[] messageBytes = message.getBytes("UTF-8");
  byte[] digest = MessageDigest.getInstance(hashAlgorithm).digest(messageBytes);
  BigInteger h = new BigInteger(1, digest);
  KeyPairGenerator generator = java.security.KeyPairGenerator.getInstance("DSA");
  generator.initialize(2048);
  KeyPair keyPair = generator.generateKeyPair();
  DSAPrivateKey privateKey = (DSAPrivateKey) keyPair.getPrivate();
  Signature signer = Signature.getInstance("DSA");
  try {
    // The 2048-bit key does not match the 160-bit hash selected by the "DSA" alias, so a
    // provider may reasonably reject the combination right here.
    signer.initSign(privateKey);
    signer.update(messageBytes);
    byte[] rawSignature = signer.sign();
    BigInteger q = privateKey.getParams().getQ();
    BigInteger k = extractK(rawSignature, h, privateKey, true);
    // If the provider generated a 160-bit k for a much larger q, then k leaks key material.
    int lengthDiff = q.bitLength() - k.bitLength();
    if (lengthDiff > 32) {
      fail(
          "Severly biased DSA signature:"
              + " len(q)="
              + q.bitLength()
              + " len(k)="
              + k.bitLength());
    }
  } catch (GeneralSecurityException ex) {
    // The key is invalid for this algorithm, hence rejecting it is acceptable behaviour.
  }
}
/**
 * This test checks for potential of a timing attack. The test generates a number of signatures,
 * selects a fraction of them with a small timing and then compares the values k for the selected
 * signatures with a normal distribution. The test fails if these ks are much smaller than
 * expected. An implementation flaw that can lead to a test failure is to compute the signature
 * with a modular exponentiation with a runtime that depend on the length of the exponent.
 *
 * <p>A failing test simply means that the timing can be used to get information about k. Further
 * analysis is necessary to determine if the bias is exploitable and how many timings are
 * necessary for an attack. A passing test does not mean that the implementation is secure against
 * timing attacks. The test only catches relatively big timing differences. It requires high
 * confidence to fail. Noise on the test machine can prevent that a relation between timing and k
 * can be detected.
 *
 * <p>Claims of what is exploitable: http://www.hpl.hp.com/techreports/1999/HPL-1999-90.pdf 30
 * signatures are sufficient to find the private key if the attacker knows 8 bits of each k.
 * http://eprint.iacr.org/2004/277.pdf 27 signatures are sufficient if 8 bits of each k is known.
 * Our own old experiments (using 1GB memory on a Pentium-4? CPU): 2^11 signatures are sufficient
 * with a 3 bit leakage. 2^15 signatures are sufficient with a 2 bit leakage. 2^24 signatures are
 * sufficient with a 1 bit leakage. Estimate for biased generation in the NIST standard: e.g. 2^22
 * signatures, 2^40 memory, 2^64 time
 *
 * <p><b>Sample output for the SUN provider:</b> <code>
 * count:50000 cutoff:4629300 relative average:0.9992225872624547 sigmas:0.3010906585642381
 * count:25000 cutoff:733961 relative average:0.976146066585879 sigmas:6.532668708070148
 * count:12500 cutoff:688305 relative average:0.9070352192339134 sigmas:18.00255238454385
 * count:6251 cutoff:673971 relative average:0.7747148791368986 sigmas:30.850903417893825
 * count:3125 cutoff:667045 relative average:0.5901994097874541 sigmas:39.67877152897901
 * count:1563 cutoff:662088 relative average:0.4060286694971057 sigmas:40.67294313795137
 * count:782 cutoff:657921 relative average:0.2577955312387898 sigmas:35.94906247333319
 * count:391 cutoff:653608 relative average:0.1453438859272699 sigmas:29.271192100879457
 * count:196 cutoff:649280 relative average:0.08035497211567771 sigmas:22.300206785132406
 * count:98 cutoff:645122 relative average:0.05063589092661368 sigmas:16.27820353139225
 * count:49 cutoff:641582 relative average:0.018255560447883384 sigmas:11.903018745467488
 * count:25 cutoff:638235 relative average:0.009082660721102722 sigmas:8.581595888660086
 * count:13 cutoff:633975 relative average:0.0067892346039088326 sigmas:6.20259924188633
 * </code>
 *
 * <p><b>What this shows:</b> The first line uses all 50'000 signatures. The average k of these
 * signatures is close to the expected value q/2. Being more selective gives us signatures with a
 * more biased k. For example, the 196 signatures with the fastest timing have about a 3-bit bias.
 * From this we expect that 2^19 signatures and timings are sufficient to find the private key.
 *
 * <p>A list of problems caught by this test:
 *
 * <ul>
 *   <li>CVE-2016-5548 OpenJDK8's DSA is vulnerable to timing attacks.
 *   <li>CVE-2016-1000341 BouncyCastle before v 1.56 is vulnernerable to timing attacks.
 * </ul>
 */
@SlowTest(
  providers = {ProviderType.BOUNCY_CASTLE, ProviderType.OPENJDK, ProviderType.SPONGY_CASTLE}
)
@SuppressWarnings("InsecureCryptoUsage")
@Test
public void testTiming() throws Exception {
  // Per-thread CPU time is used instead of wall-clock time to reduce measurement noise.
  ThreadMXBean bean = ManagementFactory.getThreadMXBean();
  if (!bean.isCurrentThreadCpuTimeSupported()) {
    System.out.println("getCurrentThreadCpuTime is not supported. Skipping");
    return;
  }
  String hashAlgorithm = "SHA-1";
  String message = "Hello";
  byte[] messageBytes = message.getBytes("UTF-8");
  byte[] digest = MessageDigest.getInstance(hashAlgorithm).digest(messageBytes);
  // h is the message digest as a non-negative integer; needed to recover k from signatures.
  BigInteger h = new BigInteger(1, digest);
  KeyPairGenerator generator = java.security.KeyPairGenerator.getInstance("DSA");
  generator.initialize(1024);
  KeyPair keyPair = generator.generateKeyPair();
  DSAPrivateKey priv = (DSAPrivateKey) keyPair.getPrivate();
  Signature signer = Signature.getInstance("SHA1WITHDSA");
  signer.initSign(priv);
  // The timings below are quite noisy. Thus we need a large number of samples.
  int samples = 50000;
  long[] timing = new long[samples];
  BigInteger[] k = new BigInteger[samples];
  for (int i = 0; i < samples; i++) {
    // Only update() and sign() are inside the timed window.
    long start = bean.getCurrentThreadCpuTime();
    signer.update(messageBytes);
    byte[] signature = signer.sign();
    timing[i] = bean.getCurrentThreadCpuTime() - start;
    // Recover the one time key k from the signature so it can be correlated with the timing.
    k[i] = extractK(signature, h, priv, false);
  }
  long[] sorted = Arrays.copyOf(timing, timing.length);
  Arrays.sort(sorted);
  // Here we are only interested in roughly the 8 most significant bits of the ks.
  // Hence, using double is sufficiently precise.
  double q = priv.getParams().getQ().doubleValue();
  double expectedAverage = q / 2;
  double maxSigmas = 0;
  System.out.println("testTiming: SHA1WITHDSA");
  // Each iteration halves the cutoff index, i.e. looks at a progressively faster subset of
  // the signatures (those whose timing is at most the idx-th smallest measurement).
  for (int idx = samples - 1; idx > 10; idx /= 2) {
    long cutoff = sorted[idx];
    int count = 0;
    double total = 0;
    for (int i = 0; i < samples; i++) {
      if (timing[i] <= cutoff) {
        total += k[i].doubleValue();
        count += 1;
      }
    }
    // For k uniform in [0, q) the standard deviation of the mean of count samples
    // is q / sqrt(12 * count).
    double expectedStdDev = q / Math.sqrt(12 * count);
    double average = total / count;
    // Number of standard deviations that the average is away from
    // the expected value:
    double sigmas = Math.abs(expectedAverage - average) / expectedStdDev;
    if (sigmas > maxSigmas) {
      maxSigmas = sigmas;
    }
    System.out.println(
        "count:"
            + count
            + " cutoff:"
            + cutoff
            + " relative average:"
            + (average / expectedAverage)
            + " sigmas:"
            + sigmas);
  }
  // Checks if the signatures with a small timing have a biased k.
  // We use 7 standard deviations, so that the probability of a false positive is smaller
  // than 10^{-10}.
  if (maxSigmas >= 7) {
    fail("Signatures with short timing have a biased k");
  }
}
/**
 * DSA is a signature scheme and must never double as an encryption scheme. This test verifies
 * that no provider exposes an ad hoc "DSA" Cipher transformation.
 */
@SuppressWarnings("InsecureCryptoUsage")
@Test
public void testEncryptionWithDsa() throws Exception {
  try {
    Cipher dsaCipher = Cipher.getInstance("DSA");
    fail("DSA must not be used as a cipher:" + dsaCipher.getProvider());
  } catch (NoSuchAlgorithmException expected) {
    // Correct behaviour: no provider should implement a DSA cipher.
  }
}
}
| |
/*************************DA-BOARD-LICENSE-START*********************************
* Copyright 2014 CapitalOne, LLC.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*************************DA-BOARD-LICENSE-END*********************************/
package com.capitalone.dashboard.datafactory.versionone.test;
import static org.junit.Assert.*;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Map;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.capitalone.dashboard.datafactory.versionone.VersionOneDataFactoryImpl;
/**
 * Tests all facets of the VersionOneDataFactoryImpl class, which is responsible
 * for handling all transactions to the source system, VersionOne.
 *
 * <p>NOTE(review): {@code v1DataFactory} is shared static state and several tests mutate its page
 * size via {@code setPageSize(...)}. JUnit does not guarantee test execution order, so tests that
 * assert the constructed default page size may be order-dependent — confirm before relying on
 * them in CI.
 *
 * @author KFK884
 */
public class VersionOneDataFactoryImplTest {
    // Diagnostic logger; final because it is never reassigned.
    private static final Logger logger = LoggerFactory
            .getLogger("VersionOneDataFactoryImplTest");
    protected static String queryName;
    // YAML-style VersionOne query assembled in setUpBeforeClass().
    protected static String query;
    // Despite the name, this holds "now minus 3 days" (see setUpBeforeClass).
    protected static String yesterday;
    protected static DateFormat dateFormat = new SimpleDateFormat(
            "yyyy-MM-dd HH:mm:ss");
    // Connection and authentication settings handed to the data factory.
    protected static Map<String, String> auth;
    // System under test, shared by all test methods.
    protected static VersionOneDataFactoryImpl v1DataFactory;

    /**
     * Default constructor.
     */
    public VersionOneDataFactoryImplTest() {
    }

    /**
     * Runs actions before test is initialized: builds the auth map, the change-date
     * filter query, and the shared data factory instance.
     *
     * @throws java.lang.Exception
     */
    @BeforeClass
    public static void setUpBeforeClass() throws Exception {
        logger.info("Beginning tests for com.capitalone.dashboard.datafactory.versionone.VersionOneDataFactoryImpl");

        auth = new HashMap<String, String>();
        auth.put("v1ProxyUrl", "http://proxy.kdc.capitalone.com:8099");
        // TODO: Include your own base uri for VersionOne
        auth.put("v1BaseUri", "");
        // TODO: Include your own v1 auth token
        auth.put("v1AccessToken", "");

        // Query stories changed in the last 3 days so the result set is small but non-empty.
        Calendar cal = Calendar.getInstance();
        cal.add(Calendar.DATE, -3);
        yesterday = dateFormat.format(cal.getTime());
        yesterday = yesterday.replace(" ", "T");
        query = "from: Story\n" + "select:\n" + " - Number\n" + "filter:\n"
                + " - ChangeDate>'" + yesterday + "'\n"
                + " - (IsDeleted='False'|IsDeleted='True')\n";

        v1DataFactory = new VersionOneDataFactoryImpl(auth);
    }

    /**
     * Runs actions after test is complete: releases all shared static state.
     *
     * @throws java.lang.Exception
     */
    @AfterClass
    public static void tearDownAfterClass() throws Exception {
        v1DataFactory = null;
        auth = null;
        yesterday = null;
        query = null;
    }

    /**
     * Performs these actions before each test.
     *
     * @throws java.lang.Exception
     */
    @Before
    public void setUp() throws Exception {
    }

    /**
     * Performs these actions after each test completes.
     *
     * @throws java.lang.Exception
     */
    @After
    public void tearDown() throws Exception {
    }

    /**
     * Test method for
     * {@link com.capitalone.dashboard.datafactory.versionone.VersionOneDataFactoryImpl#buildPagingQuery(int)}
     * .
     */
    @Test
    public void testBuildPagingQuery() {
        v1DataFactory.setPageSize(1);
        v1DataFactory.buildPagingQuery(30);
        assertNotNull("The basic query was created",
                v1DataFactory.getPagingQuery());
        assertEquals("The page size was accurate", 1,
                v1DataFactory.getPageSize());
        assertEquals("The page index was accurate", 30,
                v1DataFactory.getPageIndex());
    }

    /**
     * Test method for
     * {@link com.capitalone.dashboard.datafactory.versionone.VersionOneDataFactoryImpl#getPagingQueryResponse()}
     * .
     */
    @Ignore
    @Test
    public void testGetPagingQueryResponse() {
        v1DataFactory.setPageSize(1);
        v1DataFactory.buildBasicQuery(query);
        v1DataFactory.buildPagingQuery(0);
        try {
            JSONArray rs = v1DataFactory.getPagingQueryResponse();

            /*
             * Testing actual JSON for values
             */
            // The response is a JSONArray of JSONArrays; drill into the first story object.
            JSONArray dataMainArry = (JSONArray) rs.get(0);
            JSONObject dataMainObj = (JSONObject) dataMainArry.get(0);

            // number
            assertTrue("No valid Number was found", dataMainObj.get("Number")
                    .toString().length() >= 7);
        } catch (NullPointerException npe) {
            fail("There was a problem with an object used to connect to VersionOne during the test");
        } catch (ArrayIndexOutOfBoundsException aioobe) {
            fail("The object returned from VersionOne had no JSONObjects in it during the test; try increasing the scope of your test case query and try again.");
        } catch (IndexOutOfBoundsException ioobe) {
            logger.info("JSON artifact may be empty - re-running test to prove this out...");

            JSONArray rs = v1DataFactory.getPagingQueryResponse();
            /*
             * Testing actual JSON for values
             */
            String strRs = rs.toString();
            assertEquals(
                    "There was nothing returned from VersionOne that is consistent with a valid response.",
                    "[[]]", strRs);
        } catch (Exception e) {
            fail("There was an unexpected problem while connecting to VersionOne during the test");
        }
    }

    /**
     * Test method for
     * {@link com.capitalone.dashboard.datafactory.versionone.VersionOneDataFactoryImpl#VersionOneDataFactoryImpl()}
     * .
     */
    @Test
    public void testVersionOneDataFactoryImpl() {
        assertEquals("The compared constructed page size values did not match",
                2000, v1DataFactory.getPageSize());
    }

    /**
     * Test method for
     * {@link com.capitalone.dashboard.datafactory.versionone.VersionOneDataFactoryImpl#VersionOneDataFactoryImpl(int)}
     * .
     */
    @Test
    public void testVersionOneDataFactoryImplInt() {
        v1DataFactory.setPageSize(1000);
        assertEquals("The compared constructed page size values did not match",
                1000, v1DataFactory.getPageSize());
    }

    /**
     * Test method for
     * {@link com.capitalone.dashboard.datafactory.versionone.VersionOneDataFactoryImpl#buildBasicQuery(java.lang.String)}
     * .
     */
    @Test
    public void testBuildBasicQuery() {
        v1DataFactory.setPageSize(1);
        v1DataFactory.buildBasicQuery(query);
        assertNotNull("The basic query was created",
                v1DataFactory.getBasicQuery());
        assertEquals("The page size was accurate", 1,
                v1DataFactory.getPageSize());
        assertEquals("The page index was accurate", 0,
                v1DataFactory.getPageIndex());
    }

    /**
     * Test method for
     * {@link com.capitalone.dashboard.datafactory.versionone.VersionOneDataFactoryImpl#getQueryResponse(java.lang.String)}
     * .
     */
    @Ignore
    @Test
    public void testGetQueryResponse() {
        v1DataFactory.setPageSize(1);
        v1DataFactory.buildBasicQuery(query);
        try {
            JSONArray rs = v1DataFactory.getQueryResponse();

            /*
             * Testing actual JSON for values
             */
            // The response is a JSONArray of JSONArrays; drill into the first story object.
            JSONArray dataMainArry = (JSONArray) rs.get(0);
            JSONObject dataMainObj = (JSONObject) dataMainArry.get(0);

            // number
            assertTrue("No valid Number was found", dataMainObj.get("Number")
                    .toString().length() >= 7);
        } catch (NullPointerException npe) {
            fail("There was a problem with an object used to connect to VersionOne during the test");
        } catch (ArrayIndexOutOfBoundsException aioobe) {
            fail("The object returned from VersionOne had no JSONObjects in it during the test; try increasing the scope of your test case query and try again.");
        } catch (IndexOutOfBoundsException ioobe) {
            logger.info("JSON artifact may be empty - re-running test to prove this out...");

            JSONArray rs = v1DataFactory.getQueryResponse();
            /*
             * Testing actual JSON for values
             */
            String strRs = rs.toString();
            assertEquals(
                    "There was nothing returned from VersionOne that is consistent with a valid response.",
                    "[[]]", strRs);
        } catch (Exception e) {
            fail("There was an unexpected problem while connecting to VersionOne during the test");
        }
    }
}
| |
/*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.datacollector.execution.store;
import com.codahale.metrics.MetricRegistry;
import com.google.common.collect.ImmutableList;
import com.streamsets.datacollector.config.PipelineConfiguration;
import com.streamsets.datacollector.event.handler.remote.RemoteDataCollector;
import com.streamsets.datacollector.execution.PipelineState;
import com.streamsets.datacollector.execution.PipelineStateStore;
import com.streamsets.datacollector.execution.PipelineStatus;
import com.streamsets.datacollector.main.RuntimeInfo;
import com.streamsets.datacollector.main.RuntimeModule;
import com.streamsets.datacollector.main.SlaveRuntimeInfo;
import com.streamsets.datacollector.runner.MockStages;
import com.streamsets.datacollector.stagelibrary.StageLibraryTask;
import com.streamsets.datacollector.store.PipelineStoreException;
import com.streamsets.datacollector.store.PipelineStoreTask;
import com.streamsets.datacollector.store.impl.FilePipelineStoreTask;
import com.streamsets.datacollector.util.Configuration;
import com.streamsets.datacollector.util.LockCache;
import com.streamsets.datacollector.util.LockCacheModule;
import com.streamsets.datacollector.util.TestUtil;
import com.streamsets.pipeline.api.ExecutionMode;
import dagger.Module;
import dagger.ObjectGraph;
import dagger.Provides;
import org.apache.commons.io.FileUtils;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import javax.inject.Inject;
import javax.inject.Singleton;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
public class TestPipelineStateStore {
private static PipelineStateStore pipelineStateStore;
private static PipelineStoreTask pipelineStoreTask;
static class MockFilePipelineStateStore extends CachePipelineStateStore {
@Inject
public MockFilePipelineStateStore(PipelineStateStore pipelineStateStore, Configuration configuration) {
super(pipelineStateStore, configuration);
}
static boolean INVALIDATE_CACHE = false;
@Override
public PipelineState edited(String user, String name, String rev, ExecutionMode executionMode, boolean isRemote) throws PipelineStoreException {
PipelineState state = super.edited(user, name, rev, executionMode, isRemote);
if (INVALIDATE_CACHE) {
// invalidate cache
super.destroy();
}
return state;
}
@Override
public PipelineState saveState(String user, String name, String rev, PipelineStatus status, String message,
Map<String, Object> attributes, ExecutionMode executionMode, String metrics, int retryAttempt, long nextRetryTimeStamp) throws PipelineStoreException {
if (INVALIDATE_CACHE) {
super.destroy();
}
return super.saveState(user, name, rev, status, message, attributes, executionMode, metrics, retryAttempt, nextRetryTimeStamp);
}
}
@Module(injects = {PipelineStateStore.class, PipelineStoreTask.class}, library = true,
includes = {TestUtil.TestStageLibraryModule.class, LockCacheModule.class})
static class TestPipelineStateStoreModule {
@Provides @Singleton
public SlaveRuntimeInfo provideRuntimeInfo() {
return new SlaveRuntimeInfo(RuntimeModule.SDC_PROPERTY_PREFIX, new MetricRegistry(),
ImmutableList.of(getClass().getClassLoader()));
}
@Provides @Singleton
public Configuration provideConfiguration() {
return new Configuration();
}
@Provides @Singleton
public PipelineStateStore providePipelineStateStore(SlaveRuntimeInfo runtimeInfo, Configuration configuration) {
return new MockFilePipelineStateStore(new FilePipelineStateStore(runtimeInfo, configuration), configuration);
}
@Provides
@Singleton
public PipelineStoreTask providePipelineStore(
SlaveRuntimeInfo slaveRuntimeInfo,
StageLibraryTask stageLibraryTask,
PipelineStateStore pipelineStateStore,
LockCache<String> lockCache
) {
return new FilePipelineStoreTask(slaveRuntimeInfo, stageLibraryTask, pipelineStateStore, lockCache);
}
}
@BeforeClass
public static void beforeClass() throws IOException {
System.setProperty(RuntimeModule.SDC_PROPERTY_PREFIX + RuntimeInfo.DATA_DIR, "./target/var");
TestUtil.captureMockStages();
}
@AfterClass
public static void afterClass() throws IOException {
System.getProperties().remove(RuntimeModule.SDC_PROPERTY_PREFIX + RuntimeInfo.DATA_DIR);
}
@Before()
public void setUp() throws IOException {
File f = new File(System.getProperty(RuntimeModule.SDC_PROPERTY_PREFIX + RuntimeInfo.DATA_DIR));
FileUtils.deleteDirectory(f);
ObjectGraph objectGraph = ObjectGraph.create(TestPipelineStateStoreModule.class);
pipelineStateStore = objectGraph.get(PipelineStateStore.class);
pipelineStoreTask = objectGraph.get(PipelineStoreTask.class);
pipelineStoreTask.init();
}
private PipelineConfiguration createPipeline(UUID uuid) {
PipelineConfiguration pc = MockStages.createPipelineConfigurationWithClusterOnlyStage(ExecutionMode.CLUSTER_BATCH);
pc.setUuid(uuid);
return pc;
}
@After
public void tearDown() {
pipelineStoreTask.stop();
}
@Test
public void testCreatePipeline() throws Exception {
pipelineStoreTask.create("user2", "name1", "label", "description", false);
PipelineState pipelineState = pipelineStateStore.getState("name1", "0");
assertEquals("user2", pipelineState.getUser());
assertEquals("name1", pipelineState.getPipelineId());
assertEquals("0", pipelineState.getRev());
assertEquals(ExecutionMode.STANDALONE, pipelineState.getExecutionMode());
PipelineConfiguration pc0 = pipelineStoreTask.load("name1", "0");
pc0 = createPipeline(pc0.getUuid());
pipelineStoreTask.save("user3", "name1", "0", "execution mdoe changed", pc0);
pipelineState = pipelineStateStore.getState("name1", "0");
assertEquals("user3", pipelineState.getUser());
assertEquals("name1", pipelineState.getPipelineId());
assertEquals("0", pipelineState.getRev());
assertEquals(ExecutionMode.CLUSTER_BATCH, pipelineState.getExecutionMode());
pc0 = pipelineStoreTask.load("name1", "0");
pc0 = createPipeline(pc0.getUuid());
pipelineStoreTask.save("user4", "name1", "0", "execution mdoe same", pc0);
pipelineState = pipelineStateStore.getState("name1", "0");
// should still be user3 as we dont persist state file on each edit (unless the execution mode has changed)
assertEquals("user3", pipelineState.getUser());
}
@Test
public void testStateSaveNoCache() throws Exception {
MockFilePipelineStateStore.INVALIDATE_CACHE = true;
stateSave();
}
@Test
public void testStateSaveCache() throws Exception {
MockFilePipelineStateStore.INVALIDATE_CACHE = false;
stateSave();
}
@Test
public void testStateEditNoCache() throws Exception {
MockFilePipelineStateStore.INVALIDATE_CACHE = true;
stateEdit();
}
@Test
public void testStateEditCache() throws Exception {
MockFilePipelineStateStore.INVALIDATE_CACHE = false;
stateEdit();
}
@Test
public void testStateDeleteNoCache() throws Exception {
MockFilePipelineStateStore.INVALIDATE_CACHE = true;
stateDelete();
}
@Test
public void testStateDeleteCache() throws Exception {
MockFilePipelineStateStore.INVALIDATE_CACHE = false;
stateDelete();
}
@Test
public void stateHistory() throws Exception {
pipelineStateStore.saveState("user1", "aaa", "0", PipelineStatus.STOPPED, "Pipeline stopped", null, ExecutionMode.STANDALONE, null, 0, 0);
pipelineStateStore.saveState("user1", "aaa", "0", PipelineStatus.RUNNING, "Pipeline stopped", null, ExecutionMode.STANDALONE, null, 0, 0);
List<PipelineState> history = pipelineStateStore.getHistory("aaa", "0", true);
for (PipelineState pipelineState: history) {
assertEquals(PipelineStatus.RUNNING, pipelineState.getStatus());
assertEquals(PipelineStatus.STOPPED, pipelineState.getStatus());
}
}
@Test
public void stateChangeExecutionMode() throws Exception {
pipelineStateStore.saveState("user1", "aaa", "0", PipelineStatus.STOPPED, "Pipeline stopped", null, ExecutionMode.CLUSTER_BATCH, null, 0, 0);
PipelineState pipelineState = pipelineStateStore.getState("aaa", "0");
assertEquals(ExecutionMode.CLUSTER_BATCH, pipelineState.getExecutionMode());
pipelineStateStore.saveState("user1", "aaa", "0", PipelineStatus.STOPPED, "Pipeline stopped", null, ExecutionMode.STANDALONE, null, 0, 0);
pipelineState = pipelineStateStore.getState("aaa", "0");
assertEquals(ExecutionMode.STANDALONE, pipelineState.getExecutionMode());
}
@Test
public void testStateRemoteAttribute() throws Exception {
pipelineStateStore.edited("user2", "stateRemoteAttribute", "0", ExecutionMode.STANDALONE, true);
PipelineState pipelineState = pipelineStateStore.getState("stateRemoteAttribute", "0");
assertEquals(true, pipelineState.getAttributes().get(RemoteDataCollector.IS_REMOTE_PIPELINE));
pipelineStateStore.saveState("user2", "stateRemoteAttribute", "0", PipelineStatus.STOPPED, "Pipeline starting", null, ExecutionMode.STANDALONE, null, 0, 0);
pipelineStateStore.edited("user2", "stateRemoteAttribute", "0", ExecutionMode.CLUSTER_BATCH, false);
pipelineState = pipelineStateStore.getState("stateRemoteAttribute", "0");
assertEquals(true, pipelineState.getAttributes().get(RemoteDataCollector.IS_REMOTE_PIPELINE));
assertEquals(ExecutionMode.CLUSTER_BATCH, pipelineState.getExecutionMode());
pipelineStateStore.edited("user2", "stateRemoteAttribute1", "0", ExecutionMode.STANDALONE, false);
pipelineState = pipelineStateStore.getState("stateRemoteAttribute1", "0");
assertEquals(false, pipelineState.getAttributes().get(RemoteDataCollector.IS_REMOTE_PIPELINE));
pipelineStateStore.saveState("user1", "stateRemoteAttribute2", "0", PipelineStatus.EDITED, "Pipeline edited", null, ExecutionMode.STANDALONE, null, 0, 0);
pipelineState = pipelineStateStore.getState("stateRemoteAttribute2", "0");
assertEquals(false, pipelineStoreTask.isRemotePipeline("stateRemoteAttribute2", "0"));
}
public void stateSave() throws Exception {
pipelineStateStore.saveState("user1", "aaa", "0", PipelineStatus.EDITED, "Pipeline edited", null, ExecutionMode.STANDALONE, null, 0, 0);
PipelineState pipelineState = pipelineStateStore.getState("aaa", "0");
assertEquals("user1", pipelineState.getUser());
assertEquals("aaa", pipelineState.getPipelineId());
assertEquals("0", pipelineState.getRev());
assertEquals(PipelineStatus.EDITED, pipelineState.getStatus());
assertEquals("Pipeline edited", pipelineState.getMessage());
assertEquals(ExecutionMode.STANDALONE, pipelineState.getExecutionMode());
}
public void stateDelete() throws Exception {
pipelineStateStore.saveState("user1", "aaa", "0", PipelineStatus.STOPPED, "Pipeline stopped", null, ExecutionMode.STANDALONE, null, 0, 0);
pipelineStateStore.delete("aaa", "0");
try {
pipelineStateStore.getState("aaa", "0");
fail("Expected exception but didn't get any");
} catch (PipelineStoreException ex) {
// expected
}
}
// NOTE(review): no @Test annotation is visible on this method — confirm the
// suite's runner executes it.
public void stateEdit() throws Exception {
    // edited() on a STOPPED pipeline moves it to EDITED and records the new user.
    pipelineStateStore.saveState("user1", "aaa", "0", PipelineStatus.STOPPED, "Pipeline stopped", null, ExecutionMode.STANDALONE, null, 0, 0);
    pipelineStateStore.edited("user2", "aaa", "0", ExecutionMode.STANDALONE, false);
    PipelineState pipelineState = pipelineStateStore.getState("aaa", "0");
    assertEquals("user2", pipelineState.getUser());
    assertEquals("aaa", pipelineState.getPipelineId());
    assertEquals("0", pipelineState.getRev());
    assertEquals(PipelineStatus.EDITED, pipelineState.getStatus());
    // edited() on a RUNNING pipeline is rejected with IllegalStateException.
    pipelineStateStore.saveState("user1", "aaa", "0", PipelineStatus.RUNNING, "Pipeline running", null, ExecutionMode.STANDALONE, null, 0, 0);
    try {
        pipelineStateStore.edited("user2", "aaa", "0", ExecutionMode.STANDALONE, false);
        fail("Expected exception but didn't get any");
    } catch (IllegalStateException ex) {
        // expected: cannot edit a running pipeline
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.plugins.index.lucene.util;
import java.util.Iterator;
import com.google.common.collect.Iterables;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Root;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.core.ImmutableRoot;
import org.apache.jackrabbit.oak.plugins.index.IndexConstants;
import org.apache.jackrabbit.oak.plugins.index.search.FulltextIndexConstants;
import org.apache.jackrabbit.oak.plugins.tree.factories.TreeFactory;
import org.apache.jackrabbit.oak.spi.filter.PathFilter;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.oak.spi.state.NodeStateUtils;
import org.hamcrest.Matchers;
import org.junit.After;
import org.junit.Test;
import static com.google.common.collect.ImmutableList.of;
import static java.util.Arrays.asList;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.INDEX_DEPRECATED;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.INDEX_TAGS;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.REINDEX_PROPERTY_NAME;
import static org.apache.jackrabbit.oak.plugins.index.search.FulltextIndexConstants.AGGREGATES;
import static org.apache.jackrabbit.oak.plugins.index.search.FulltextIndexConstants.FIELD_BOOST;
import static org.apache.jackrabbit.oak.plugins.index.search.FulltextIndexConstants.PROP_FACETS;
import static org.apache.jackrabbit.oak.plugins.index.search.FulltextIndexConstants.PROP_REFRESH_DEFN;
import static org.apache.jackrabbit.oak.plugins.index.search.FulltextIndexConstants.COST_PER_ENTRY;
import static org.apache.jackrabbit.oak.plugins.index.search.FulltextIndexConstants.COST_PER_EXECUTION;
import static org.apache.jackrabbit.oak.plugins.index.search.FulltextIndexConstants.BLOB_SIZE;
import static org.apache.jackrabbit.oak.plugins.index.search.FulltextIndexConstants.PROP_WEIGHT;
import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
import static org.apache.jackrabbit.oak.plugins.memory.MultiStringPropertyState.stringProperty;
import static org.junit.Assert.*;
public class IndexDefinitionBuilderTest {
// Builder under test; many tests re-create it around an existing node state to
// exercise the merge/diff (reindex vs. refresh) behaviour.
private IndexDefinitionBuilder builder = new IndexDefinitionBuilder();
// Backing NodeBuilder used by tests that wrap a previously built definition.
private NodeBuilder nodeBuilder = EMPTY_NODE.builder();
/** Debugging aid: prints the built index definition after every test. */
@After
public void dumpState(){
    NodeState builtState = builder.build();
    System.out.println(NodeStateUtils.toString(builtState));
}
/** A freshly built definition carries the expected default properties. */
@Test
public void defaultSetup() throws Exception{
    NodeState defn = builder.build();
    assertEquals("lucene", defn.getString("type"));
    assertEquals("async", defn.getString("async"));
    assertEquals(2, defn.getLong("compatVersion"));
}
/**
 * Index rules plus path settings: the rule node is created and the three
 * multi-valued path properties are stored verbatim.
 */
@Test
public void indexRule() throws Exception{
    builder.includedPaths("/a", "/b");
    builder.queryPaths("/c", "/d");
    builder.supersedes("/e", "/f");
    // Three property rules on nt:base, chained via enclosingRule().
    builder.indexRule("nt:base")
            .property("foo")
                .ordered()
            .enclosingRule()
            .property("bar")
                .analyzed()
                .propertyIndex()
            .enclosingRule()
            .property("baz")
                .propertyIndex();
    NodeState state = builder.build();
    assertTrue(state.getChildNode("indexRules").exists());
    assertTrue(state.getChildNode("indexRules").getChildNode("nt:base").exists());
    assertEquals(asList("/a", "/b"), state.getProperty(PathFilter.PROP_INCLUDED_PATHS).getValue(Type.STRINGS));
    assertEquals(asList("/c", "/d"), state.getProperty(IndexConstants.QUERY_PATHS).getValue(Type.STRINGS));
    assertEquals(asList("/e", "/f"), state.getProperty(IndexConstants.SUPERSEDED_INDEX_PATHS).getValue(Type.STRINGS));
}
/** Disabling a property rule must materialise {@code index=false} on its node. */
@Test
public void propertyDefIndexPropertySetIndexFalse() throws Exception {
    builder.indexRule("nt:base")
            .property("foo")
            .disable();
    NodeState propNode = NodeStateUtils.getNode(builder.build(),
            "indexRules/nt:base/properties/foo");
    PropertyState indexProp = propNode.getProperty("index");
    assertNotNull("index property must exist", indexProp);
    assertFalse("Incorrect default value of index property", indexProp.getValue(Type.BOOLEAN));
}
/** Aggregate rules for multiple node types become children of "aggregates". */
@Test
public void aggregates() throws Exception{
    builder.aggregateRule("cq:Page").include("jcr:content").relativeNode();
    builder.aggregateRule("dam:Asset", "*", "*/*");
    NodeState aggregatesNode = builder.build().getChildNode("aggregates");
    assertTrue(aggregatesNode.exists());
    assertTrue(aggregatesNode.getChildNode("cq:Page").exists());
    assertTrue(aggregatesNode.getChildNode("dam:Asset").exists());
}
/**
 * Three property rules whose paths all end in "foo": each must get its own
 * node under "properties" rather than colliding on the trailing name.
 */
@Test
public void duplicatePropertyName() throws Exception{
    builder.indexRule("nt:base")
            .property("foo")
                .ordered()
            .enclosingRule()
            .property("jcr:content/foo")
                .analyzed()
                .propertyIndex()
            .enclosingRule()
            .property("metadata/content/foo")
                .propertyIndex();
    NodeState state = builder.build();
    assertTrue(state.getChildNode("indexRules").exists());
    assertTrue(state.getChildNode("indexRules").getChildNode("nt:base").exists());
    // All three definitions must be present despite the shared trailing name.
    assertEquals(3, state.getChildNode("indexRules").getChildNode("nt:base")
            .getChildNode("properties").getChildNodeCount(10));
}
/** Index rules must be persisted in declaration order. */
@Test
public void ruleOrder() throws Exception{
    builder.indexRule("nt:unstructured");
    builder.indexRule("nt:base");
    Tree root = TreeFactory.createTree(EMPTY_NODE.builder());
    builder.build(root);
    // Child iteration order under indexRules reflects the order rules were added.
    Iterator<Tree> rules = root.getChild("indexRules").getChildren().iterator();
    assertEquals("nt:unstructured", rules.next().getName());
    assertEquals("nt:base", rules.next().getName());
}
/** A regex property rule marks the generated property node with isRegexp. */
@Test
public void regexProperty() throws Exception{
    builder.indexRule("nt:base")
            .property(FulltextIndexConstants.REGEX_ALL_PROPS, true);
    NodeState propNode = NodeStateUtils.getNode(builder.build(),
            "indexRules/nt:base/properties/prop");
    assertTrue(propNode.getBoolean(FulltextIndexConstants.PROP_IS_REGEX));
}
/** Wrapping an existing builder keeps its properties and adds the defaults. */
@Test
public void mergeExisting() throws Exception{
    nodeBuilder.setProperty("foo", "bar");
    builder = new IndexDefinitionBuilder(nodeBuilder);
    NodeState merged = builder.build();
    // The pre-existing property survives the merge...
    assertEquals("bar", merged.getString("foo"));
    // ...and the standard defaults are still applied.
    assertEquals("async", merged.getString("async"));
}
/**
 * Re-wrapping an already built definition preserves its index rules and lets
 * new property rules be merged into them.
 */
@Test
public void mergeExisting_IndexRule() throws Exception{
    builder.indexRule("nt:unstructured").property("foo").propertyIndex();
    nodeBuilder = builder.build().builder();
    builder = new IndexDefinitionBuilder(nodeBuilder);
    // Only the rule added before the rebuild is visible.
    assertTrue(builder.hasIndexRule("nt:unstructured"));
    assertFalse(builder.hasIndexRule("nt:base"));
    builder.indexRule("nt:unstructured").property("bar").propertyIndex();
    builder.indexRule("nt:base");
    // Old and newly added property rules coexist on the merged rule.
    assertTrue(builder.indexRule("nt:unstructured").hasPropertyRule("foo"));
    assertTrue(builder.indexRule("nt:unstructured").hasPropertyRule("bar"));
}
/** Aggregate includes are merged across rebuilds; duplicates are not re-added. */
@Test
public void mergeExisting_Aggregates() throws Exception{
    builder.aggregateRule("foo").include("/path1");
    builder.aggregateRule("foo").include("/path2");
    nodeBuilder = builder.build().builder();
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.aggregateRule("foo").include("/path1");
    builder.aggregateRule("foo").include("/path3");
    NodeState state = builder.build();
    // /path1 already existed, so only /path3 is new: 3 includes in total.
    assertEquals(3, state.getChildNode(AGGREGATES).getChildNode("foo").getChildNodeCount(100));
}
/**
 * Rebuilding an unchanged definition must not set the reindex flag, while a
 * structural change (a new index rule) must — unless the two-arg constructor
 * is used with {@code false}, in which case the need is reported but the flag
 * is not written (as asserted below).
 */
@Test
public void noReindexIfNoChange() throws Exception{
    builder.includedPaths("/a", "/b");
    builder.indexRule("nt:base")
            .property("foo")
            .ordered();
    nodeBuilder = builder.build().builder();
    // Clear the reindex flag that the initial build set.
    nodeBuilder.setProperty(REINDEX_PROPERTY_NAME, false);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.includedPaths("/a", "/b"); // identical value -> no change detected
    assertFalse(builder.isReindexRequired());
    NodeState state = builder.build();
    assertFalse(state.getBoolean(REINDEX_PROPERTY_NAME));
    assertFalse(state.getBoolean(PROP_REFRESH_DEFN));
    NodeState baseState = builder.build();
    nodeBuilder = baseState.builder();
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.indexRule("nt:file"); // structural change -> reindex
    assertTrue(builder.isReindexRequired());
    state = builder.build();
    assertTrue(state.getBoolean(REINDEX_PROPERTY_NAME));
    // With the boolean constructor arg false the flag is left unset even
    // though isReindexRequired() still reports true.
    builder = new IndexDefinitionBuilder(baseState.builder(), false);
    builder.indexRule("nt:file");
    assertTrue(builder.isReindexRequired());
    state = builder.build();
    assertTrue(builder.isReindexRequired());
    assertFalse(state.getBoolean(REINDEX_PROPERTY_NAME));
}
/**
 * Changing the indexing mode on the same "async" lane does not set the
 * reindex flag, but switching to a different lane name does.
 */
@Test
public void reindexAndAsyncFlagChange() throws Exception{
    builder.async("async", IndexConstants.INDEXING_MODE_NRT);
    nodeBuilder = builder.build().builder();
    nodeBuilder.setProperty(REINDEX_PROPERTY_NAME, false);
    NodeState oldState = nodeBuilder.getNodeState();
    builder = new IndexDefinitionBuilder(nodeBuilder);
    // Same lane, different mode -> no reindex.
    builder.async("async", IndexConstants.INDEXING_MODE_SYNC);
    assertFalse(builder.build().getBoolean(REINDEX_PROPERTY_NAME));
    builder = new IndexDefinitionBuilder(oldState.builder());
    // Different lane name -> reindex required.
    builder.async("fulltext-async", IndexConstants.INDEXING_MODE_SYNC);
    assertTrue(builder.build().getBoolean(REINDEX_PROPERTY_NAME));
}
/**
 * Adding, changing or removing queryPaths must set only the refresh flag,
 * never the reindex flag.
 */
@Test
public void noReindexWhenIfQueryPathsAddedOrChanged() {
    NodeState currentNodeState = builder.build();
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.setProperty(REINDEX_PROPERTY_NAME, false);
    // Add queryPaths.
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.queryPaths("/a","/b");
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    // Change its value.
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.removeProperty(PROP_REFRESH_DEFN);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.queryPaths("/a","/c");
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    // Remove it entirely.
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.removeProperty(PROP_REFRESH_DEFN);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.getBuilderTree().removeProperty(IndexConstants.QUERY_PATHS);
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
}
/**
 * Adding, changing or removing index tags must set only the refresh flag,
 * never the reindex flag; adding an already-present tag is a full no-op.
 */
@Test
public void noReindexWhenIfIndexTagsAddedOrChanged() {
    NodeState currentNodeState = builder.build();
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.setProperty(REINDEX_PROPERTY_NAME, false);
    // Add tags.
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.tags("foo1", "foo2");
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    // Change the tag set.
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.removeProperty(PROP_REFRESH_DEFN);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.tags("foo2", "foo3");
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    // Adding an already-present tag: not even a refresh is flagged.
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.removeProperty(PROP_REFRESH_DEFN);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.addTags("foo2");
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertFalse(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    // Remove the tags property entirely.
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.removeProperty(PROP_REFRESH_DEFN);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.getBuilderTree().removeProperty(IndexConstants.INDEX_TAGS);
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
}
/**
 * Adding, changing or removing blobSize must set only the refresh flag,
 * never the reindex flag.
 */
@Test
public void noReindexWhenIfBlobSizeAddedOrChanged() {
    NodeState currentNodeState = builder.build();
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.setProperty(REINDEX_PROPERTY_NAME, false);
    // Add blobSize.
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.getBuilderTree().setProperty(BLOB_SIZE,32768);
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    // Change its value.
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.removeProperty(PROP_REFRESH_DEFN);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.getBuilderTree().setProperty(BLOB_SIZE,35768);
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    // Remove it.
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.removeProperty(PROP_REFRESH_DEFN);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.getBuilderTree().removeProperty(BLOB_SIZE);
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
}
// The weight property is used only in cost estimation, so changing it must
// never require reindexing.
@Test
public void noReindexIfWeightPropertyAddedOrChanged() throws Exception {
    builder.indexRule("nt:base").property("fooProp");
    NodeState currentNodeState = builder.build();
    nodeBuilder = currentNodeState.builder();
    // Unset the reindex flag first because the first build would have set it.
    nodeBuilder.setProperty(REINDEX_PROPERTY_NAME, false);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    // Add the weight to fooProp - this shouldn't cause the reindex flag to be set.
    builder.indexRule("nt:base").property("fooProp").weight(10);
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.removeProperty(PROP_REFRESH_DEFN);
    // Now change the weight value - this also shouldn't set the reindex flag.
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.indexRule("nt:base").property("fooProp").weight(20);
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    // Now check the property-delete use case.
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.removeProperty(PROP_REFRESH_DEFN);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.indexRule("nt:base").property("fooProp").getBuilderTree().removeProperty(PROP_WEIGHT);
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
}
// Modifying the boost value shouldn't require reindexing because query-time
// boosts are used rather than index-time boosts. Refer OAK-3367 for details.
@Test
public void noReindexIfBoostPropAddedOrChanged() throws Exception {
    builder.indexRule("nt:base").property("fooProp");
    NodeState currentNodeState = builder.build();
    nodeBuilder = currentNodeState.builder();
    // Unset the reindex flag first because the first build would have set it.
    nodeBuilder.setProperty(REINDEX_PROPERTY_NAME, false);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    // Add the boost property - this shouldn't cause the reindex flag to be set.
    builder.indexRule("nt:base").property("fooProp").boost(1.0f);
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.removeProperty(PROP_REFRESH_DEFN);
    // Now change the boost value - this also shouldn't set the reindex flag.
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.indexRule("nt:base").property("fooProp").boost(2.0f);
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    // Now check the property-delete use case.
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.removeProperty(PROP_REFRESH_DEFN);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.indexRule("nt:base").property("fooProp").getBuilderTree().removeProperty(FIELD_BOOST);
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
}
/**
 * Adding or removing the "useIfExists" marker must set only the refresh flag,
 * never the reindex flag.
 */
@Test
public void noReindexOnUseIfExists() throws Exception {
    builder.indexRule("nt:base").property("foo1");
    NodeState currentNodeState = builder.build();
    nodeBuilder = currentNodeState.builder();
    // Unset the reindex flag first because the first build would have set it.
    nodeBuilder.setProperty(REINDEX_PROPERTY_NAME, false);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    // Add the "useIfExists" property.
    builder.getBuilderTree().setProperty(IndexConstants.USE_IF_EXISTS, "/oak:index");
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    // Now test deleting the flag - should also not set the reindex flag.
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.removeProperty(PROP_REFRESH_DEFN);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.getBuilderTree().removeProperty(IndexConstants.USE_IF_EXISTS);
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
}
// The facets node only configures how faceted search works; everything on it
// impacts query-time evaluation, so no reindexing is needed on changes.
@Test
public void noReindexWhenFacetNodeAddedOrRemoved() throws Exception {
    builder.indexRule("nt:base")
            .property("foo1").facets();
    NodeState currentNodeState = builder.build();
    nodeBuilder = currentNodeState.builder();
    // Unset the reindex flag first because the first build would have set it.
    nodeBuilder.setProperty(REINDEX_PROPERTY_NAME, false);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    // Add the facets child node now.
    builder.getBuilderTree().addChild(PROP_FACETS);
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    // Deleting the facets node should also not set the reindex flag.
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.removeProperty(PROP_REFRESH_DEFN);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.getBuilderTree().getChild(PROP_FACETS).remove();
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
}
/**
 * Changing the topChildren facet setting (add/change/remove) must set only
 * the refresh flag, never the reindex flag.
 */
@Test
public void noReindexWhenFacetConfigChanged_topChildren() throws Exception {
    builder.indexRule("nt:base")
            .property("foo1").facets();
    builder.getBuilderTree().addChild(PROP_FACETS);
    NodeState currentNodeState = builder.build();
    nodeBuilder = currentNodeState.builder();
    // Unset the reindex flag first because the first build would have set it.
    nodeBuilder.setProperty(REINDEX_PROPERTY_NAME, false);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    // Add the topChildren prop on the facets node.
    builder.getBuilderTree().getChild(PROP_FACETS).setProperty(FulltextIndexConstants.PROP_FACETS_TOP_CHILDREN,100);
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    nodeBuilder = currentNodeState.builder();
    // Changing the value - this too shouldn't set the reindex flag.
    nodeBuilder.removeProperty(PROP_REFRESH_DEFN);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.getBuilderTree().getChild(PROP_FACETS).setProperty(FulltextIndexConstants.PROP_FACETS_TOP_CHILDREN,200);
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    // Now check the property-delete use case.
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.removeProperty(PROP_REFRESH_DEFN);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.getBuilderTree().getChild(PROP_FACETS).removeProperty(FulltextIndexConstants.PROP_FACETS_TOP_CHILDREN);
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
}
/**
 * Changing the secure-facets setting (add/change/remove) must set only the
 * refresh flag, never the reindex flag.
 */
@Test
public void noReindexWhenFacetConfigChanged_secure() throws Exception {
    builder.indexRule("nt:base")
            .property("foo1").facets();
    builder.getBuilderTree().addChild(PROP_FACETS);
    NodeState currentNodeState = builder.build();
    nodeBuilder = currentNodeState.builder();
    // Unset the reindex flag first because the first build would have set it.
    nodeBuilder.setProperty(REINDEX_PROPERTY_NAME, false);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    // Add the secure prop on the facets node.
    builder.getBuilderTree().getChild(PROP_FACETS).setProperty(FulltextIndexConstants.PROP_SECURE_FACETS,FulltextIndexConstants.PROP_SECURE_FACETS_VALUE_SECURE);
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.removeProperty(PROP_REFRESH_DEFN);
    // Changing the value - this too shouldn't set the reindex flag.
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.getBuilderTree().getChild(PROP_FACETS).setProperty(FulltextIndexConstants.PROP_SECURE_FACETS,FulltextIndexConstants.PROP_SECURE_FACETS_VALUE_INSECURE);
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    // Now check the property-delete use case.
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.removeProperty(PROP_REFRESH_DEFN);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.getBuilderTree().getChild(PROP_FACETS).removeProperty(FulltextIndexConstants.PROP_SECURE_FACETS);
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
}
/**
 * Changing the statistical facet sample size (add/change/remove) must set
 * only the refresh flag, never the reindex flag.
 */
@Test
public void noReindexWhenFacetConfigChanged_sampleSize() throws Exception {
    builder.indexRule("nt:base")
            .property("foo1").facets();
    builder.getBuilderTree().addChild(PROP_FACETS);
    NodeState currentNodeState = builder.build();
    nodeBuilder = currentNodeState.builder();
    // Unset the reindex flag first because the first build would have set it.
    nodeBuilder.setProperty(REINDEX_PROPERTY_NAME, false);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    // Add the sample size prop on the facets node.
    builder.getBuilderTree().getChild(PROP_FACETS).setProperty(FulltextIndexConstants.PROP_STATISTICAL_FACET_SAMPLE_SIZE,1000);
    currentNodeState = builder.build();
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.removeProperty(PROP_REFRESH_DEFN);
    // Changing the value - this too shouldn't set the reindex flag.
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.getBuilderTree().getChild(PROP_FACETS).setProperty(FulltextIndexConstants.PROP_STATISTICAL_FACET_SAMPLE_SIZE,2000);
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    // Now check the property-delete use case.
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.removeProperty(PROP_REFRESH_DEFN);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.getBuilderTree().getChild(PROP_FACETS).removeProperty(FulltextIndexConstants.PROP_STATISTICAL_FACET_SAMPLE_SIZE);
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
}
/**
 * costPerExecution is a cost-estimation hint: add/change/remove must set only
 * the refresh flag, never the reindex flag.
 */
@Test
public void noReindexWhenIfCostPerExecAddedOrChanged() {
    builder.indexRule("nt:base");
    NodeState currentNodeState = builder.build();
    nodeBuilder = currentNodeState.builder();
    // Unset the reindex flag first because the first build would have set it.
    nodeBuilder.setProperty(REINDEX_PROPERTY_NAME, false);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.getBuilderTree().getChild("indexRules").getChild("nt:base").setProperty(COST_PER_EXECUTION, 2.0);
    currentNodeState = builder.build();
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.removeProperty(PROP_REFRESH_DEFN);
    // Changing the value - this too shouldn't set the reindex flag.
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.getBuilderTree().getChild("indexRules").getChild("nt:base").setProperty(COST_PER_EXECUTION, 3.0);
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    // Now check the property-delete use case.
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.removeProperty(PROP_REFRESH_DEFN);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.getBuilderTree().getChild("indexRules").getChild("nt:base").removeProperty(COST_PER_EXECUTION);
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
}
/**
 * costPerEntry is a cost-estimation hint: add/change/remove must set only the
 * refresh flag, never the reindex flag.
 */
@Test
public void noReindexWhenIfCostPerEntryAddedOrChanged() {
    builder.indexRule("nt:base");
    NodeState currentNodeState = builder.build();
    nodeBuilder = currentNodeState.builder();
    // Unset the reindex flag first because the first build would have set it.
    nodeBuilder.setProperty(REINDEX_PROPERTY_NAME, false);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.getBuilderTree().getChild("indexRules").getChild("nt:base").setProperty(COST_PER_ENTRY, 2.0);
    currentNodeState = builder.build();
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.removeProperty(PROP_REFRESH_DEFN);
    // Changing the value - this too shouldn't set the reindex flag.
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.getBuilderTree().getChild("indexRules").getChild("nt:base").setProperty(COST_PER_ENTRY, 3.0);
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    // Now check the property-delete use case.
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.removeProperty(PROP_REFRESH_DEFN);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.getBuilderTree().getChild("indexRules").getChild("nt:base").removeProperty(COST_PER_ENTRY);
    currentNodeState = builder.build();
    assertFalse(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertTrue(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
}
/**
 * includedPaths affects what gets indexed: adding, changing and removing it
 * must all set the reindex flag.
 */
@Test
public void reindexFlagSetWhenRequired() {
    NodeState currentNodeState = builder.build();
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.setProperty(REINDEX_PROPERTY_NAME, false);
    // Add includedPaths.
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.includedPaths("/a", "/b");
    currentNodeState = builder.build();
    assertTrue(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    // Change its value.
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.setProperty(REINDEX_PROPERTY_NAME, false);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.includedPaths("/a", "/c");
    currentNodeState = builder.build();
    assertTrue(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    // Remove it entirely.
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.setProperty(REINDEX_PROPERTY_NAME, false);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.getBuilderTree().removeProperty(PathFilter.PROP_INCLUDED_PATHS);
    currentNodeState = builder.build();
    assertTrue(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
}
/**
 * If the facets node carries an unknown extra property, changes to it are not
 * recognised as query-time-only and a full reindex is flagged instead of a
 * refresh — even when a known query-time property is set alongside it.
 * NOTE(review): method name contains typos ("renidex", "with...Req"); renaming
 * is safe since JUnit discovers tests by annotation, not name.
 */
@Test
public void renidexIfFacetsNodeAddedwithSomeNewPropThatReqIndexing() throws Exception {
    builder.indexRule("nt:base")
            .property("foo1").facets();
    NodeState currentNodeState = builder.build();
    nodeBuilder = currentNodeState.builder();
    // Unset the reindex flag first because the first build would have set it.
    nodeBuilder.setProperty(REINDEX_PROPERTY_NAME, false);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.getBuilderTree().addChild(PROP_FACETS);
    // Add an unrecognised "foo" prop on the facets node -> reindex.
    builder.getBuilderTree().getChild(PROP_FACETS).setProperty("foo","bar");
    currentNodeState = builder.build();
    assertTrue(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertFalse(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    nodeBuilder = currentNodeState.builder();
    // Changing the unrecognised value - this too should set the reindex flag.
    nodeBuilder.removeProperty(PROP_REFRESH_DEFN);
    nodeBuilder.setProperty(REINDEX_PROPERTY_NAME, false);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.getBuilderTree().getChild(PROP_FACETS).setProperty("foo","bar2");
    currentNodeState = builder.build();
    assertTrue(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertFalse(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    // Deleting the node that carries the unrecognised prop -> reindex.
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.removeProperty(PROP_REFRESH_DEFN);
    nodeBuilder.setProperty(REINDEX_PROPERTY_NAME, false);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.getBuilderTree().getChild(PROP_FACETS).remove();
    currentNodeState = builder.build();
    assertTrue(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertFalse(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
    nodeBuilder = currentNodeState.builder();
    nodeBuilder.setProperty(REINDEX_PROPERTY_NAME, false);
    builder = new IndexDefinitionBuilder(nodeBuilder);
    builder.getBuilderTree().addChild(PROP_FACETS);
    // Mixing an unrecognised prop with a known query-time one still reindexes.
    builder.getBuilderTree().getChild(PROP_FACETS).setProperty("foo","bar");
    builder.getBuilderTree().getChild(PROP_FACETS).setProperty(FulltextIndexConstants.PROP_STATISTICAL_FACET_SAMPLE_SIZE,200);
    currentNodeState = builder.build();
    assertTrue(currentNodeState.getBoolean(REINDEX_PROPERTY_NAME));
    assertFalse(currentNodeState.getBoolean(PROP_REFRESH_DEFN));
}
/**
 * property(ruleName, propertyName) creates the rule node under the given
 * name; registering the same named rule twice must not create a duplicate.
 */
@Test
public void propRuleCustomName() throws Exception{
    builder.indexRule("nt:base").property("foo").property("bar");
    builder.indexRule("nt:base").property("fooProp", "foo2");
    // Repeat registration is idempotent.
    builder.indexRule("nt:base").property("fooProp", "foo2");
    Root root = new ImmutableRoot(builder.build());
    assertTrue(root.getTree("/indexRules/nt:base/properties/foo").exists());
    assertTrue(root.getTree("/indexRules/nt:base/properties/bar").exists());
    assertTrue(root.getTree("/indexRules/nt:base/properties/fooProp").exists());
}
/**
 * An explicit type of "disabled" is honoured on rebuild; any other custom
 * type is reset back to "lucene".
 */
@Test
public void typeNotChangedIfSet() throws Exception{
    NodeState initial = builder.build();
    assertEquals("lucene", initial.getString("type"));
    NodeBuilder edited = initial.builder();
    edited.setProperty("type", "disabled");
    assertEquals("disabled", new IndexDefinitionBuilder(edited).build().getString("type"));
    //Type other than 'disabled' would be reset
    edited.setProperty("type", "foo");
    assertEquals("lucene", new IndexDefinitionBuilder(edited).build().getString("type"));
}
/** nodeTypeIndex() flags the definition; a plain rule stays non-sync. */
@Test
public void nodeTypeIndex() throws Exception{
    builder.nodeTypeIndex();
    builder.indexRule("nt:file");
    NodeState defn = builder.build();
    assertTrue(defn.getChildNode("indexRules").exists());
    NodeState fileRule = defn.getChildNode("indexRules").getChildNode("nt:file");
    assertTrue(fileRule.exists());
    assertTrue(defn.getBoolean(FulltextIndexConstants.PROP_INDEX_NODE_TYPE));
    // sync() was never called, so the rule must not be marked sync.
    assertFalse(fileRule.getBoolean(FulltextIndexConstants.PROP_SYNC));
}
@Test
public void nodeTypeIndexSync() throws Exception {
    // Same as nodeTypeIndex(), but the rule is explicitly marked sync.
    builder.nodeTypeIndex();
    builder.indexRule("nt:file").sync();
    NodeState defn = builder.build();
    NodeState rules = defn.getChildNode("indexRules");
    assertTrue(rules.exists());
    NodeState fileRule = rules.getChildNode("nt:file");
    assertTrue(fileRule.exists());
    // Both the node-type flag and the per-rule sync flag must be present.
    assertTrue(defn.getBoolean(FulltextIndexConstants.PROP_INDEX_NODE_TYPE));
    assertTrue(fileRule.getBoolean(FulltextIndexConstants.PROP_SYNC));
}
@Test
public void noPropertiesNodeForEmptyRule() throws Exception {
    builder.nodeTypeIndex();
    builder.indexRule("nt:file").sync();
    // A rule without any property definitions must not get an empty
    // "properties" child node.
    NodeState defn = builder.build();
    NodeState propertiesNode = NodeStateUtils.getNode(defn, "/indexRules/nt:file/properties");
    assertFalse(propertiesNode.exists());
}
@Test
public void deprecated() {
    // The flag is absent unless deprecated() is called explicitly.
    assertFalse("By default index isn't deprecated",
            builder.build().getBoolean(INDEX_DEPRECATED));
    NodeState deprecatedState = builder.deprecated().build();
    assertTrue("Index must be deprecated if marked so",
            deprecatedState.getBoolean(INDEX_DEPRECATED));
}
@Test
public void boost() {
    builder.indexRule("nt:base")
            .property("foo1").boost(1.0f).enclosingRule()
            .property("foo2").boost(2.0f);
    NodeState defn = builder.build();
    // Each property node must carry exactly the boost it was configured with.
    String[] propNames = {"foo1", "foo2"};
    float[] expectedBoosts = {1.0f, 2.0f};
    for (int i = 0; i < propNames.length; i++) {
        NodeState prop = NodeStateUtils.getNode(defn, "indexRules/nt:base/properties/" + propNames[i]);
        assertTrue(prop.exists());
        assertEquals("Incorrectly set boost",
                expectedBoosts[i], prop.getProperty(FIELD_BOOST).getValue(Type.DOUBLE).floatValue(), 0.0001);
    }
}
@Test
public void facets() {
    builder.indexRule("nt:base")
            .property("foo1").facets().enclosingRule()
            .property("foo2").propertyIndex();
    NodeState defn = builder.build();
    // foo1 was marked for facets, so the flag must be present and true.
    NodeState withFacets = NodeStateUtils.getNode(defn, "indexRules/nt:base/properties/foo1");
    assertTrue(withFacets.exists());
    assertTrue("Incorrectly set facets property",
            withFacets.getBoolean(PROP_FACETS));
    // foo2 was not, so the property must not exist at all.
    NodeState withoutFacets = NodeStateUtils.getNode(defn, "indexRules/nt:base/properties/foo2");
    assertTrue(withoutFacets.exists());
    assertFalse("Incorrectly existing facets property",
            withoutFacets.hasProperty(PROP_FACETS));
}
@Test
public void tags() {
    // tags() on a fresh definition sets the initial tag set.
    NodeState state = EMPTY_NODE;
    builder = new IndexDefinitionBuilder(state.builder());
    builder.tags("foo");
    state = builder.build();
    assertTags(state, "foo");
    // addTags() with an already-present tag must not create a duplicate.
    builder = new IndexDefinitionBuilder(state.builder());
    builder.addTags("foo");
    state = builder.build();
    assertTags(state, "foo");
    // addTags() appends new tags while keeping the existing ones.
    builder = new IndexDefinitionBuilder(state.builder());
    builder.addTags("foo", "foo1");
    state = builder.build();
    assertTags(state, "foo", "foo1");
    builder = new IndexDefinitionBuilder(state.builder());
    builder.addTags("foo2");
    state = builder.build();
    assertTags(state, "foo", "foo1", "foo2");
    builder = new IndexDefinitionBuilder(state.builder());
    builder.addTags("foo2", "foo3");
    state = builder.build();
    assertTags(state, "foo", "foo1", "foo2", "foo3");
    // tags() replaces the whole set rather than appending to it.
    builder = new IndexDefinitionBuilder(state.builder());
    builder.tags("foo4");
    state = builder.build();
    assertTags(state, "foo4");
    // addTags() on a definition with no tags behaves like tags().
    builder = new IndexDefinitionBuilder(EMPTY_NODE.builder());
    builder.addTags("foo5");
    state = builder.build();
    assertTags(state, "foo5");
}

/**
 * Asserts that {@code state} carries exactly the given tags under
 * {@code INDEX_TAGS}, in any order.
 */
private static void assertTags(NodeState state, String... expectedTags) {
    Iterable<String> tags = state.getProperty(INDEX_TAGS).getValue(Type.STRINGS);
    assertEquals("Unexpected number of tags", expectedTags.length, Iterables.size(tags));
    assertThat(state.getProperty(INDEX_TAGS).getValue(Type.STRINGS),
            Matchers.containsInAnyOrder(expectedTags));
}
@Test
public void unnamedPropertyRuleInExistingIndex() {
    // Start from an index whose "foo" property rule has had its "name"
    // property explicitly removed.
    builder.indexRule("nt:base")
            .property("foo")
            .getBuilderTree()
            .removeProperty("name");
    NodeState existingIndex = builder.build();
    // Reloading that definition and adding another property rule must
    // succeed despite the missing "name" property.
    IndexDefinitionBuilder reloaded = new IndexDefinitionBuilder(existingIndex.builder());
    reloaded.indexRule("nt:base").property("bar");
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.analysis.common;
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.miscellaneous.ConditionalTokenFilter;
import org.apache.lucene.analysis.miscellaneous.RemoveDuplicatesTokenFilter;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractTokenFilterFactory;
import org.elasticsearch.index.analysis.AnalysisMode;
import org.elasticsearch.index.analysis.CharFilterFactory;
import org.elasticsearch.index.analysis.TokenFilterFactory;
import org.elasticsearch.index.analysis.TokenizerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Function;
/**
 * A token filter factory that runs several filters (or comma-separated filter
 * chains) in parallel over the same input, emitting the output of each at the
 * same position and de-duplicating identical tokens.
 */
public class MultiplexerTokenFilterFactory extends AbstractTokenFilterFactory {

    /** Names of the filters (or comma-separated filter chains) to run in parallel; set once in the constructor. */
    private final List<String> filterNames;
    /** Whether the original, unfiltered token is emitted alongside the filtered ones. */
    private final boolean preserveOriginal;

    /**
     * Creates the factory from index settings: {@code filters} lists the
     * sub-filters, and {@code preserve_original} (default {@code true})
     * controls emission of the unmodified token.
     */
    public MultiplexerTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) throws IOException {
        super(indexSettings, name, settings);
        this.filterNames = settings.getAsList("filters");
        this.preserveOriginal = settings.getAsBoolean("preserve_original", true);
    }

    @Override
    public TokenStream create(TokenStream tokenStream) {
        // Sub-filters must be resolved against the full analysis chain, so the
        // multiplexer can only be built via getChainAwareTokenFilterFactory().
        throw new UnsupportedOperationException("TokenFilterFactory.getChainAwareTokenFilterFactory() must be called first");
    }

    @Override
    public TokenFilterFactory getSynonymFilter() {
        throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
    }

    @Override
    public TokenFilterFactory getChainAwareTokenFilterFactory(TokenizerFactory tokenizer, List<CharFilterFactory> charFilters,
                                                              List<TokenFilterFactory> previousTokenFilters,
                                                              Function<String, TokenFilterFactory> allFilters) {
        List<TokenFilterFactory> filters = new ArrayList<>();
        if (preserveOriginal) {
            filters.add(IDENTITY_FILTER);
        }
        // also merge and transfer token filter analysis modes with analyzer
        AnalysisMode mode = AnalysisMode.ALL;
        for (String filter : filterNames) {
            String[] parts = Strings.tokenizeToStringArray(filter, ",");
            if (parts.length == 1) {
                // A single filter name: resolve it against the preceding chain.
                TokenFilterFactory factory = resolveFilterFactory(allFilters, parts[0]);
                factory = factory.getChainAwareTokenFilterFactory(tokenizer, charFilters, previousTokenFilters, allFilters);
                filters.add(factory);
                mode = mode.merge(factory.getAnalysisMode());
            } else {
                // A comma-separated chain: each sub-filter sees the filters
                // resolved before it within this chain as well.
                List<TokenFilterFactory> existingChain = new ArrayList<>(previousTokenFilters);
                List<TokenFilterFactory> chain = new ArrayList<>();
                for (String subfilter : parts) {
                    TokenFilterFactory factory = resolveFilterFactory(allFilters, subfilter);
                    factory = factory.getChainAwareTokenFilterFactory(tokenizer, charFilters, existingChain, allFilters);
                    chain.add(factory);
                    existingChain.add(factory);
                    mode = mode.merge(factory.getAnalysisMode());
                }
                filters.add(chainFilters(filter, chain));
            }
        }
        final AnalysisMode analysisMode = mode;
        return new TokenFilterFactory() {
            @Override
            public String name() {
                return MultiplexerTokenFilterFactory.this.name();
            }

            @Override
            public TokenStream create(TokenStream tokenStream) {
                List<Function<TokenStream, TokenStream>> functions = new ArrayList<>();
                for (TokenFilterFactory tff : filters) {
                    functions.add(tff::create);
                }
                // Duplicate tokens produced by different branches are collapsed.
                return new RemoveDuplicatesTokenFilter(new MultiplexTokenFilter(tokenStream, functions));
            }

            @Override
            public TokenFilterFactory getSynonymFilter() {
                throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
            }

            @Override
            public AnalysisMode getAnalysisMode() {
                return analysisMode;
            }
        };
    }

    /** Wraps a resolved sub-filter chain so it can be applied as one unit. */
    private TokenFilterFactory chainFilters(String name, List<TokenFilterFactory> filters) {
        return new TokenFilterFactory() {
            @Override
            public String name() {
                return name;
            }

            @Override
            public TokenStream create(TokenStream tokenStream) {
                for (TokenFilterFactory tff : filters) {
                    tokenStream = tff.create(tokenStream);
                }
                return tokenStream;
            }
        };
    }

    /**
     * Looks up a filter by name, failing with a descriptive error when the
     * referenced filter is not defined.
     */
    private TokenFilterFactory resolveFilterFactory(Function<String, TokenFilterFactory> factories, String name) {
        TokenFilterFactory factory = factories.apply(name);
        if (factory == null) {
            throw new IllegalArgumentException("Multiplexing filter [" + name() + "] refers to undefined tokenfilter [" + name + "]");
        } else {
            return factory;
        }
    }

    /**
     * Replays each input token through every configured branch, selecting one
     * branch per pass via {@code selector}.
     */
    private final class MultiplexTokenFilter extends TokenFilter {

        private final TokenStream source;
        private final int filterCount;
        private int selector;

        /**
         * Creates a MultiplexTokenFilter on the given input with a set of filters
         */
        MultiplexTokenFilter(TokenStream input, List<Function<TokenStream, TokenStream>> filters) {
            super(input);
            TokenStream source = new MultiplexerFilter(input);
            for (int i = 0; i < filters.size(); i++) {
                final int slot = i;
                // Each branch only processes tokens while its slot is selected.
                source = new ConditionalTokenFilter(source, filters.get(i)) {
                    @Override
                    protected boolean shouldFilter() {
                        return slot == selector;
                    }
                };
            }
            this.source = source;
            this.filterCount = filters.size();
            this.selector = filterCount - 1;
        }

        @Override
        public boolean incrementToken() throws IOException {
            return source.incrementToken();
        }

        @Override
        public void end() throws IOException {
            source.end();
        }

        @Override
        public void reset() throws IOException {
            source.reset();
        }

        /**
         * Re-emits the current token once per branch (at position increment 0)
         * by cycling {@code selector} through all slots before advancing.
         */
        private final class MultiplexerFilter extends TokenFilter {

            State state;
            PositionIncrementAttribute posIncAtt = addAttribute(PositionIncrementAttribute.class);

            private MultiplexerFilter(TokenStream input) {
                super(input);
            }

            @Override
            public boolean incrementToken() throws IOException {
                if (selector >= filterCount - 1) {
                    // All branches have seen the current token; pull the next one.
                    selector = 0;
                    if (input.incrementToken() == false) {
                        return false;
                    }
                    state = captureState();
                    return true;
                }
                // Replay the captured token for the next branch at the same position.
                restoreState(state);
                posIncAtt.setPositionIncrement(0);
                selector++;
                return true;
            }

            @Override
            public void reset() throws IOException {
                super.reset();
                selector = filterCount - 1;
                this.state = null;
            }
        }
    }
}
| |
package org.robolectric.internal;
import static java.util.Arrays.asList;
import com.google.common.collect.Lists;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.ServiceLoader;
import javax.annotation.Nonnull;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.runner.notification.RunNotifier;
import org.junit.runners.BlockJUnit4ClassRunner;
import org.junit.runners.model.FrameworkMethod;
import org.junit.runners.model.InitializationError;
import org.junit.runners.model.Statement;
import org.junit.runners.model.TestClass;
import org.robolectric.internal.bytecode.ClassHandler;
import org.robolectric.internal.bytecode.InstrumentationConfiguration;
import org.robolectric.internal.bytecode.Interceptor;
import org.robolectric.internal.bytecode.Interceptors;
import org.robolectric.internal.bytecode.Sandbox;
import org.robolectric.internal.bytecode.SandboxClassLoader;
import org.robolectric.internal.bytecode.SandboxConfig;
import org.robolectric.internal.bytecode.ShadowInfo;
import org.robolectric.internal.bytecode.ShadowMap;
import org.robolectric.internal.bytecode.ShadowWrangler;
import org.robolectric.pluginapi.perf.Metadata;
import org.robolectric.pluginapi.perf.Metric;
import org.robolectric.pluginapi.perf.PerfStatsReporter;
import org.robolectric.util.PerfStatsCollector;
import org.robolectric.util.PerfStatsCollector.Event;
/**
 * A JUnit runner that executes each test method inside a "sandbox": an
 * instrumenting class loader configured with shadow classes and interceptors.
 * Test classes are re-loaded ("bootstrapped") through that loader, so
 * {@code @BeforeClass}/{@code @AfterClass} are invoked reflectively on the
 * bootstrapped class rather than by the stock JUnit machinery.
 */
public class SandboxTestRunner extends BlockJUnit4ClassRunner {

  // Shadow map assembled once, at class-load time, from every ShadowProvider
  // discoverable via ServiceLoader on the current classpath.
  private static final ShadowMap BASE_SHADOW_MAP;

  static {
    ServiceLoader<ShadowProvider> shadowProviders = ServiceLoader.load(ShadowProvider.class);
    BASE_SHADOW_MAP = ShadowMap.createFromShadowProviders(shadowProviders);
  }

  private final Interceptors interceptors;
  private final List<PerfStatsReporter> perfStatsReporters;
  // Bootstrapped test classes whose @BeforeClass methods have already run;
  // their @AfterClass methods are invoked when the class block finishes.
  private final HashSet<Class<?>> loadedTestClasses = new HashSet<>();

  /**
   * Creates a runner for the given test class.
   *
   * @param klass the test class to run
   * @throws InitializationError if the class is not a valid JUnit test class
   */
  public SandboxTestRunner(Class<?> klass) throws InitializationError {
    super(klass);
    interceptors = new Interceptors(findInterceptors());
    perfStatsReporters = Lists.newArrayList(getPerfStatsReporters().iterator());
  }

  /** Reporters that receive perf metrics after each test; discovered via ServiceLoader by default. */
  @Nonnull
  protected Iterable<PerfStatsReporter> getPerfStatsReporters() {
    return ServiceLoader.load(PerfStatsReporter.class);
  }

  /** Interceptors to install in the sandbox; none by default — subclasses may override. */
  @Nonnull
  protected Collection<Interceptor> findInterceptors() {
    return Collections.emptyList();
  }

  @Nonnull
  protected Interceptors getInterceptors() {
    return interceptors;
  }

  @Override
  protected Statement classBlock(RunNotifier notifier) {
    final Statement statement = childrenInvoker(notifier);
    return new Statement() {
      @Override
      public void evaluate() throws Throwable {
        try {
          statement.evaluate();
          // Run @AfterClass on every bootstrapped class whose @BeforeClass ran.
          for (Class<?> testClass : loadedTestClasses) {
            invokeAfterClass(testClass);
          }
        } finally {
          afterClass();
          loadedTestClasses.clear();
        }
      }
    };
  }

  // Invokes @BeforeClass methods the first time a bootstrapped class is seen;
  // subsequent calls for the same class are no-ops.
  private void invokeBeforeClass(final Class clazz) throws Throwable {
    if (!loadedTestClasses.contains(clazz)) {
      loadedTestClasses.add(clazz);

      final TestClass testClass = new TestClass(clazz);
      final List<FrameworkMethod> befores = testClass.getAnnotatedMethods(BeforeClass.class);
      for (FrameworkMethod before : befores) {
        before.invokeExplosively(null);
      }
    }
  }

  // Reflectively invokes all @AfterClass methods on the bootstrapped class.
  private static void invokeAfterClass(final Class<?> clazz) throws Throwable {
    final TestClass testClass = new TestClass(clazz);
    final List<FrameworkMethod> afters = testClass.getAnnotatedMethods(AfterClass.class);
    for (FrameworkMethod after : afters) {
      after.invokeExplosively(null);
    }
  }

  /** Hook for subclasses; called after all tests in the class have run. */
  protected void afterClass() {
  }

  /** Builds a fresh sandbox (instrumenting class loader) for the given test method. */
  @Nonnull
  protected Sandbox getSandbox(FrameworkMethod method) {
    InstrumentationConfiguration instrumentationConfiguration = createClassLoaderConfig(method);
    ClassLoader sandboxClassLoader = new SandboxClassLoader(ClassLoader.getSystemClassLoader(), instrumentationConfiguration);
    return new Sandbox(sandboxClassLoader);
  }

  /**
   * Create an {@link InstrumentationConfiguration} suitable for the provided {@link FrameworkMethod}.
   *
   * Custom TestRunner subclasses may wish to override this method to provide alternate configuration.
   *
   * @param method the test method that's about to run
   * @return an {@link InstrumentationConfiguration}
   */
  @Nonnull
  protected InstrumentationConfiguration createClassLoaderConfig(FrameworkMethod method) {
    InstrumentationConfiguration.Builder builder = InstrumentationConfiguration.newBuilder()
        .doNotAcquirePackage("java.")
        .doNotAcquirePackage("sun.")
        .doNotAcquirePackage("org.robolectric.annotation.")
        .doNotAcquirePackage("org.robolectric.internal.")
        .doNotAcquirePackage("org.robolectric.pluginapi.")
        .doNotAcquirePackage("org.robolectric.util.")
        .doNotAcquirePackage("org.junit.");

    // Additional excluded packages may be supplied via a system property
    // (comma-separated; empty entries are skipped).
    String customPackages = System.getProperty("org.robolectric.packagesToNotAcquire", "");
    for (String pkg : customPackages.split(",")) {
      if (!pkg.isEmpty()) {
        builder.doNotAcquirePackage(pkg);
      }
    }

    // Classes shadowed via @SandboxConfig must be instrumented so their
    // shadows take effect inside the sandbox.
    for (Class<?> shadowClass : getExtraShadows(method)) {
      ShadowInfo shadowInfo = ShadowMap.obtainShadowInfo(shadowClass);
      builder.addInstrumentedClass(shadowInfo.shadowedClassName);
    }

    addInstrumentedPackages(method, builder);

    return builder.build();
  }

  // Merges instrumentedPackages from both class-level and method-level @SandboxConfig.
  private void addInstrumentedPackages(FrameworkMethod method, InstrumentationConfiguration.Builder builder) {
    SandboxConfig classConfig = getTestClass().getJavaClass().getAnnotation(SandboxConfig.class);
    if (classConfig != null) {
      for (String pkgName : classConfig.instrumentedPackages()) {
        builder.addInstrumentedPackage(pkgName);
      }
    }

    SandboxConfig methodConfig = method.getAnnotation(SandboxConfig.class);
    if (methodConfig != null) {
      for (String pkgName : methodConfig.instrumentedPackages()) {
        builder.addInstrumentedPackage(pkgName);
      }
    }
  }

  /** Installs the shadow map (base map plus any @SandboxConfig extras) and class handler on the sandbox. */
  protected void configureSandbox(Sandbox sandbox, FrameworkMethod method) {
    ShadowMap.Builder builder = createShadowMap().newBuilder();

    // Configure shadows *BEFORE* setting the ClassLoader. This is necessary because
    // creating the ShadowMap loads all ShadowProviders via ServiceLoader and this is
    // not available once we install the Robolectric class loader.
    Class<?>[] shadows = getExtraShadows(method);
    if (shadows.length > 0) {
      builder.addShadowClasses(shadows);
    }
    ShadowMap shadowMap = builder.build();
    sandbox.replaceShadowMap(shadowMap);

    sandbox.configure(createClassHandler(shadowMap, sandbox), getInterceptors());
  }

  @Override protected Statement methodBlock(final FrameworkMethod method) {
    return new Statement() {
      @Override
      public void evaluate() throws Throwable {
        PerfStatsCollector perfStatsCollector = PerfStatsCollector.getInstance();
        perfStatsCollector.reset();
        perfStatsCollector.setEnabled(!perfStatsReporters.isEmpty());

        Event initialization = perfStatsCollector.startEvent("initialization");

        Sandbox sandbox = getSandbox(method);

        // Configure sandbox *BEFORE* setting the ClassLoader. This is necessary because
        // creating the ShadowMap loads all ShadowProviders via ServiceLoader and this is
        // not available once we install the Robolectric class loader.
        configureSandbox(sandbox, method);

        // Swap in the sandbox class loader for the duration of the test;
        // restored in the finally block below.
        final ClassLoader priorContextClassLoader = Thread.currentThread().getContextClassLoader();
        Thread.currentThread().setContextClassLoader(sandbox.getRobolectricClassLoader());

        //noinspection unchecked
        Class bootstrappedTestClass = sandbox.bootstrappedClass(getTestClass().getJavaClass());
        HelperTestRunner helperTestRunner = getHelperTestRunner(bootstrappedTestClass);
        helperTestRunner.frameworkMethod = method;

        // Re-resolve the test method on the bootstrapped (re-loaded) class.
        final Method bootstrappedMethod;
        try {
          //noinspection unchecked
          bootstrappedMethod = bootstrappedTestClass.getMethod(method.getMethod().getName());
        } catch (NoSuchMethodException e) {
          throw new RuntimeException(e);
        }

        try {
          // Only invoke @BeforeClass once per class
          invokeBeforeClass(bootstrappedTestClass);

          beforeTest(sandbox, method, bootstrappedMethod);

          initialization.finished();

          final Statement statement = helperTestRunner.methodBlock(new FrameworkMethod(bootstrappedMethod));

          // todo: this try/finally probably isn't right -- should mimic RunAfters? [xw]
          try {
            statement.evaluate();
          } finally {
            afterTest(method, bootstrappedMethod);
          }
        } finally {
          Thread.currentThread().setContextClassLoader(priorContextClassLoader);
          finallyAfterTest(method);
          reportPerfStats(perfStatsCollector);
          perfStatsCollector.reset();
        }
      }
    };
  }

  // Forwards collected metrics to every registered reporter; reporter failures
  // are printed but do not fail the test.
  private void reportPerfStats(PerfStatsCollector perfStatsCollector) {
    if (perfStatsReporters.isEmpty()) {
      return;
    }

    Metadata metadata = perfStatsCollector.getMetadata();
    Collection<Metric> metrics = perfStatsCollector.getMetrics();

    for (PerfStatsReporter perfStatsReporter : perfStatsReporters) {
      try {
        perfStatsReporter.report(metadata, metrics);
      } catch (Exception e) {
        e.printStackTrace();
      }
    }
  }

  /** Hook invoked before each test method runs inside the sandbox; no-op by default. */
  protected void beforeTest(Sandbox sandbox, FrameworkMethod method, Method bootstrappedMethod) throws Throwable {
  }

  /** Hook invoked after each test method (before the class loader is restored); no-op by default. */
  protected void afterTest(FrameworkMethod method, Method bootstrappedMethod) {
  }

  /** Hook invoked in the finally block after each test; no-op by default. */
  protected void finallyAfterTest(FrameworkMethod method) {
  }

  protected HelperTestRunner getHelperTestRunner(Class bootstrappedTestClass) {
    try {
      return new HelperTestRunner(bootstrappedTestClass);
    } catch (InitializationError initializationError) {
      throw new RuntimeException(initializationError);
    }
  }

  /** Inner runner that executes the bootstrapped test class inside the sandbox. */
  protected static class HelperTestRunner extends BlockJUnit4ClassRunner {
    public FrameworkMethod frameworkMethod;

    public HelperTestRunner(Class<?> klass) throws InitializationError {
      super(klass);
    }

    // cuz accessibility
    @Override
    protected Statement methodBlock(FrameworkMethod method) {
      return super.methodBlock(method);
    }
  }

  /** Shadow classes declared via @SandboxConfig on the test class and/or the test method. */
  @Nonnull
  protected Class<?>[] getExtraShadows(FrameworkMethod method) {
    List<Class<?>> shadowClasses = new ArrayList<>();
    addShadows(shadowClasses, getTestClass().getJavaClass().getAnnotation(SandboxConfig.class));
    addShadows(shadowClasses, method.getAnnotation(SandboxConfig.class));
    return shadowClasses.toArray(new Class[shadowClasses.size()]);
  }

  private void addShadows(List<Class<?>> shadowClasses, SandboxConfig annotation) {
    if (annotation != null) {
      shadowClasses.addAll(asList(annotation.shadows()));
    }
  }

  protected ShadowMap createShadowMap() {
    return BASE_SHADOW_MAP;
  }

  @Nonnull
  protected ClassHandler createClassHandler(ShadowMap shadowMap, Sandbox sandbox) {
    return new ShadowWrangler(shadowMap, 0, interceptors);
  }

  // NOTE(review): declared but not visibly called in this class — presumably
  // used by subclasses or JUnit integration elsewhere; confirm before removing.
  protected boolean shouldIgnore(FrameworkMethod method) {
    return method.getAnnotation(Ignore.class) != null;
  }
}
| |
/*
* Copyright 2019 ZetaSQL Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.google.zetasql;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import static com.google.zetasql.TypeTestBase.checkSerializable;
import static com.google.zetasql.TypeTestBase.checkTypeSerializationAndDeserialization;
import static com.google.zetasql.TypeTestBase.checkTypeSerializationAndDeserializationExistingPools;
import static com.google.zetasql.TypeTestBase.getDescriptorPoolWithTypeProtoAndTypeKind;
import com.google.common.collect.Lists;
import com.google.common.testing.EqualsTester;
import com.google.protobuf.DescriptorProtos.FieldDescriptorProto;
import com.google.protobuf.DescriptorProtos.FieldOptions;
import com.google.protobuf.DescriptorProtos.FileDescriptorSet;
import com.google.protobuf.Descriptors.FieldDescriptor;
import com.google.zetasql.ZetaSQLType.ProtoTypeProto;
import com.google.zetasql.ZetaSQLType.TypeKind;
import com.google.zetasql.ZetaSQLType.TypeProto;
import com.google.zetasql.TypeAnnotationProto.FieldFormat;
import com.google.zetasqltest.TestSchemaProto.FieldFormatsProto;
import java.util.ArrayList;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@RunWith(JUnit4.class)
public class ProtoTypeTest {
@Test
public void testSerializationAndDeserialization() {
    TypeFactory factory = TypeFactory.nonUniqueNames();
    // Proto type created directly from a generated message class.
    checkTypeSerializationAndDeserialization(factory.createProtoType(TypeProto.class));
    // Proto types created from descriptors looked up in a descriptor pool.
    ZetaSQLDescriptorPool pool = getDescriptorPoolWithTypeProtoAndTypeKind();
    ProtoType typeProto = factory.createProtoType(pool.findMessageTypeByName("zetasql.TypeProto"));
    checkTypeSerializationAndDeserialization(typeProto);
    // Round trip again, this time deserializing against the existing pool.
    checkTypeSerializationAndDeserializationExistingPools(typeProto, Lists.newArrayList(pool));
    ProtoType arrayTypeProto =
        factory.createProtoType(pool.findMessageTypeByName("zetasql.ArrayTypeProto"));
    checkTypeSerializationAndDeserialization(arrayTypeProto);
    ProtoType protoTypeProto =
        factory.createProtoType(pool.findMessageTypeByName("zetasql.ProtoTypeProto"));
    checkTypeSerializationAndDeserialization(protoTypeProto);
}
@Test
public void testSerializeationAndDeserializationMultipleTypesWithSharedPools() {
    ZetaSQLDescriptorPool firstPool = getDescriptorPoolWithTypeProtoAndTypeKind();
    ZetaSQLDescriptorPool secondPool = getDescriptorPoolWithTypeProtoAndTypeKind();
    TypeFactory factory = TypeFactory.nonUniqueNames();

    List<Type> types = new ArrayList<>();
    // some proto
    types.add(factory.createProtoType(firstPool.findMessageTypeByName("zetasql.StructTypeProto")));
    // another proto
    types.add(factory.createProtoType(firstPool.findMessageTypeByName("zetasql.EnumTypeProto")));
    // duplicated proto from different pool
    types.add(factory.createProtoType(secondPool.findMessageTypeByName("zetasql.EnumTypeProto")));
    // duplicated proto from same pool
    types.add(factory.createProtoType(secondPool.findMessageTypeByName("zetasql.EnumTypeProto")));
    // and an enum
    types.add(factory.createEnumType(firstPool.findEnumTypeByName("zetasql.TypeKind")));
    // add some simple types
    types.add(TypeFactory.createSimpleType(TypeKind.TYPE_BOOL));
    types.add(TypeFactory.createSimpleType(TypeKind.TYPE_DOUBLE));

    // Serialize all types through one shared FileDescriptorSetsBuilder.
    FileDescriptorSetsBuilder fileDescriptorSetsBuilder = new FileDescriptorSetsBuilder();
    List<TypeProto> serialized = new ArrayList<>();
    for (Type type : types) {
        TypeProto.Builder typeProtoBuilder = TypeProto.newBuilder();
        type.serialize(typeProtoBuilder, fileDescriptorSetsBuilder);
        serialized.add(typeProtoBuilder.build());
    }

    List<FileDescriptorSet> sets = fileDescriptorSetsBuilder.build();
    // total number of FileDescriptorSet serialized:
    // matches the number of DescriptorPools used above.
    assertThat(sets).hasSize(2);

    // Rebuild one pool per serialized FileDescriptorSet.
    List<ZetaSQLDescriptorPool> deserializationPools = new ArrayList<>();
    for (FileDescriptorSet fileDescriptorSet : sets) {
        ZetaSQLDescriptorPool importedPool = new ZetaSQLDescriptorPool();
        importedPool.importFileDescriptorSet(fileDescriptorSet);
        deserializationPools.add(importedPool);
    }

    assertThat(serialized).hasSize(types.size());
    for (TypeProto proto : serialized) {
        // type protos are not self-contained
        assertThat(proto.getFileDescriptorSetCount()).isEqualTo(0);
        // but can be deserialized with existing pools
        checkTypeSerializationAndDeserialization(factory.deserialize(proto, deserializationPools));
    }
}
@Test
public void testSerializable() {
    TypeFactory factory = TypeFactory.nonUniqueNames();
    // Proto type created directly from a generated message class.
    checkTypeSerializationAndDeserialization(factory.createProtoType(TypeProto.class));
    // Proto types created from descriptors looked up in a descriptor pool
    // must all survive Java serialization.
    ZetaSQLDescriptorPool pool = getDescriptorPoolWithTypeProtoAndTypeKind();
    checkSerializable(factory.createProtoType(pool.findMessageTypeByName("zetasql.TypeProto")));
    checkSerializable(factory.createProtoType(pool.findMessageTypeByName("zetasql.ArrayTypeProto")));
    checkSerializable(factory.createProtoType(pool.findMessageTypeByName("zetasql.ProtoTypeProto")));
}
@Test
public void testSerializeableMultipleTypesWithSharedPools() {
    ZetaSQLDescriptorPool firstPool = getDescriptorPoolWithTypeProtoAndTypeKind();
    ZetaSQLDescriptorPool secondPool = getDescriptorPoolWithTypeProtoAndTypeKind();
    TypeFactory factory = TypeFactory.nonUniqueNames();

    List<Type> types = new ArrayList<>();
    // some proto
    types.add(factory.createProtoType(firstPool.findMessageTypeByName("zetasql.StructTypeProto")));
    // another proto
    types.add(factory.createProtoType(firstPool.findMessageTypeByName("zetasql.EnumTypeProto")));
    // duplicated proto from different pool
    types.add(factory.createProtoType(secondPool.findMessageTypeByName("zetasql.EnumTypeProto")));
    // duplicated proto from same pool
    types.add(factory.createProtoType(secondPool.findMessageTypeByName("zetasql.EnumTypeProto")));
    // and an enum
    types.add(factory.createEnumType(firstPool.findEnumTypeByName("zetasql.TypeKind")));
    // add some simple types
    types.add(TypeFactory.createSimpleType(TypeKind.TYPE_BOOL));
    types.add(TypeFactory.createSimpleType(TypeKind.TYPE_DOUBLE));

    // Serialize all types through one shared FileDescriptorSetsBuilder.
    FileDescriptorSetsBuilder fileDescriptorSetsBuilder = new FileDescriptorSetsBuilder();
    List<TypeProto> serialized = new ArrayList<>();
    for (Type type : types) {
        TypeProto.Builder typeProtoBuilder = TypeProto.newBuilder();
        type.serialize(typeProtoBuilder, fileDescriptorSetsBuilder);
        serialized.add(typeProtoBuilder.build());
    }

    List<FileDescriptorSet> sets = fileDescriptorSetsBuilder.build();
    // total number of FileDescriptorSet serialized:
    // matches the number of DescriptorPools used above.
    assertThat(sets).hasSize(2);

    // Rebuild one pool per serialized FileDescriptorSet.
    List<ZetaSQLDescriptorPool> deserializationPools = new ArrayList<>();
    for (FileDescriptorSet fileDescriptorSet : sets) {
        ZetaSQLDescriptorPool importedPool = new ZetaSQLDescriptorPool();
        importedPool.importFileDescriptorSet(fileDescriptorSet);
        deserializationPools.add(importedPool);
    }

    assertThat(serialized).hasSize(types.size());
    for (TypeProto proto : serialized) {
        // type protos are not self-contained
        assertThat(proto.getFileDescriptorSetCount()).isEqualTo(0);
        // but can be deserialized with existing pools
        checkSerializable(factory.deserialize(proto, deserializationPools));
    }
}
@Test
public void testEquivalent() {
    TypeFactory factory = TypeFactory.nonUniqueNames();
    ProtoType proto1 = factory.createProtoType(TypeProto.class);
    ProtoType proto2 =
        factory.createProtoType(
            getDescriptorPoolWithTypeProtoAndTypeKind()
                .findMessageTypeByName("zetasql.TypeProto"));
    ProtoType proto3 = factory.createProtoType(FieldDescriptorProto.class);
    // proto1 and proto2 describe the same message (via different descriptors),
    // so they are mutually equivalent; proto3 describes a different message
    // and is equivalent only to itself.
    ProtoType[] all = {proto1, proto2, proto3};
    for (ProtoType left : all) {
        for (ProtoType right : all) {
            boolean expected = (left == proto3) == (right == proto3);
            assertThat(left.equivalent(right)).isEqualTo(expected);
        }
    }
    // A proto type is never equivalent to a simple type.
    assertThat(proto1.equivalent(TypeFactory.createSimpleType(TypeKind.TYPE_BOOL))).isFalse();
}
/**
 * Asserts that the named field of FieldFormatsProto carries the expected
 * format annotation, and that hasFormatAnnotation() agrees (true for any
 * non-default format).
 */
private void verifyFormatAnnotation(FieldFormat.Format format, String fieldName) {
    FieldDescriptor field = FieldFormatsProto.getDescriptor().findFieldByName(fieldName);
    assertThat(ProtoType.getFormatAnnotation(field)).isEqualTo(format);
    boolean expectAnnotated = format != FieldFormat.Format.DEFAULT_FORMAT;
    assertThat(ProtoType.hasFormatAnnotation(field)).isEqualTo(expectAnnotated);
}
@Test
public void testFormatAnnotations() {
    // Table of field name -> expected format, covering unannotated, scalar,
    // alternate-width, and repeated fields of the test schema.
    Object[][] cases = {
        {"no_annotation", FieldFormat.Format.DEFAULT_FORMAT},
        {"date", FieldFormat.Format.DATE},
        {"date_64", FieldFormat.Format.DATE},
        {"date_decimal", FieldFormat.Format.DATE_DECIMAL},
        {"date_decimal_64", FieldFormat.Format.DATE_DECIMAL},
        {"date_decimal_encoding", FieldFormat.Format.DATE_DECIMAL},
        {"seconds", FieldFormat.Format.TIMESTAMP_SECONDS},
        {"millis", FieldFormat.Format.TIMESTAMP_MILLIS},
        {"micros", FieldFormat.Format.TIMESTAMP_MICROS},
        {"seconds_format", FieldFormat.Format.TIMESTAMP_SECONDS},
        {"millis_format", FieldFormat.Format.TIMESTAMP_MILLIS},
        {"micros_format", FieldFormat.Format.TIMESTAMP_MICROS},
        {"micros_u64", FieldFormat.Format.TIMESTAMP_MICROS},
        {"repeated_date", FieldFormat.Format.DATE},
        {"repeated_date_decimal", FieldFormat.Format.DATE_DECIMAL},
        {"repeated_seconds", FieldFormat.Format.TIMESTAMP_SECONDS},
        {"repeated_millis", FieldFormat.Format.TIMESTAMP_MILLIS},
        {"repeated_micros", FieldFormat.Format.TIMESTAMP_MICROS},
        {"repeated_seconds_format", FieldFormat.Format.TIMESTAMP_SECONDS},
        {"repeated_millis_format", FieldFormat.Format.TIMESTAMP_MILLIS},
        {"repeated_micros_format", FieldFormat.Format.TIMESTAMP_MICROS},
    };
    for (Object[] testCase : cases) {
        verifyFormatAnnotation((FieldFormat.Format) testCase[1], (String) testCase[0]);
    }
}
@Test
public void testEquals() {
  TypeFactory factory = TypeFactory.nonUniqueNames();
  // The same message type obtained from a generated class and from a
  // descriptor pool is expected to compare unequal in both directions.
  ProtoType fromClass = factory.createProtoType(TypeProto.class);
  ProtoType fromPool =
      factory.createProtoType(
          getDescriptorPoolWithTypeProtoAndTypeKind()
              .findMessageTypeByName("zetasql.TypeProto"));
  ProtoType differentMessage = factory.createProtoType(FieldDescriptorProto.class);
  new EqualsTester().addEqualityGroup(fromClass).testEquals();
  assertThat(fromClass.equals(fromPool)).isFalse();
  assertThat(fromClass.equals(differentMessage)).isFalse();
  assertThat(fromPool.equals(fromClass)).isFalse();
  new EqualsTester().addEqualityGroup(fromPool).testEquals();
  assertThat(fromPool.equals(differentMessage)).isFalse();
  assertThat(differentMessage.equals(fromClass)).isFalse();
  assertThat(differentMessage.equals(fromPool)).isFalse();
  new EqualsTester().addEqualityGroup(differentMessage).testEquals();
  // A proto type is never equal to a simple type.
  assertThat(fromClass.equals(TypeFactory.createSimpleType(TypeKind.TYPE_BOOL))).isFalse();
}
@Test
public void testAsProto() {
  TypeFactory factory = TypeFactory.nonUniqueNames();
  // Build one representative of each non-proto kind plus one ProtoType.
  ArrayType arrayOfInt =
      TypeFactory.createArrayType(TypeFactory.createSimpleType(TypeKind.TYPE_INT32));
  EnumType typeKindEnum = factory.createEnumType(TypeKind.class);
  ProtoType typeProto = factory.createProtoType(TypeProto.class);
  List<StructType.StructField> structFields = new ArrayList<>();
  structFields.add(
      new StructType.StructField("", TypeFactory.createSimpleType(TypeKind.TYPE_STRING)));
  structFields.add(
      new StructType.StructField("a", TypeFactory.createSimpleType(TypeKind.TYPE_INT32)));
  StructType twoFieldStruct = TypeFactory.createStructType(structFields);
  // Only the proto type reports itself through asProto(); everything else is null.
  assertThat(typeProto.asProto()).isEqualTo(typeProto);
  assertThat(arrayOfInt.asProto()).isNull();
  assertThat(typeKindEnum.asProto()).isNull();
  assertThat(twoFieldStruct.asProto()).isNull();
  assertThat(TypeFactory.createSimpleType(TypeKind.TYPE_INT32).asProto()).isNull();
}
@Test
public void testClassAndProtoSize() {
  // Guards against serialization drift: the proto and the Java class must be
  // updated together whenever a field is added or removed.
  String protoFieldCountMessage =
      "The number of fields of ProtoTypeProto has changed, "
          + "please also update the serialization code accordingly.";
  String classFieldCountMessage =
      "The number of fields in ProtoType class has changed, "
          + "please also update the proto and serialization code accordingly.";
  assertWithMessage(protoFieldCountMessage)
      .that(ProtoTypeProto.getDescriptor().getFields())
      .hasSize(3);
  assertWithMessage(classFieldCountMessage)
      .that(TestUtil.getNonStaticFieldCount(ProtoType.class))
      .isEqualTo(2);
}
}
| |
/**
* Java Modular Image Synthesis Toolkit (JMIST)
* Copyright (C) 2018 Bradley W. Kimmel
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package ca.eandb.jmist.framework;
import java.io.Serializable;
import ca.eandb.jmist.math.AffineMatrix3;
import ca.eandb.jmist.math.HPoint3;
import ca.eandb.jmist.math.LinearMatrix3;
import ca.eandb.jmist.math.Point3;
import ca.eandb.jmist.math.Ray3;
import ca.eandb.jmist.math.Vector3;
/**
* A class for classes implementing <code>AffineTransformable3</code> that
* require the inverse of the transformation matrix.
*
* @author Brad Kimmel
*/
public class InvertibleAffineTransformation3 extends AffineTransformation3
    implements Serializable {

  /** Serialization version ID. */
  private static final long serialVersionUID = -5759913323363262892L;

  /**
   * The inverse transformation matrix, maintained incrementally alongside the
   * forward matrix in the superclass. A value of <code>null</code> denotes the
   * identity transformation (see {@link #getInverseTransformationMatrix()}).
   */
  private AffineMatrix3 inverse = null;

  @Override
  public void rotate(Vector3 axis, double angle) {
    super.rotate(axis, angle);
    // The inverse of a rotation is the rotation by the negated angle.
    this.applyInverseTransformation(LinearMatrix3
        .rotateMatrix(axis, -angle));
  }

  @Override
  public void rotateX(double angle) {
    super.rotateX(angle);
    this.applyInverseTransformation(LinearMatrix3.rotateXMatrix(-angle));
  }

  @Override
  public void rotateY(double angle) {
    super.rotateY(angle);
    this.applyInverseTransformation(LinearMatrix3.rotateYMatrix(-angle));
  }

  @Override
  public void rotateZ(double angle) {
    super.rotateZ(angle);
    this.applyInverseTransformation(LinearMatrix3.rotateZMatrix(-angle));
  }

  @Override
  public void scale(double c) {
    super.scale(c);
    // NOTE(review): c == 0 yields an infinite/NaN inverse; callers are assumed
    // to supply non-zero scale factors -- TODO confirm against AffineTransformation3.
    this.applyInverseTransformation(LinearMatrix3.scaleMatrix(1.0 / c));
  }

  @Override
  public void stretch(double cx, double cy, double cz) {
    super.stretch(cx, cy, cz);
    this.applyInverseTransformation(LinearMatrix3.stretchMatrix(1.0 / cx,
        1.0 / cy, 1.0 / cz));
  }

  @Override
  public void stretch(Vector3 axis, double c) {
    super.stretch(axis, c);
    this.applyInverseTransformation(LinearMatrix3.stretchMatrix(axis,
        1.0 / c));
  }

  @Override
  public void stretchX(double cx) {
    super.stretchX(cx);
    this.applyInverseTransformation(LinearMatrix3.stretchXMatrix(1.0 / cx));
  }

  @Override
  public void stretchY(double cy) {
    super.stretchY(cy);
    this.applyInverseTransformation(LinearMatrix3.stretchYMatrix(1.0 / cy));
  }

  @Override
  public void stretchZ(double cz) {
    super.stretchZ(cz);
    this.applyInverseTransformation(LinearMatrix3.stretchZMatrix(1.0 / cz));
  }

  @Override
  public void transform(AffineMatrix3 T) {
    super.transform(T);
    this.applyInverseTransformation(T.inverse());
  }

  @Override
  public void transform(LinearMatrix3 T) {
    super.transform(T);
    this.applyInverseTransformation(T.inverse());
  }

  @Override
  public void translate(Vector3 v) {
    super.translate(v);
    // The inverse of a translation is the translation by the opposite vector.
    this.applyInverseTransformation(AffineMatrix3.translateMatrix(v
        .opposite()));
  }

  /**
   * Applies the specified inverse transformation matrix to the current
   * inverse transformation.
   *
   * @param Tinv
   *            The inverse of the <code>AffineMatrix3</code> that is being
   *            applied.
   */
  private void applyInverseTransformation(AffineMatrix3 Tinv) {
    if (this.inverse == null) {
      this.inverse = Tinv;
    } else {
      // Compose on the right: the newest inverse is applied first when the
      // combined inverse is used.
      this.inverse = this.inverse.times(Tinv);
    }
  }

  /**
   * Applies the specified inverse transformation matrix to the current
   * inverse transformation.
   *
   * @param Tinv
   *            The inverse of the <code>LinearMatrix3</code> that is being
   *            applied.
   */
  private void applyInverseTransformation(LinearMatrix3 Tinv) {
    this.applyInverseTransformation(new AffineMatrix3(Tinv));
  }

  /**
   * Gets the inverse transformation matrix.
   *
   * @return The <code>AffineMatrix3</code> representing the inverse of this
   *         transformation, or the identity matrix if no transformation has
   *         been accumulated.
   */
  protected AffineMatrix3 getInverseTransformationMatrix() {
    return this.inverse != null ? this.inverse : AffineMatrix3.IDENTITY;
  }

  /**
   * Applies this transformation to the specified
   * <code>InvertibleAffineTransformation3</code>.
   *
   * @param trans
   *            The <code>InvertibleAffineTransformation3</code> to apply
   *            this transformation to.
   */
  public void apply(InvertibleAffineTransformation3 trans) {
    if (this.isDirty()) {
      trans.applyTransformation(super.getTransformationMatrix());
      // BUGFIX: use the accessor rather than the raw field so that a null
      // (identity) inverse is propagated as the identity matrix instead of a
      // null reference, which previously corrupted the target's inverse or
      // triggered a NullPointerException in AffineMatrix3.times.
      trans.applyInverseTransformation(this.getInverseTransformationMatrix());
    }
  }

  @Override
  public void apply(AffineTransformable3 to) {
    if (to instanceof InvertibleAffineTransformation3) {
      // Preserve the target's inverse matrix as well as its forward matrix.
      this.apply((InvertibleAffineTransformation3) to);
    } else {
      super.apply(to);
    }
  }

  /**
   * Applies the inverse of this transformation to the specified
   * <code>InvertibleAffineTransformation3</code>.
   *
   * @param trans
   *            The <code>InvertibleAffineTransformation3</code> to apply
   *            the inverse of this transformation to.
   */
  public void applyInverse(InvertibleAffineTransformation3 trans) {
    if (this.isDirty()) {
      // BUGFIX: as in apply(InvertibleAffineTransformation3), fall back to
      // the identity matrix when no inverse has been accumulated yet.
      trans.applyTransformation(this.getInverseTransformationMatrix());
      trans.applyInverseTransformation(super.getTransformationMatrix());
    }
  }

  /**
   * Applies the inverse of this <code>AffineTransformation3</code> to
   * another object that is affine transformable.
   *
   * @param to
   *            The <code>AffineTransformable3</code> object to apply the
   *            inverse of this transformation to.
   */
  public void applyInverse(AffineTransformable3 to) {
    if (to instanceof InvertibleAffineTransformation3) {
      this.applyInverse((InvertibleAffineTransformation3) to);
    } else if (this.inverse != null) {
      to.transform(this.inverse);
    }
  }

  /**
   * Applies the inverse of this <code>AffineTransformation3</code> to a
   * <code>AffineMatrix3</code>.
   *
   * @param matrix
   *            The <code>AffineMatrix3</code> object to apply the inverse
   *            of this transformation to.
   * @return The transformed <code>AffineMatrix3</code>.
   */
  public AffineMatrix3 applyInverse(AffineMatrix3 matrix) {
    return this.inverse != null ? this.inverse.times(matrix) : matrix;
  }

  /**
   * Applies the inverse of this <code>AffineTransformation3</code> to a
   * <code>HPoint3</code>.
   *
   * @param p
   *            The <code>HPoint3</code> object to apply the inverse of this
   *            transformation to.
   * @return The transformed <code>HPoint3</code>.
   */
  public HPoint3 applyInverse(HPoint3 p) {
    return this.inverse != null ? this.inverse.times(p) : p;
  }

  /**
   * Applies the inverse of this <code>AffineTransformation3</code> to a
   * <code>Point3</code>.
   *
   * @param p
   *            The <code>Point3</code> object to apply the inverse of this
   *            transformation to.
   * @return The transformed <code>Point3</code>.
   */
  public Point3 applyInverse(Point3 p) {
    return this.inverse != null ? this.inverse.times(p) : p;
  }

  /**
   * Applies the inverse of this <code>AffineTransformation3</code> to a
   * <code>Vector3</code>.
   *
   * @param v
   *            The <code>Vector3</code> object to apply the inverse of this
   *            transformation to.
   * @return The transformed <code>Vector3</code>.
   */
  public Vector3 applyInverse(Vector3 v) {
    return this.inverse != null ? this.inverse.times(v) : v;
  }

  /**
   * Applies the inverse of this <code>AffineTransformation3</code> to a
   * <code>Ray3</code>.
   *
   * @param ray
   *            The <code>Ray3</code> object to apply the inverse of this
   *            transformation to.
   * @return The transformed <code>Ray3</code>.
   */
  public Ray3 applyInverse(Ray3 ray) {
    return this.inverse != null ? ray.transform(this.inverse) : ray;
  }

  @Override
  public void reset() {
    super.reset();
    // null denotes the identity inverse; see getInverseTransformationMatrix().
    this.inverse = null;
  }
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.eventgrid.v2020_04_01_preview.implementation;
import java.util.List;
import com.microsoft.azure.management.eventgrid.v2020_04_01_preview.TopicProvisioningState;
import com.microsoft.azure.management.eventgrid.v2020_04_01_preview.InputSchema;
import com.microsoft.azure.management.eventgrid.v2020_04_01_preview.InputSchemaMapping;
import com.microsoft.azure.management.eventgrid.v2020_04_01_preview.PublicNetworkAccess;
import com.microsoft.azure.management.eventgrid.v2020_04_01_preview.InboundIpRule;
import com.microsoft.azure.management.eventgrid.v2020_04_01_preview.ResourceSku;
import com.microsoft.azure.management.eventgrid.v2020_04_01_preview.IdentityInfo;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.microsoft.rest.serializer.JsonFlatten;
import com.microsoft.azure.Resource;
/**
* EventGrid Topic.
*/
@JsonFlatten
public class TopicInner extends Resource {
    /**
     * Private endpoint connections associated with this topic.
     */
    @JsonProperty(value = "properties.privateEndpointConnections")
    private List<PrivateEndpointConnectionInner> privateEndpointConnections;

    /**
     * Provisioning state of the topic. Possible values include: 'Creating',
     * 'Updating', 'Deleting', 'Succeeded', 'Canceled', 'Failed'.
     */
    @JsonProperty(value = "properties.provisioningState", access = JsonProperty.Access.WRITE_ONLY)
    private TopicProvisioningState provisioningState;

    /**
     * Endpoint for the topic.
     */
    @JsonProperty(value = "properties.endpoint", access = JsonProperty.Access.WRITE_ONLY)
    private String endpoint;

    /**
     * Format that Event Grid should expect for incoming events published to
     * the topic. Possible values include: 'EventGridSchema',
     * 'CustomEventSchema', 'CloudEventSchemaV1_0'.
     */
    @JsonProperty(value = "properties.inputSchema")
    private InputSchema inputSchema;

    /**
     * Enables publishing using custom event schemas. An InputSchemaMapping
     * can be specified to map various properties of a source schema to
     * various required properties of the EventGridEvent schema.
     */
    @JsonProperty(value = "properties.inputSchemaMapping")
    private InputSchemaMapping inputSchemaMapping;

    /**
     * Metric resource id for the topic.
     */
    @JsonProperty(value = "properties.metricResourceId", access = JsonProperty.Access.WRITE_ONLY)
    private String metricResourceId;

    /**
     * Determines if traffic is allowed over public network. Enabled by
     * default. Traffic can be further restricted to specific IPs by
     * configuring <seealso
     * cref="P:Microsoft.Azure.Events.ResourceProvider.Common.Contracts.TopicProperties.InboundIpRules"
     * />. Possible values include: 'Enabled', 'Disabled'.
     */
    @JsonProperty(value = "properties.publicNetworkAccess")
    private PublicNetworkAccess publicNetworkAccess;

    /**
     * Restricts traffic to specific IPs instead of all IPs. Note: these are
     * considered only if PublicNetworkAccess is enabled.
     */
    @JsonProperty(value = "properties.inboundIpRules")
    private List<InboundIpRule> inboundIpRules;

    /**
     * The Sku pricing tier for the topic.
     */
    @JsonProperty(value = "sku")
    private ResourceSku sku;

    /**
     * Identity information for the resource.
     */
    @JsonProperty(value = "identity")
    private IdentityInfo identity;

    /**
     * Get the private endpoint connections associated with this topic.
     *
     * @return the privateEndpointConnections value
     */
    public List<PrivateEndpointConnectionInner> privateEndpointConnections() {
        return privateEndpointConnections;
    }

    /**
     * Set the private endpoint connections associated with this topic.
     *
     * @param privateEndpointConnections the privateEndpointConnections value to set
     * @return the TopicInner object itself.
     */
    public TopicInner withPrivateEndpointConnections(List<PrivateEndpointConnectionInner> privateEndpointConnections) {
        this.privateEndpointConnections = privateEndpointConnections;
        return this;
    }

    /**
     * Get the provisioning state of the topic. Possible values include: 'Creating', 'Updating', 'Deleting', 'Succeeded', 'Canceled', 'Failed'.
     *
     * @return the provisioningState value
     */
    public TopicProvisioningState provisioningState() {
        return provisioningState;
    }

    /**
     * Get the endpoint for the topic.
     *
     * @return the endpoint value
     */
    public String endpoint() {
        return endpoint;
    }

    /**
     * Get the format that Event Grid should expect for incoming events published to the topic. Possible values include: 'EventGridSchema', 'CustomEventSchema', 'CloudEventSchemaV1_0'.
     *
     * @return the inputSchema value
     */
    public InputSchema inputSchema() {
        return inputSchema;
    }

    /**
     * Set the format that Event Grid should expect for incoming events published to the topic. Possible values include: 'EventGridSchema', 'CustomEventSchema', 'CloudEventSchemaV1_0'.
     *
     * @param inputSchema the inputSchema value to set
     * @return the TopicInner object itself.
     */
    public TopicInner withInputSchema(InputSchema inputSchema) {
        this.inputSchema = inputSchema;
        return this;
    }

    /**
     * Get the mapping that enables publishing using custom event schemas, mapping properties of a source schema to required properties of the EventGridEvent schema.
     *
     * @return the inputSchemaMapping value
     */
    public InputSchemaMapping inputSchemaMapping() {
        return inputSchemaMapping;
    }

    /**
     * Set the mapping that enables publishing using custom event schemas, mapping properties of a source schema to required properties of the EventGridEvent schema.
     *
     * @param inputSchemaMapping the inputSchemaMapping value to set
     * @return the TopicInner object itself.
     */
    public TopicInner withInputSchemaMapping(InputSchemaMapping inputSchemaMapping) {
        this.inputSchemaMapping = inputSchemaMapping;
        return this;
    }

    /**
     * Get the metric resource id for the topic.
     *
     * @return the metricResourceId value
     */
    public String metricResourceId() {
        return metricResourceId;
    }

    /**
     * Get whether traffic is allowed over public network (enabled by default; restrict further via inbound IP rules). Possible values include: 'Enabled', 'Disabled'.
     *
     * @return the publicNetworkAccess value
     */
    public PublicNetworkAccess publicNetworkAccess() {
        return publicNetworkAccess;
    }

    /**
     * Set whether traffic is allowed over public network (enabled by default; restrict further via inbound IP rules). Possible values include: 'Enabled', 'Disabled'.
     *
     * @param publicNetworkAccess the publicNetworkAccess value to set
     * @return the TopicInner object itself.
     */
    public TopicInner withPublicNetworkAccess(PublicNetworkAccess publicNetworkAccess) {
        this.publicNetworkAccess = publicNetworkAccess;
        return this;
    }

    /**
     * Get the inbound IP rules restricting traffic to specific IPs. Note: these are considered only if PublicNetworkAccess is enabled.
     *
     * @return the inboundIpRules value
     */
    public List<InboundIpRule> inboundIpRules() {
        return inboundIpRules;
    }

    /**
     * Set the inbound IP rules restricting traffic to specific IPs. Note: these are considered only if PublicNetworkAccess is enabled.
     *
     * @param inboundIpRules the inboundIpRules value to set
     * @return the TopicInner object itself.
     */
    public TopicInner withInboundIpRules(List<InboundIpRule> inboundIpRules) {
        this.inboundIpRules = inboundIpRules;
        return this;
    }

    /**
     * Get the Sku pricing tier for the topic.
     *
     * @return the sku value
     */
    public ResourceSku sku() {
        return sku;
    }

    /**
     * Set the Sku pricing tier for the topic.
     *
     * @param sku the sku value to set
     * @return the TopicInner object itself.
     */
    public TopicInner withSku(ResourceSku sku) {
        this.sku = sku;
        return this;
    }

    /**
     * Get the identity information for the resource.
     *
     * @return the identity value
     */
    public IdentityInfo identity() {
        return identity;
    }

    /**
     * Set the identity information for the resource.
     *
     * @param identity the identity value to set
     * @return the TopicInner object itself.
     */
    public TopicInner withIdentity(IdentityInfo identity) {
        this.identity = identity;
        return this;
    }
}
| |
/*
* Copyright 2015-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.ovsdb.provider.host;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import java.util.List;
import java.util.Set;
import java.util.function.Consumer;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.onlab.packet.IpAddress;
import org.onlab.packet.MacAddress;
import org.onlab.packet.TpPort;
import org.onosproject.net.DeviceId;
import org.onosproject.net.HostId;
import org.onosproject.net.HostLocation;
import org.onosproject.net.host.HostDescription;
import org.onosproject.net.host.HostProvider;
import org.onosproject.net.host.HostProviderRegistry;
import org.onosproject.net.host.HostProviderService;
import org.onosproject.net.provider.AbstractProviderService;
import org.onosproject.net.provider.ProviderId;
import org.onosproject.ovsdb.controller.DefaultEventSubject;
import org.onosproject.ovsdb.controller.EventSubject;
import org.onosproject.ovsdb.controller.OvsdbClientService;
import org.onosproject.ovsdb.controller.OvsdbController;
import org.onosproject.ovsdb.controller.OvsdbDatapathId;
import org.onosproject.ovsdb.controller.OvsdbEvent;
import org.onosproject.ovsdb.controller.OvsdbEventListener;
import org.onosproject.ovsdb.controller.OvsdbIfaceId;
import org.onosproject.ovsdb.controller.OvsdbNodeId;
import org.onosproject.ovsdb.controller.OvsdbNodeListener;
import org.onosproject.ovsdb.controller.OvsdbPortName;
import org.onosproject.ovsdb.controller.OvsdbPortNumber;
import org.onosproject.ovsdb.controller.OvsdbPortType;
/**
* Test for ovsdb host provider.
*/
public class OvsdbHostProviderTest {
    private static final MacAddress MAC = MacAddress
            .valueOf("00:00:11:00:00:01");
    private final OvsdbHostProvider provider = new OvsdbHostProvider();
    private final TestHostRegistry hostRegistry = new TestHostRegistry();
    protected OvsdbControllerTest controller = new OvsdbControllerTest();
    private TestHostProviderService providerService;

    @Before
    public void setUp() {
        // Wire the provider to the test doubles before activating it; activation
        // registers the provider with the (test) registry, which in turn creates
        // the providerService captured by the tests below.
        provider.providerRegistry = hostRegistry;
        provider.controller = controller;
        provider.activate();
    }

    /** Verifies that activation registered this provider with the registry. */
    @Test
    public void basics() {
        assertNotNull("registration expected", providerService);
        assertEquals("incorrect provider", provider, providerService.provider());
    }

    /**
     * Builds the OVSDB port event subject shared by the port add/remove tests.
     */
    private static DefaultEventSubject portEventSubject() {
        return new DefaultEventSubject(MAC, null,
                new OvsdbPortName("portName"),
                new OvsdbPortNumber(0L),
                new OvsdbDatapathId("10002"),
                new OvsdbPortType("vxlan"),
                new OvsdbIfaceId("102345"));
    }

    /** A PORT_ADDED event should reach the provider service as a detected host. */
    @Test
    public void portAdded() {
        controller.ovsdbEventListener
                .handle(new OvsdbEvent<EventSubject>(
                        OvsdbEvent.Type.PORT_ADDED,
                        portEventSubject()));
        // Typo fixed in the failure message ("throught" -> "through").
        assertNotNull("never went through the provider service",
                      providerService.added);
    }

    /** A PORT_REMOVED event should be reported as a vanished host. */
    @Test
    public void portRemoved() {
        controller.ovsdbEventListener
                .handle(new OvsdbEvent<EventSubject>(
                        OvsdbEvent.Type.PORT_REMOVED,
                        portEventSubject()));
        assertEquals("port status unhandled", 1, providerService.removeCount);
    }

    @After
    public void tearDown() {
        provider.deactivate();
        provider.coreService = null;
        provider.providerRegistry = null;
    }

    /** Registry stub that captures the provider service created on register. */
    private class TestHostRegistry implements HostProviderRegistry {
        @Override
        public HostProviderService register(HostProvider provider) {
            providerService = new TestHostProviderService(provider);
            return providerService;
        }

        @Override
        public void unregister(HostProvider provider) {
        }

        @Override
        public Set<ProviderId> getProviders() {
            return null;
        }
    }

    /**
     * Provider-service stub that records which device locations were reported
     * and how many hosts vanished.
     */
    private class TestHostProviderService
            extends AbstractProviderService<HostProvider>
            implements HostProviderService {
        // First, second, and subsequent device locations reported for a host.
        DeviceId added = null;
        DeviceId moved = null;
        DeviceId spine = null;
        public int removeCount;

        protected TestHostProviderService(HostProvider provider) {
            super(provider);
        }

        @Override
        public void hostDetected(HostId hostId, HostDescription hostDescription, boolean replaceIps) {
            DeviceId descr = hostDescription.location().deviceId();
            if (added == null) {
                added = descr;
            } else if ((moved == null) && !descr.equals(added)) {
                moved = descr;
            } else {
                spine = descr;
            }
        }

        @Override
        public void hostVanished(HostId hostId) {
            removeCount++;
        }

        @Override
        public void removeIpFromHost(HostId hostId, IpAddress ipAddress) {
        }

        @Override
        public void removeLocationFromHost(HostId hostId, HostLocation location) {
        }
    }

    /** Controller stub that exposes the registered OVSDB event listener. */
    private class OvsdbControllerTest implements OvsdbController {
        private OvsdbEventListener ovsdbEventListener = null;

        @Override
        public void addNodeListener(OvsdbNodeListener listener) {
        }

        @Override
        public void removeNodeListener(OvsdbNodeListener listener) {
        }

        @Override
        public void addOvsdbEventListener(OvsdbEventListener listener) {
            ovsdbEventListener = listener;
        }

        @Override
        public void removeOvsdbEventListener(OvsdbEventListener listener) {
            ovsdbEventListener = null;
        }

        @Override
        public List<OvsdbNodeId> getNodeIds() {
            return null;
        }

        @Override
        public OvsdbClientService getOvsdbClient(OvsdbNodeId nodeId) {
            return null;
        }

        @Override
        public void connect(IpAddress ip, TpPort port) {
        }

        @Override
        public void connect(IpAddress ip, TpPort port, Consumer<Exception> failhandler) {
        }
    }
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.codedeploy.model;
import java.io.Serializable;
/**
* <p>
* Represents the output of a list deployment groups operation.
* </p>
*/
public class ListDeploymentGroupsResult implements Serializable, Cloneable {
/**
* <p>
* The application name.
* </p>
*/
private String applicationName;
/**
* <p>
* A list of corresponding deployment group names.
* </p>
*/
private com.amazonaws.internal.SdkInternalList<String> deploymentGroups;
/**
* <p>
* If the amount of information that is returned is significantly large, an
* identifier will also be returned, which can be used in a subsequent list
* deployment groups call to return the next set of deployment groups in the
* list.
* </p>
*/
private String nextToken;
/**
* <p>
* The application name.
* </p>
*
* @param applicationName
* The application name.
*/
public void setApplicationName(String applicationName) {
this.applicationName = applicationName;
}
/**
* <p>
* The application name.
* </p>
*
* @return The application name.
*/
public String getApplicationName() {
return this.applicationName;
}
/**
* <p>
* The application name.
* </p>
*
* @param applicationName
* The application name.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public ListDeploymentGroupsResult withApplicationName(String applicationName) {
setApplicationName(applicationName);
return this;
}
/**
* <p>
* A list of corresponding deployment group names.
* </p>
*
* @return A list of corresponding deployment group names.
*/
public java.util.List<String> getDeploymentGroups() {
if (deploymentGroups == null) {
deploymentGroups = new com.amazonaws.internal.SdkInternalList<String>();
}
return deploymentGroups;
}
/**
* <p>
* A list of corresponding deployment group names.
* </p>
*
* @param deploymentGroups
* A list of corresponding deployment group names.
*/
public void setDeploymentGroups(
java.util.Collection<String> deploymentGroups) {
if (deploymentGroups == null) {
this.deploymentGroups = null;
return;
}
this.deploymentGroups = new com.amazonaws.internal.SdkInternalList<String>(
deploymentGroups);
}
/**
* <p>
* A list of corresponding deployment group names.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if
* any). Use {@link #setDeploymentGroups(java.util.Collection)} or
* {@link #withDeploymentGroups(java.util.Collection)} if you want to
* override the existing values.
* </p>
*
* @param deploymentGroups
* A list of corresponding deployment group names.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public ListDeploymentGroupsResult withDeploymentGroups(
String... deploymentGroups) {
if (this.deploymentGroups == null) {
setDeploymentGroups(new com.amazonaws.internal.SdkInternalList<String>(
deploymentGroups.length));
}
for (String ele : deploymentGroups) {
this.deploymentGroups.add(ele);
}
return this;
}
/**
* <p>
* A list of corresponding deployment group names.
* </p>
*
* @param deploymentGroups
* A list of corresponding deployment group names.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public ListDeploymentGroupsResult withDeploymentGroups(
java.util.Collection<String> deploymentGroups) {
setDeploymentGroups(deploymentGroups);
return this;
}
/**
* <p>
* If the amount of information that is returned is significantly large, an
* identifier will also be returned, which can be used in a subsequent list
* deployment groups call to return the next set of deployment groups in the
* list.
* </p>
*
* @param nextToken
* If the amount of information that is returned is significantly
* large, an identifier will also be returned, which can be used in a
* subsequent list deployment groups call to return the next set of
* deployment groups in the list.
*/
public void setNextToken(String nextToken) {
this.nextToken = nextToken;
}
/**
* <p>
* If the amount of information that is returned is significantly large, an
* identifier will also be returned, which can be used in a subsequent list
* deployment groups call to return the next set of deployment groups in the
* list.
* </p>
*
* @return If the amount of information that is returned is significantly
* large, an identifier will also be returned, which can be used in
* a subsequent list deployment groups call to return the next set
* of deployment groups in the list.
*/
public String getNextToken() {
return this.nextToken;
}
/**
* <p>
* If the amount of information that is returned is significantly large, an
* identifier will also be returned, which can be used in a subsequent list
* deployment groups call to return the next set of deployment groups in the
* list.
* </p>
*
* @param nextToken
* If the amount of information that is returned is significantly
* large, an identifier will also be returned, which can be used in a
* subsequent list deployment groups call to return the next set of
* deployment groups in the list.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public ListDeploymentGroupsResult withNextToken(String nextToken) {
setNextToken(nextToken);
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getApplicationName() != null)
sb.append("ApplicationName: " + getApplicationName() + ",");
if (getDeploymentGroups() != null)
sb.append("DeploymentGroups: " + getDeploymentGroups() + ",");
if (getNextToken() != null)
sb.append("NextToken: " + getNextToken());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof ListDeploymentGroupsResult == false)
return false;
ListDeploymentGroupsResult other = (ListDeploymentGroupsResult) obj;
if (other.getApplicationName() == null
^ this.getApplicationName() == null)
return false;
if (other.getApplicationName() != null
&& other.getApplicationName().equals(this.getApplicationName()) == false)
return false;
if (other.getDeploymentGroups() == null
^ this.getDeploymentGroups() == null)
return false;
if (other.getDeploymentGroups() != null
&& other.getDeploymentGroups().equals(
this.getDeploymentGroups()) == false)
return false;
if (other.getNextToken() == null ^ this.getNextToken() == null)
return false;
if (other.getNextToken() != null
&& other.getNextToken().equals(this.getNextToken()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime
* hashCode
+ ((getApplicationName() == null) ? 0 : getApplicationName()
.hashCode());
hashCode = prime
* hashCode
+ ((getDeploymentGroups() == null) ? 0 : getDeploymentGroups()
.hashCode());
hashCode = prime * hashCode
+ ((getNextToken() == null) ? 0 : getNextToken().hashCode());
return hashCode;
}
@Override
public ListDeploymentGroupsResult clone() {
    try {
        return (ListDeploymentGroupsResult) super.clone();
    } catch (CloneNotSupportedException e) {
        // Unreachable in practice: this type is Cloneable, so Object.clone() cannot refuse.
        throw new IllegalStateException(
                "Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!",
                e);
    }
}
}
| |
package uk.gov.register.store.postgres;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import io.dropwizard.jdbi.OptionalContainerFactory;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import uk.gov.register.core.Entry;
import uk.gov.register.core.EntryType;
import uk.gov.register.core.Item;
import uk.gov.register.core.Record;
import uk.gov.register.db.EntryDAO;
import uk.gov.register.db.EntryQueryDAO;
import uk.gov.register.db.ItemDAO;
import uk.gov.register.db.ItemQueryDAO;
import uk.gov.register.db.RecordQueryDAO;
import uk.gov.register.functional.app.RegisterRule;
import uk.gov.register.functional.app.TestRegister;
import uk.gov.register.functional.app.WipeDatabaseRule;
import uk.gov.register.util.HashValue;
import java.io.IOException;
import java.time.Duration;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.Arrays;
import java.util.Optional;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static uk.gov.register.core.HashingAlgorithm.*;
/**
 * Integration tests for {@link PostgresDataAccessLayer}: entry append/read,
 * item storage, iterators, and record/index queries, for both user and system
 * entry types. Each test runs against a freshly wiped database (see setup/tearDown).
 */
public class PostgresDataAccessLayerTest {
    private PostgresDataAccessLayer postgresDataAccessLayer;
    private ObjectMapper objectMapper = new ObjectMapper();

    @Rule
    public RegisterRule register = new RegisterRule();

    private DBI dbi;
    private Handle handle;

    @Before
    public void setup() {
        register.wipe();
        // Named addressRegister to avoid shadowing the RegisterRule field above.
        TestRegister addressRegister = TestRegister.address;
        dbi = new DBI(addressRegister.getDatabaseConnectionString("BatchedPostgresDataAccessLayerTest"));
        dbi.registerContainerFactory(new OptionalContainerFactory());
        handle = dbi.open();
        postgresDataAccessLayer = new PostgresDataAccessLayer(
                handle.attach(EntryDAO.class),
                handle.attach(EntryQueryDAO.class),
                handle.attach(ItemDAO.class),
                handle.attach(ItemQueryDAO.class),
                handle.attach(RecordQueryDAO.class),
                addressRegister.getSchema());
    }

    @After
    public void tearDown() {
        dbi.close(handle);
        register.wipe();
    }

    /**
     * Builds an Entry whose item hash is "itemhash{hashIndex}" and blob hash is
     * "blobhash{hashIndex}", matching the fixture naming used throughout this class.
     */
    private static Entry entry(int entryNumber, int hashIndex, Instant timestamp, String key, EntryType type) {
        return new Entry(entryNumber,
                new HashValue(SHA256, "itemhash" + hashIndex),
                new HashValue(SHA256, "blobhash" + hashIndex),
                timestamp, key, type);
    }

    /**
     * Builds an Item with hashes "itemhash{hashIndex}" / "itemhash{hashIndex}-blob-hash"
     * and JSON content {"field": "<fieldValue>"}.
     */
    private Item item(int hashIndex, String fieldValue) throws IOException {
        return new Item(new HashValue(SHA256, "itemhash" + hashIndex),
                new HashValue(SHA256, "itemhash" + hashIndex + "-blob-hash"),
                objectMapper.readTree("{\"field\":\"" + fieldValue + "\"}"));
    }

    /** Appends each entry one at a time, in argument order. */
    private void appendAll(Entry... entries) {
        for (Entry e : entries) {
            postgresDataAccessLayer.appendEntry(e);
        }
    }

    /** Adds each item one at a time, in argument order. */
    private void addAll(Item... items) {
        for (Item i : items) {
            postgresDataAccessLayer.addItem(i);
        }
    }

    @Test
    public void canAppendAndGetEntry() {
        Instant timestamp = Instant.now().truncatedTo(ChronoUnit.SECONDS);
        Entry entry1 = entry(1, 1, timestamp, "key1", EntryType.user);
        Entry entry2 = entry(2, 2, timestamp, "key2", EntryType.user);
        appendAll(entry1, entry2);
        assertThat(postgresDataAccessLayer.getEntry(1), is(Optional.of(entry1)));
        assertThat(postgresDataAccessLayer.getEntry(2), is(Optional.of(entry2)));
        assertThat(postgresDataAccessLayer.getEntry(3), is(Optional.empty()));
    }

    @Test
    public void canAppendEntriesInBatch() {
        Instant timestamp = Instant.now().truncatedTo(ChronoUnit.SECONDS);
        Entry entry1 = entry(1, 1, timestamp, "key1", EntryType.user);
        Entry entry2 = entry(2, 2, timestamp, "key2", EntryType.user);
        postgresDataAccessLayer.appendEntries(Arrays.asList(entry1, entry2));
        assertThat(postgresDataAccessLayer.getEntry(1), is(Optional.of(entry1)));
        assertThat(postgresDataAccessLayer.getEntry(2), is(Optional.of(entry2)));
        assertThat(postgresDataAccessLayer.getEntry(3), is(Optional.empty()));
    }

    @Test(expected = IllegalArgumentException.class)
    public void appendUserEntriesThatSkipEntryNumberThrowsException() {
        Instant timestamp = Instant.now().truncatedTo(ChronoUnit.SECONDS);
        // Entry numbers 1 then 3: the gap must be rejected.
        appendAll(entry(1, 1, timestamp, "key1", EntryType.user),
                entry(3, 2, timestamp, "key2", EntryType.user));
    }

    @Test(expected = IllegalArgumentException.class)
    public void appendUserEntriesWithDuplicateEntryNumberThrowsException() {
        Instant timestamp = Instant.now().truncatedTo(ChronoUnit.SECONDS);
        // Entry number 1 used twice: the duplicate must be rejected.
        appendAll(entry(1, 1, timestamp, "key1", EntryType.user),
                entry(1, 2, timestamp, "key2", EntryType.user));
    }

    @Test(expected = IllegalArgumentException.class)
    public void appendSystemEntriesThatSkipEntryNumberThrowsException() {
        Instant timestamp = Instant.now().truncatedTo(ChronoUnit.SECONDS);
        appendAll(entry(1, 1, timestamp, "key1", EntryType.system),
                entry(3, 2, timestamp, "key2", EntryType.system));
    }

    @Test(expected = IllegalArgumentException.class)
    public void appendSystemEntriesWithDuplicateEntryNumberThrowsException() {
        Instant timestamp = Instant.now().truncatedTo(ChronoUnit.SECONDS);
        appendAll(entry(1, 1, timestamp, "key1", EntryType.system),
                entry(1, 2, timestamp, "key2", EntryType.system));
    }

    @Test
    public void canGetMultipleEntries() {
        Instant timestamp = Instant.now().truncatedTo(ChronoUnit.SECONDS);
        Entry entry1 = entry(1, 1, timestamp, "key1", EntryType.user);
        Entry entry2 = entry(2, 2, timestamp, "key2", EntryType.user);
        Entry entry3 = entry(3, 3, timestamp, "key3", EntryType.user);
        // entry4 deliberately reuses entry3's hashes and key.
        Entry entry4 = entry(4, 3, timestamp, "key3", EntryType.user);
        appendAll(entry1, entry2, entry3, entry4);
        // getAllEntries returns newest first; getEntries returns ascending from a start number.
        assertThat(postgresDataAccessLayer.getAllEntries().size(), is(4));
        assertThat(postgresDataAccessLayer.getAllEntries(), contains(entry4, entry3, entry2, entry1));
        assertThat(postgresDataAccessLayer.getEntries(1, 10).size(), is(4));
        assertThat(postgresDataAccessLayer.getEntries(1, 10), contains(entry1, entry2, entry3, entry4));
        assertThat(postgresDataAccessLayer.getEntries(2, 2).size(), is(2));
        assertThat(postgresDataAccessLayer.getEntries(2, 2), contains(entry2, entry3));
    }

    @Test
    public void canGetEntryIterator() {
        Instant timestamp = Instant.now().truncatedTo(ChronoUnit.SECONDS);
        Entry userEntry1 = entry(1, 1, timestamp, "key1", EntryType.user);
        Entry userEntry2 = entry(2, 2, timestamp, "key2", EntryType.user);
        Entry userEntry3 = entry(3, 3, timestamp, "key3", EntryType.user);
        Entry userEntry4 = entry(4, 4, timestamp, "key4", EntryType.user);
        Entry systemEntry1 = entry(1, 1, timestamp, "key1", EntryType.system);
        Entry systemEntry2 = entry(2, 2, timestamp, "key2", EntryType.system);
        Entry systemEntry3 = entry(3, 3, timestamp, "key3", EntryType.system);
        Entry systemEntry4 = entry(4, 4, timestamp, "key4", EntryType.system);
        appendAll(userEntry1, userEntry2, userEntry3, userEntry4,
                systemEntry1, systemEntry2, systemEntry3, systemEntry4);
        assertThat(Lists.newArrayList(postgresDataAccessLayer.getEntryIterator(EntryType.user)).size(), is(4));
        assertThat(Lists.newArrayList(postgresDataAccessLayer.getEntryIterator(EntryType.user)), contains(userEntry1, userEntry2, userEntry3, userEntry4));
        assertThat(Lists.newArrayList(postgresDataAccessLayer.getEntryIterator(EntryType.user, 0, 10)).size(), is(4));
        assertThat(Lists.newArrayList(postgresDataAccessLayer.getEntryIterator(EntryType.user, 0, 10)), contains(userEntry1, userEntry2, userEntry3, userEntry4));
        assertThat(Lists.newArrayList(postgresDataAccessLayer.getEntryIterator(EntryType.user, 1, 3)).size(), is(2));
        assertThat(Lists.newArrayList(postgresDataAccessLayer.getEntryIterator(EntryType.user, 1, 3)), contains(userEntry2, userEntry3));
        assertThat(Lists.newArrayList(postgresDataAccessLayer.getEntryIterator(EntryType.system)).size(), is(4));
        assertThat(Lists.newArrayList(postgresDataAccessLayer.getEntryIterator(EntryType.system)), contains(systemEntry1, systemEntry2, systemEntry3, systemEntry4));
        assertThat(Lists.newArrayList(postgresDataAccessLayer.getEntryIterator(EntryType.system, 0, 10)).size(), is(4));
        assertThat(Lists.newArrayList(postgresDataAccessLayer.getEntryIterator(EntryType.system, 0, 10)), contains(systemEntry1, systemEntry2, systemEntry3, systemEntry4));
        assertThat(Lists.newArrayList(postgresDataAccessLayer.getEntryIterator(EntryType.system, 1, 3)).size(), is(2));
        assertThat(Lists.newArrayList(postgresDataAccessLayer.getEntryIterator(EntryType.system, 1, 3)), contains(systemEntry2, systemEntry3));
    }

    @Test
    public void canGetEntriesByKey() {
        Instant timestamp = Instant.now().truncatedTo(ChronoUnit.SECONDS);
        Entry entry1 = entry(1, 1, timestamp, "key1", EntryType.user);
        Entry entry2 = entry(2, 2, timestamp, "key2", EntryType.user);
        Entry entry3 = entry(3, 3, timestamp, "key3", EntryType.user);
        // entry4 shares key3 with entry3 but has its own hashes.
        Entry entry4 = entry(4, 4, timestamp, "key3", EntryType.user);
        appendAll(entry1, entry2, entry3, entry4);
        assertThat(postgresDataAccessLayer.getAllEntriesByKey("key1").size(), is(1));
        assertThat(postgresDataAccessLayer.getAllEntriesByKey("key1"), contains(entry1));
        assertThat(postgresDataAccessLayer.getAllEntriesByKey("key2").size(), is(1));
        assertThat(postgresDataAccessLayer.getAllEntriesByKey("key2"), contains(entry2));
        assertThat(postgresDataAccessLayer.getAllEntriesByKey("key3").size(), is(2));
        assertThat(postgresDataAccessLayer.getAllEntriesByKey("key3"), contains(entry3, entry4));
    }

    @Test
    public void canGetTotalEntries() {
        // Counts are tracked independently per entry type, starting at zero.
        assertThat(postgresDataAccessLayer.getTotalEntries(EntryType.user), is(0));
        assertThat(postgresDataAccessLayer.getTotalEntries(EntryType.system), is(0));
        Instant timestamp = Instant.now().truncatedTo(ChronoUnit.SECONDS);
        appendAll(entry(1, 1, timestamp, "key1", EntryType.user),
                entry(2, 2, timestamp, "key2", EntryType.user),
                entry(3, 3, timestamp, "key3", EntryType.user),
                entry(4, 4, timestamp, "key4", EntryType.user));
        assertThat(postgresDataAccessLayer.getTotalEntries(EntryType.user), is(4));
        appendAll(entry(1, 1, timestamp, "key1", EntryType.system),
                entry(2, 2, timestamp, "key2", EntryType.system));
        assertThat(postgresDataAccessLayer.getTotalEntries(EntryType.system), is(2));
    }

    @Test
    public void canGetLastUpdatedTime() {
        assertThat(postgresDataAccessLayer.getLastUpdatedTime(), is(Optional.empty()));
        Instant timestamp1 = Instant.now().truncatedTo(ChronoUnit.SECONDS);
        Instant timestamp2 = timestamp1.plus(Duration.ofDays(1));
        Instant timestamp3 = timestamp2.plus(Duration.ofDays(1));
        appendAll(entry(1, 1, timestamp1, "key1", EntryType.user),
                entry(2, 2, timestamp2, "key2", EntryType.user),
                entry(1, 1, timestamp3, "key1", EntryType.system));
        // Only user entries drive the last-updated time: the later system
        // entry (timestamp3) must not be reported.
        assertThat(postgresDataAccessLayer.getLastUpdatedTime(), is(Optional.of(timestamp2)));
    }

    @Test
    public void canAddAndGetItem() throws IOException {
        Item item1 = item(1, "foo");
        Item item2 = item(2, "bar");
        addAll(item1, item2);
        assertThat(postgresDataAccessLayer.getItemByV1Hash(new HashValue(SHA256, "itemhash1")), is(Optional.of(item1)));
        assertThat(postgresDataAccessLayer.getItemByV1Hash(new HashValue(SHA256, "itemhash2")), is(Optional.of(item2)));
        assertThat(postgresDataAccessLayer.getItemByV1Hash(new HashValue(SHA256, "itemhash3")), is(Optional.empty()));
    }

    @Test
    public void canAddItemsInBatch() throws IOException {
        Item item1 = item(1, "foo");
        Item item2 = item(2, "bar");
        postgresDataAccessLayer.addItems(Arrays.asList(item1, item2));
        assertThat(postgresDataAccessLayer.getItemByV1Hash(new HashValue(SHA256, "itemhash1")), is(Optional.of(item1)));
        assertThat(postgresDataAccessLayer.getItemByV1Hash(new HashValue(SHA256, "itemhash2")), is(Optional.of(item2)));
        assertThat(postgresDataAccessLayer.getItemByV1Hash(new HashValue(SHA256, "itemhash3")), is(Optional.empty()));
    }

    @Test
    public void doesNotDuplicateItems() throws IOException {
        Item item1 = item(1, "foo");
        Item item2 = item(2, "bar");
        // Re-adding item1 must be a no-op, not an error or duplicate row.
        addAll(item1, item2, item1);
        assertThat(postgresDataAccessLayer.getItemByV1Hash(new HashValue(SHA256, "itemhash1")), is(Optional.of(item1)));
        assertThat(postgresDataAccessLayer.getItemByV1Hash(new HashValue(SHA256, "itemhash2")), is(Optional.of(item2)));
        assertThat(postgresDataAccessLayer.getItemByV1Hash(new HashValue(SHA256, "itemhash3")), is(Optional.empty()));
    }

    @Test
    public void canGetMultipleItems() throws IOException {
        assertThat(postgresDataAccessLayer.getAllItems().size(), is(0));
        Item item1 = item(1, "foo");
        Item item2 = item(2, "bar");
        addAll(item1, item2);
        assertThat(postgresDataAccessLayer.getAllItems().size(), is(2));
        assertThat(postgresDataAccessLayer.getAllItems(), contains(item1, item2));
    }

    @Test
    public void canGetItemIterator() throws IOException {
        Instant timestamp = Instant.now().truncatedTo(ChronoUnit.SECONDS);
        // User entries reference items 1-4; system entries reference items 5-8.
        Entry userEntry1 = entry(1, 1, timestamp, "key1", EntryType.user);
        Entry userEntry2 = entry(2, 2, timestamp, "key2", EntryType.user);
        Entry userEntry3 = entry(3, 3, timestamp, "key3", EntryType.user);
        Entry userEntry4 = entry(4, 4, timestamp, "key4", EntryType.user);
        Item item1 = item(1, "value1");
        Item item2 = item(2, "value2");
        Item item3 = item(3, "value3");
        Item item4 = item(4, "value4");
        Entry systemEntry1 = entry(1, 5, timestamp, "key5", EntryType.system);
        Entry systemEntry2 = entry(2, 6, timestamp, "key6", EntryType.system);
        Entry systemEntry3 = entry(3, 7, timestamp, "key7", EntryType.system);
        Entry systemEntry4 = entry(4, 8, timestamp, "key8", EntryType.system);
        Item item5 = item(5, "value5");
        Item item6 = item(6, "value6");
        Item item7 = item(7, "value7");
        Item item8 = item(8, "value8");
        appendAll(userEntry1, userEntry2, userEntry3, userEntry4);
        addAll(item1, item2, item3, item4);
        appendAll(systemEntry1, systemEntry2, systemEntry3, systemEntry4);
        addAll(item5, item6, item7, item8);
        assertThat(Lists.newArrayList(postgresDataAccessLayer.getItemIterator(EntryType.user)).size(), is(4));
        assertThat(Lists.newArrayList(postgresDataAccessLayer.getItemIterator(EntryType.user)), containsInAnyOrder(item1, item2, item3, item4));
        assertThat(Lists.newArrayList(postgresDataAccessLayer.getItemIterator(0, 10)).size(), is(4));
        assertThat(Lists.newArrayList(postgresDataAccessLayer.getItemIterator(0, 10)), containsInAnyOrder(item1, item2, item3, item4));
        assertThat(Lists.newArrayList(postgresDataAccessLayer.getItemIterator(1, 3)).size(), is(2));
        assertThat(Lists.newArrayList(postgresDataAccessLayer.getItemIterator(1, 3)), containsInAnyOrder(item2, item3));
        assertThat(Lists.newArrayList(postgresDataAccessLayer.getItemIterator(EntryType.system)).size(), is(4));
        assertThat(Lists.newArrayList(postgresDataAccessLayer.getItemIterator(EntryType.system)), containsInAnyOrder(item5, item6, item7, item8));
    }

    @Test
    public void canGetRecordsAndTotalRecords() throws IOException {
        Instant timestamp = Instant.now().truncatedTo(ChronoUnit.SECONDS);
        // key2 and key5 each appear twice; the later entry wins the record.
        Entry userEntry1 = entry(1, 1, timestamp, "key1", EntryType.user);
        Entry userEntry2 = entry(2, 2, timestamp, "key2", EntryType.user);
        Entry userEntry3 = entry(3, 3, timestamp, "key2", EntryType.user);
        Item item1 = item(1, "value1");
        Item item2 = item(2, "value2");
        Item item3 = item(3, "value3");
        Entry systemEntry1 = entry(1, 4, timestamp, "key5", EntryType.system);
        Entry systemEntry2 = entry(2, 5, timestamp, "key6", EntryType.system);
        Entry systemEntry3 = entry(3, 6, timestamp, "key5", EntryType.system);
        Item item4 = item(4, "value4");
        Item item5 = item(5, "value5");
        Item item6 = item(6, "value6");
        appendAll(userEntry1, userEntry2, userEntry3);
        addAll(item1, item2, item3);
        appendAll(systemEntry1, systemEntry2, systemEntry3);
        addAll(item4, item5, item6);
        Record userRecord1 = new Record(userEntry1, item1);
        Record userRecord2 = new Record(userEntry3, item3);
        Record systemRecord1 = new Record(systemEntry2, item5);
        Record systemRecord2 = new Record(systemEntry3, item6);
        assertThat(postgresDataAccessLayer.getRecord(EntryType.user, "key1"), is(Optional.of(userRecord1)));
        assertThat(postgresDataAccessLayer.getRecord(EntryType.user, "key2"), is(Optional.of(userRecord2)));
        assertThat(postgresDataAccessLayer.getRecord(EntryType.user, "key3"), is(Optional.empty()));
        assertThat(postgresDataAccessLayer.getRecords(EntryType.user, 10, 0).size(), is(2));
        assertThat(postgresDataAccessLayer.getRecords(EntryType.user, 10, 0), contains(userRecord2, userRecord1));
        assertThat(postgresDataAccessLayer.getRecords(EntryType.user, 10, 1).size(), is(1));
        assertThat(postgresDataAccessLayer.getRecords(EntryType.user, 10, 1), contains(userRecord1));
        assertThat(postgresDataAccessLayer.getRecords(EntryType.user, 1, 0).size(), is(1));
        assertThat(postgresDataAccessLayer.getRecords(EntryType.user, 1, 0), contains(userRecord2));
        assertThat(postgresDataAccessLayer.getTotalRecords(EntryType.user), is(2));
        assertThat(postgresDataAccessLayer.getRecord(EntryType.system, "key1"), is(Optional.empty()));
        assertThat(postgresDataAccessLayer.getRecord(EntryType.system, "key5"), is(Optional.of(systemRecord2)));
        assertThat(postgresDataAccessLayer.getRecord(EntryType.system, "key6"), is(Optional.of(systemRecord1)));
        assertThat(postgresDataAccessLayer.getRecords(EntryType.system, 10, 0).size(), is(2));
        assertThat(postgresDataAccessLayer.getRecords(EntryType.system, 10, 0), contains(systemRecord2, systemRecord1));
        assertThat(postgresDataAccessLayer.getRecords(EntryType.system, 10, 1).size(), is(1));
        assertThat(postgresDataAccessLayer.getRecords(EntryType.system, 10, 1), contains(systemRecord1));
        assertThat(postgresDataAccessLayer.getRecords(EntryType.system, 1, 0).size(), is(1));
        assertThat(postgresDataAccessLayer.getRecords(EntryType.system, 1, 0), contains(systemRecord2));
        assertThat(postgresDataAccessLayer.getTotalRecords(EntryType.system), is(2));
    }

    @Test
    public void canGetRecordsByKeyValue() throws IOException {
        Instant timestamp = Instant.now().truncatedTo(ChronoUnit.SECONDS);
        Entry userEntry1 = entry(1, 1, timestamp, "key1", EntryType.user);
        Entry userEntry2 = entry(2, 2, timestamp, "key2", EntryType.user);
        Entry userEntry3 = entry(3, 3, timestamp, "key2", EntryType.user);
        Entry userEntry4 = entry(4, 4, timestamp, "key3", EntryType.user);
        // item2 shares "value1" with item1, but only item1 backs a current record.
        Item item1 = item(1, "value1");
        Item item2 = item(2, "value1");
        Item item3 = item(3, "value2");
        Item item4 = item(4, "value2");
        Entry systemEntry1 = entry(1, 5, timestamp, "key5", EntryType.system);
        Entry systemEntry2 = entry(2, 6, timestamp, "key6", EntryType.system);
        Entry systemEntry3 = entry(3, 7, timestamp, "key5", EntryType.system);
        Item item5 = item(5, "value3");
        Item item6 = item(6, "value4");
        Item item7 = item(7, "value4");
        appendAll(userEntry1, userEntry2, userEntry3, userEntry4);
        addAll(item1, item2, item3, item4);
        appendAll(systemEntry1, systemEntry2, systemEntry3);
        addAll(item5, item6, item7);
        Record userRecord1 = new Record(userEntry1, item1);
        Record userRecord2 = new Record(userEntry3, item3);
        Record userRecord3 = new Record(userEntry4, item4);
        Record systemRecord1 = new Record(systemEntry2, item6);
        Record systemRecord2 = new Record(systemEntry3, item7);
        assertThat(postgresDataAccessLayer.findMax100RecordsByKeyValue(EntryType.user, "field", "value1").size(), is(1));
        assertThat(postgresDataAccessLayer.findMax100RecordsByKeyValue(EntryType.user, "field", "value1"), contains(userRecord1));
        assertThat(postgresDataAccessLayer.findMax100RecordsByKeyValue(EntryType.user, "field", "value2").size(), is(2));
        assertThat(postgresDataAccessLayer.findMax100RecordsByKeyValue(EntryType.user, "field", "value2"), contains(userRecord3, userRecord2));
        assertThat(postgresDataAccessLayer.findMax100RecordsByKeyValue(EntryType.user, "field", "value3").size(), is(0));
        assertThat(postgresDataAccessLayer.findMax100RecordsByKeyValue(EntryType.user, "invalid-field", "value").size(), is(0));
        assertThat(postgresDataAccessLayer.findMax100RecordsByKeyValue(EntryType.system, "field", "value3").size(), is(0));
        assertThat(postgresDataAccessLayer.findMax100RecordsByKeyValue(EntryType.system, "field", "value4").size(), is(2));
        assertThat(postgresDataAccessLayer.findMax100RecordsByKeyValue(EntryType.system, "field", "value4"), contains(systemRecord2, systemRecord1));
        assertThat(postgresDataAccessLayer.findMax100RecordsByKeyValue(EntryType.system, "invalid-field", "value").size(), is(0));
    }
}
| |
/*
* Copyright (c) 2010-2020. Axon Framework
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.axonframework.eventsourcing;
import org.axonframework.common.stream.BlockingStream;
import org.axonframework.eventhandling.DomainEventMessage;
import org.axonframework.eventhandling.EventMessage;
import org.axonframework.eventhandling.GenericDomainEventMessage;
import org.axonframework.eventhandling.GenericEventMessage;
import org.axonframework.eventhandling.GlobalSequenceTrackingToken;
import org.axonframework.eventhandling.MultiSourceTrackingToken;
import org.axonframework.eventhandling.TrackedEventMessage;
import org.axonframework.eventsourcing.eventstore.EmbeddedEventStore;
import org.axonframework.eventsourcing.eventstore.inmemory.InMemoryEventStorageEngine;
import org.axonframework.eventsourcing.utils.MockException;
import org.axonframework.messaging.Message;
import org.axonframework.messaging.StreamableMessageSource;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.time.Duration;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.Comparator;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code MultiStreamableMessageSource}, covering publish/consume ordering across
 * multiple event stores, token creation ({@code tail}, {@code head}, {@code at}, {@code since}),
 * long polling behavior, peeking semantics, and stream life-cycle (closing on open failure).
 */
class MultiStreamableMessageSourceTest {

    private MultiStreamableMessageSource testSubject;

    private EmbeddedEventStore eventStoreA;
    private EmbeddedEventStore eventStoreB;

    @BeforeEach
    void setUp() {
        eventStoreA = EmbeddedEventStore.builder().storageEngine(new InMemoryEventStorageEngine()).build();
        eventStoreB = EmbeddedEventStore.builder().storageEngine(new InMemoryEventStorageEngine()).build();

        testSubject = MultiStreamableMessageSource.builder()
                                                  .addMessageSource("eventStoreA", eventStoreA)
                                                  .addMessageSource("eventStoreB", eventStoreB)
                                                  .longPollingSource("eventStoreA")
                                                  .build();
    }

    @Test
    void simplePublishAndConsume() throws InterruptedException {
        EventMessage<?> publishedEvent = GenericEventMessage.asEventMessage("Event1");
        eventStoreA.publish(publishedEvent);

        BlockingStream<TrackedEventMessage<?>> singleEventStream =
                testSubject.openStream(testSubject.createTailToken());

        assertTrue(singleEventStream.hasNextAvailable());
        assertEquals(publishedEvent.getPayload(), singleEventStream.nextAvailable().getPayload());

        singleEventStream.close();
    }

    @SuppressWarnings("unchecked")
    @Test
    void testConnectionsAreClosedWhenOpeningFails() {
        StreamableMessageSource<TrackedEventMessage<?>> source1 = mock(StreamableMessageSource.class);
        StreamableMessageSource<TrackedEventMessage<?>> source2 = mock(StreamableMessageSource.class);

        testSubject = MultiStreamableMessageSource.builder()
                                                  .addMessageSource("source1", source1)
                                                  .addMessageSource("source2", source2)
                                                  .build();

        BlockingStream<TrackedEventMessage<?>> mockStream = mock(BlockingStream.class);
        when(source1.openStream(any())).thenReturn(mockStream);
        when(source2.openStream(any())).thenThrow(new MockException());

        assertThrows(MockException.class, () -> testSubject.openStream(null));

        // The stream that DID open successfully must be closed again when a later source fails.
        verify(mockStream).close();
        verify(source1).openStream(null);
        verify(source2).openStream(null);
    }

    @Test
    void simplePublishAndConsumeDomainEventMessage() throws InterruptedException {
        EventMessage<?> publishedEvent = new GenericDomainEventMessage<>("Aggregate", "id", 0, "Event1");
        eventStoreA.publish(publishedEvent);

        BlockingStream<TrackedEventMessage<?>> singleEventStream =
                testSubject.openStream(testSubject.createTailToken());

        assertTrue(singleEventStream.hasNextAvailable());
        TrackedEventMessage<?> actual = singleEventStream.nextAvailable();
        assertEquals(publishedEvent.getPayload(), actual.getPayload());
        // The domain-event nature of the message must survive tracking/wrapping.
        assertTrue(actual instanceof DomainEventMessage);

        singleEventStream.close();
    }

    @Test
    void testPeekingLastMessageKeepsItAvailable() throws InterruptedException {
        EventMessage<?> publishedEvent1 = GenericEventMessage.asEventMessage("Event1");
        eventStoreA.publish(publishedEvent1);

        BlockingStream<TrackedEventMessage<?>> stream = testSubject.openStream(null);

        assertEquals("Event1", stream.peek().map(Message::getPayload).map(Object::toString).orElse("None"));
        // Peeking must not consume the message.
        assertTrue(stream.hasNextAvailable());
        assertTrue(stream.hasNextAvailable(10, TimeUnit.SECONDS));
    }

    @Test
    void openStreamWithWrongToken() {
        // Only MultiSourceTrackingToken (or null) is a valid token for this source.
        assertThrows(IllegalArgumentException.class,
                     () -> testSubject.openStream(new GlobalSequenceTrackingToken(0L)));
    }

    @Test
    void openStreamWithNullTokenReturnsFirstEvent() throws InterruptedException {
        EventMessage<Object> message = GenericEventMessage.asEventMessage("Event1");
        eventStoreA.publish(message);

        BlockingStream<TrackedEventMessage<?>> actual = testSubject.openStream(null);

        assertNotNull(actual);
        TrackedEventMessage<?> trackedEventMessage = actual.nextAvailable();
        assertEquals(message.getIdentifier(), trackedEventMessage.getIdentifier());
        assertEquals(message.getPayload(), trackedEventMessage.getPayload());
    }

    @Test
    void longPoll() throws InterruptedException {
        BlockingStream<TrackedEventMessage<?>> singleEventStream =
                testSubject.openStream(testSubject.createTokenAt(Instant.now()));

        long beforePollTime = System.currentTimeMillis();
        assertFalse(singleEventStream.hasNextAvailable(100, TimeUnit.MILLISECONDS));
        long pollTime = System.currentTimeMillis() - beforePollTime;

        // allow for some deviation in polling time
        assertTrue(pollTime > 80, "Poll time too short: " + pollTime + "ms");
        assertTrue(pollTime < 120, "Poll time too long: " + pollTime + "ms");

        singleEventStream.close();
    }

    @Test
    void longPollMessageImmediatelyAvailable() throws InterruptedException {
        BlockingStream<TrackedEventMessage<?>> singleEventStream =
                testSubject.openStream(testSubject.createTokenAt(Instant.now()));

        EventMessage<?> pubToStreamB = GenericEventMessage.asEventMessage("Event1");
        eventStoreB.publish(pubToStreamB);

        long beforePollTime = System.currentTimeMillis();
        boolean hasNextAvailable = singleEventStream.hasNextAvailable(100, TimeUnit.MILLISECONDS);
        long afterPollTime = System.currentTimeMillis();

        assertTrue(hasNextAvailable);
        // An already-available message must not wait out the poll timeout.
        assertTrue(afterPollTime - beforePollTime < 10);

        singleEventStream.close();
    }

    @Test
    void multiPublishAndConsume() throws InterruptedException {
        EventMessage<?> pubToStreamA = GenericEventMessage.asEventMessage("Event1");
        eventStoreA.publish(pubToStreamA);

        Thread.sleep(20);

        EventMessage<?> pubToStreamB = GenericEventMessage.asEventMessage("Event2");
        eventStoreB.publish(pubToStreamB);

        BlockingStream<TrackedEventMessage<?>> singleEventStream =
                testSubject.openStream(testSubject.createTokenAt(Instant.now()));

        assertTrue(singleEventStream.hasNextAvailable());
        //order published must be same as order consumed
        assertEquals(pubToStreamA.getPayload(), singleEventStream.nextAvailable().getPayload());
        assertEquals(pubToStreamB.getPayload(), singleEventStream.nextAvailable().getPayload());
        assertFalse(singleEventStream.hasNextAvailable());

        singleEventStream.close();
    }

    @Test
    void peek() throws InterruptedException {
        EventMessage<?> publishedEvent = GenericEventMessage.asEventMessage("Event1");
        eventStoreA.publish(publishedEvent);

        BlockingStream<TrackedEventMessage<?>> singleEventStream =
                testSubject.openStream(testSubject.createTokenAt(Instant.now()));

        assertTrue(singleEventStream.peek().isPresent());
        assertEquals(publishedEvent.getPayload(), singleEventStream.peek().get().getPayload());
        //message is still consumable
        assertEquals(publishedEvent.getPayload(), singleEventStream.nextAvailable().getPayload());

        singleEventStream.close();
    }

    @Test
    void peekWithMultipleStreams() throws InterruptedException {
        EventMessage<?> pubToStreamA = GenericEventMessage.asEventMessage("Event1");
        eventStoreA.publish(pubToStreamA);

        Thread.sleep(20);

        EventMessage<?> pubToStreamB = GenericEventMessage.asEventMessage("Event2");
        eventStoreB.publish(pubToStreamB);

        BlockingStream<TrackedEventMessage<?>> singleEventStream =
                testSubject.openStream(testSubject.createTokenAt(Instant.now()));

        assertTrue(singleEventStream.peek().isPresent());
        TrackedEventMessage<?> peekedMessageA = singleEventStream.peek().get();
        MultiSourceTrackingToken tokenA = (MultiSourceTrackingToken) peekedMessageA.trackingToken();
        assertEquals(pubToStreamA.getPayload(), peekedMessageA.getPayload());
        //message is still consumable and consumed message equal to peeked
        assertEquals(peekedMessageA.getPayload(), singleEventStream.nextAvailable().getPayload());

        //peek and consume another
        assertTrue(singleEventStream.peek().isPresent());
        TrackedEventMessage<?> peekedMessageB = singleEventStream.peek().get();
        MultiSourceTrackingToken tokenB = (MultiSourceTrackingToken) peekedMessageB.trackingToken();
        assertEquals(pubToStreamB.getPayload(), peekedMessageB.getPayload());
        assertEquals(peekedMessageB.getPayload(), singleEventStream.nextAvailable().getPayload());

        //consuming from second stream doesn't alter token from first stream
        assertEquals(tokenA.getTokenForStream("eventStoreA"), tokenB.getTokenForStream("eventStoreA"));

        singleEventStream.close();
    }

    @Test
    void createTailToken() {
        EventMessage<?> pubToStreamA = GenericEventMessage.asEventMessage("Event1");
        eventStoreA.publish(pubToStreamA);

        EventMessage<?> pubToStreamB = GenericEventMessage.asEventMessage("Event2");
        eventStoreB.publish(pubToStreamB);

        MultiSourceTrackingToken tailToken = testSubject.createTailToken();

        // Tail tokens point before the first event (position -1) in every constituent stream.
        assertEquals(-1L, tailToken.getTokenForStream("eventStoreA").position().getAsLong());
        assertEquals(-1L, tailToken.getTokenForStream("eventStoreB").position().getAsLong());
    }

    @Test
    void createHeadToken() {
        EventMessage<?> pubToStreamA = GenericEventMessage.asEventMessage("Event1");
        eventStoreA.publish(pubToStreamA);

        EventMessage<?> pubToStreamB = GenericEventMessage.asEventMessage("Event2");
        eventStoreB.publish(pubToStreamB);
        eventStoreB.publish(pubToStreamB);

        MultiSourceTrackingToken headToken = testSubject.createHeadToken();

        // Head tokens point at the last published event of each stream.
        assertEquals(0L, headToken.getTokenForStream("eventStoreA").position().getAsLong());
        assertEquals(1L, headToken.getTokenForStream("eventStoreB").position().getAsLong());
    }

    @Test
    void createTokenAt() throws InterruptedException {
        EventMessage<?> pubToStreamA = GenericEventMessage.asEventMessage("Event1");
        eventStoreA.publish(pubToStreamA);
        eventStoreA.publish(pubToStreamA);

        Thread.sleep(20);

        EventMessage<?> pubToStreamB = GenericEventMessage.asEventMessage("Event2");
        eventStoreB.publish(pubToStreamB);

        MultiSourceTrackingToken createdAtToken =
                testSubject.createTokenAt(Instant.now().minus(10, ChronoUnit.MILLIS));

        //token should track events in eventStoreB and skip those in eventStoreA
        assertNull(createdAtToken.getTokenForStream("eventStoreA"));
        assertEquals(-1L, createdAtToken.getTokenForStream("eventStoreB").position().getAsLong());
    }

    @Test
    void createTokenSince() throws InterruptedException {
        EventMessage<?> pubToStreamA = GenericEventMessage.asEventMessage("Event1");
        eventStoreA.publish(pubToStreamA);
        eventStoreA.publish(pubToStreamA);

        Thread.sleep(20);

        EventMessage<?> pubToStreamB = GenericEventMessage.asEventMessage("Event2");
        eventStoreB.publish(pubToStreamB);

        MultiSourceTrackingToken createdSinceToken = testSubject.createTokenSince(Duration.ofMillis(10));

        //token should track events in eventStoreB and skip those in eventStoreA
        assertNull(createdSinceToken.getTokenForStream("eventStoreA"));
        assertEquals(-1L, createdSinceToken.getTokenForStream("eventStoreB").position().getAsLong());
    }

    @Test
    void configuredDifferentComparator() throws InterruptedException {
        // Comparator that drains "eventStoreA" first, then orders remaining sources by timestamp.
        Comparator<Map.Entry<String, TrackedEventMessage<?>>> eventStoreAPriority =
                Comparator.comparing((Map.Entry<String, TrackedEventMessage<?>> e) -> !e.getKey().equals("eventStoreA")).
                        thenComparing(e -> e.getValue().getTimestamp());

        EmbeddedEventStore eventStoreC = EmbeddedEventStore.builder().storageEngine(new InMemoryEventStorageEngine())
                                                           .build();

        MultiStreamableMessageSource prioritySourceTestSubject =
                MultiStreamableMessageSource.builder()
                                            .addMessageSource("eventStoreA", eventStoreA)
                                            .addMessageSource("eventStoreB", eventStoreB)
                                            .addMessageSource("eventStoreC", eventStoreC)
                                            .trackedEventComparator(eventStoreAPriority)
                                            .build();

        EventMessage<?> pubToStreamA = GenericEventMessage.asEventMessage("Event1");
        eventStoreA.publish(pubToStreamA);
        eventStoreA.publish(pubToStreamA);
        eventStoreA.publish(pubToStreamA);

        EventMessage<?> pubToStreamC = GenericEventMessage.asEventMessage("Event2");
        eventStoreC.publish(pubToStreamC);

        Thread.sleep(5);

        EventMessage<?> pubToStreamB = GenericEventMessage.asEventMessage("Event3");
        eventStoreB.publish(pubToStreamB);

        BlockingStream<TrackedEventMessage<?>> singleEventStream = prioritySourceTestSubject.openStream(
                prioritySourceTestSubject.createTailToken());

        // The three eventStoreA messages drain first due to the priority comparator.
        singleEventStream.nextAvailable();
        singleEventStream.nextAvailable();
        singleEventStream.nextAvailable();

        // Remaining messages arrive in timestamp order: C before B.
        assertEquals(pubToStreamC.getPayload(), singleEventStream.nextAvailable().getPayload());
        assertEquals(pubToStreamB.getPayload(), singleEventStream.nextAvailable().getPayload());
    }
}
| |
/* Copyright (c) 2014, 2015 Qualcomm Technologies Inc
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted (subject to the limitations in the disclaimer below) provided that
the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list
of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
Neither the name of Qualcomm Technologies Inc nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */
package org.firstinspires.ftc.robotcontroller.internal;
import android.app.ActionBar;
import android.app.Activity;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.hardware.usb.UsbDevice;
import android.hardware.usb.UsbManager;
import android.net.wifi.WifiManager;
import android.os.Bundle;
import android.os.IBinder;
import android.preference.PreferenceManager;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.webkit.WebView;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.google.blocks.ftcrobotcontroller.BlocksActivity;
import com.google.blocks.ftcrobotcontroller.ProgrammingModeActivity;
import com.google.blocks.ftcrobotcontroller.ProgrammingModeControllerImpl;
import com.google.blocks.ftcrobotcontroller.runtime.BlocksOpMode;
import com.qualcomm.ftccommon.AboutActivity;
import com.qualcomm.ftccommon.ClassManagerFactory;
import com.qualcomm.ftccommon.FtcEventLoop;
import com.qualcomm.ftccommon.FtcEventLoopIdle;
import com.qualcomm.ftccommon.FtcRobotControllerService;
import com.qualcomm.ftccommon.FtcRobotControllerService.FtcRobotControllerBinder;
import com.qualcomm.ftccommon.FtcRobotControllerSettingsActivity;
import com.qualcomm.ftccommon.LaunchActivityConstantsList;
import com.qualcomm.ftccommon.ProgrammingModeController;
import com.qualcomm.ftccommon.Restarter;
import com.qualcomm.ftccommon.UpdateUI;
import com.qualcomm.ftccommon.configuration.EditParameters;
import com.qualcomm.ftccommon.configuration.FtcLoadFileActivity;
import com.qualcomm.ftccommon.configuration.RobotConfigFile;
import com.qualcomm.ftccommon.configuration.RobotConfigFileManager;
import com.qualcomm.ftcrobotcontroller.R;
import com.qualcomm.hardware.HardwareFactory;
import com.qualcomm.robotcore.eventloop.opmode.OpModeRegister;
import com.qualcomm.robotcore.hardware.configuration.LynxConstants;
import com.qualcomm.robotcore.hardware.configuration.Utility;
import com.qualcomm.robotcore.robocol.PeerAppRobotController;
import com.qualcomm.robotcore.util.Dimmer;
import com.qualcomm.robotcore.util.ImmersiveMode;
import com.qualcomm.robotcore.util.RobotLog;
import com.qualcomm.robotcore.wifi.NetworkConnectionFactory;
import com.qualcomm.robotcore.wifi.NetworkType;
import com.qualcomm.robotcore.wifi.WifiDirectAssistant;
import org.firstinspires.ftc.ftccommon.external.SoundPlayingRobotMonitor;
import org.firstinspires.ftc.robotcore.internal.AppUtil;
import org.firstinspires.ftc.robotcore.internal.DragonboardLynxDragonboardIsPresentPin;
import org.firstinspires.ftc.robotcore.internal.PreferencesHelper;
import org.firstinspires.ftc.robotcore.internal.UILocation;
import org.firstinspires.ftc.robotcore.internal.network.DeviceNameManager;
import org.firstinspires.ftc.robotcore.internal.network.PreferenceRemoterRC;
import org.firstinspires.ftc.robotcore.internal.network.StartResult;
import org.firstinspires.inspection.RcInspectionActivity;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
/**
 * Main robot-controller activity: binds to {@link FtcRobotControllerService}, manages the robot
 * life-cycle (setup, restart, shutdown), relays USB attachment notifications to the event loop,
 * and drives the status UI.
 */
public class FtcRobotControllerActivity extends Activity {

  public static final String TAG = "RCActivity";

  private static final int REQUEST_CONFIG_WIFI_CHANNEL = 1;
  private static final int NUM_GAMEPADS = 2;

  protected WifiManager.WifiLock wifiLock;
  protected RobotConfigFileManager cfgFileMgr;

  protected ProgrammingModeController programmingModeController;

  protected UpdateUI.Callback callback;
  protected Context context;
  protected Utility utility;
  protected AppUtil appUtil = AppUtil.getInstance();

  protected StartResult deviceNameManagerStartResult = new StartResult();
  protected StartResult prefRemoterStartResult = new StartResult();
  protected PreferencesHelper preferencesHelper;

  protected ImageButton buttonMenu;
  protected TextView textDeviceName;
  protected TextView textNetworkConnectionStatus;
  protected TextView textRobotStatus;
  protected TextView[] textGamepad = new TextView[NUM_GAMEPADS];
  protected TextView textOpMode;
  protected TextView textErrorMessage;
  protected ImmersiveMode immersion;

  protected UpdateUI updateUI;
  protected Dimmer dimmer;
  protected LinearLayout entireScreenLayout;

  protected FtcRobotControllerService controllerService;
  protected NetworkType networkType;

  protected FtcEventLoop eventLoop;
  // USB attach notifications received before the event loop exists are queued here.
  protected Queue<UsbDevice> receivedUsbAttachmentNotifications;

  /** Restarter handed to the UI so menu/robot-state changes can request a full robot restart. */
  protected class RobotRestarter implements Restarter {

    public void requestRestart() {
      requestRobotRestart();
    }
  }

  protected ServiceConnection connection = new ServiceConnection() {
    @Override
    public void onServiceConnected(ComponentName name, IBinder service) {
      FtcRobotControllerBinder binder = (FtcRobotControllerBinder) service;
      onServiceBind(binder.getService());
    }

    @Override
    public void onServiceDisconnected(ComponentName name) {
      RobotLog.vv(FtcRobotControllerService.TAG, "%s.controllerService=null", TAG);
      controllerService = null;
    }
  };

  @Override
  protected void onNewIntent(Intent intent) {
    super.onNewIntent(intent);

    if (UsbManager.ACTION_USB_DEVICE_ATTACHED.equals(intent.getAction())) {
      UsbDevice usbDevice = intent.getParcelableExtra(UsbManager.EXTRA_DEVICE);
      if (usbDevice != null) { // paranoia; guard BEFORE any dereference (including logging)
        RobotLog.vv(TAG, "ACTION_USB_DEVICE_ATTACHED: %s", usbDevice.getDeviceName());
        // We might get attachment notifications before the event loop is set up, so
        // we hold on to them and pass them along only when we're good and ready.
        if (receivedUsbAttachmentNotifications != null) { // *total* paranoia
          receivedUsbAttachmentNotifications.add(usbDevice);
          passReceivedUsbAttachmentsToEventLoop();
        }
      }
    }
  }

  /**
   * Drains the pending USB-attachment queue into the event loop when one exists; otherwise
   * bounds the queue so it cannot grow without limit.
   */
  protected void passReceivedUsbAttachmentsToEventLoop() {
    if (this.eventLoop != null) {
      for (;;) {
        UsbDevice usbDevice = receivedUsbAttachmentNotifications.poll();
        if (usbDevice == null)
          break;
        this.eventLoop.onUsbDeviceAttached(usbDevice);
      }
    }
    else {
      // Paranoia: we don't want the pending list to grow without bound when we don't
      // (yet) have an event loop
      while (receivedUsbAttachmentNotifications.size() > 100) {
        receivedUsbAttachmentNotifications.poll();
      }
    }
  }

  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    RobotLog.writeLogcatToDisk();
    RobotLog.vv(TAG, "onCreate()");

    // Quick check: should we pretend we're not here, and so allow the Lynx to operate as
    // a stand-alone USB-connected module?
    if (LynxConstants.isDragonboardWithEmbeddedLynxModule()) {
      if (LynxConstants.disableDragonboard()) {
        // Double-sure check that the Lynx Module can operate over USB, etc, then get out of Dodge
        RobotLog.vv(TAG, "disabling Dragonboard and exiting robot controller");
        DragonboardLynxDragonboardIsPresentPin.getInstance().setState(false);
        AppUtil.getInstance().finishRootActivityAndExitApp();
      }
      else {
        // Double-sure check that we can talk to the DB over the serial TTY
        DragonboardLynxDragonboardIsPresentPin.getInstance().setState(true);
      }
    }

    context = this;
    utility = new Utility(this);
    appUtil.setThisApp(new PeerAppRobotController(context));

    // Started here; the matching stop() calls live in onDestroy().
    DeviceNameManager.getInstance().start(deviceNameManagerStartResult);
    PreferenceRemoterRC.getInstance().start(prefRemoterStartResult);

    receivedUsbAttachmentNotifications = new ConcurrentLinkedQueue<UsbDevice>();
    eventLoop = null;

    setContentView(R.layout.activity_ftc_controller);

    preferencesHelper = new PreferencesHelper(TAG, context);
    preferencesHelper.writeBooleanPrefIfDifferent(context.getString(R.string.pref_rc_connected), true);

    entireScreenLayout = (LinearLayout) findViewById(R.id.entire_screen);
    buttonMenu = (ImageButton) findViewById(R.id.menu_buttons);
    buttonMenu.setOnClickListener(new View.OnClickListener() {
      @Override
      public void onClick(View v) {
        AppUtil.getInstance().openOptionsMenuFor(FtcRobotControllerActivity.this);
      }
    });

    BlocksOpMode.setActivityAndWebView(this, (WebView) findViewById(R.id.webViewBlocksRuntime));

    ClassManagerFactory.processClasses();
    cfgFileMgr = new RobotConfigFileManager(this);

    // Clean up 'dirty' status after a possible crash
    RobotConfigFile configFile = cfgFileMgr.getActiveConfig();
    if (configFile.isDirty()) {
      configFile.markClean();
      cfgFileMgr.setActiveConfig(false, configFile);
    }

    textDeviceName = (TextView) findViewById(R.id.textDeviceName);
    textNetworkConnectionStatus = (TextView) findViewById(R.id.textNetworkConnectionStatus);
    textRobotStatus = (TextView) findViewById(R.id.textRobotStatus);
    textOpMode = (TextView) findViewById(R.id.textOpMode);
    textErrorMessage = (TextView) findViewById(R.id.textErrorMessage);
    textGamepad[0] = (TextView) findViewById(R.id.textGamepad1);
    textGamepad[1] = (TextView) findViewById(R.id.textGamepad2);
    immersion = new ImmersiveMode(getWindow().getDecorView());
    dimmer = new Dimmer(this);
    dimmer.longBright();

    programmingModeController = new ProgrammingModeControllerImpl(
        this, (TextView) findViewById(R.id.textRemoteProgrammingMode));

    updateUI = createUpdateUI();
    callback = createUICallback(updateUI);

    PreferenceManager.setDefaultValues(this, R.xml.preferences, false);

    WifiManager wifiManager = (WifiManager) getSystemService(Context.WIFI_SERVICE);
    wifiLock = wifiManager.createWifiLock(WifiManager.WIFI_MODE_FULL_HIGH_PERF, "");

    hittingMenuButtonBrightensScreen();

    wifiLock.acquire();
    callback.networkConnectionUpdate(WifiDirectAssistant.Event.DISCONNECTED);
    readNetworkType();
    startWatchdogService();
    bindToService();
    logPackageVersions();
  }

  protected UpdateUI createUpdateUI() {
    Restarter restarter = new RobotRestarter();
    UpdateUI result = new UpdateUI(this, dimmer);
    result.setRestarter(restarter);
    result.setTextViews(textNetworkConnectionStatus, textRobotStatus, textGamepad, textOpMode, textErrorMessage, textDeviceName);
    return result;
  }

  protected UpdateUI.Callback createUICallback(UpdateUI updateUI) {
    UpdateUI.Callback result = updateUI.new Callback();
    result.setStateMonitor(new SoundPlayingRobotMonitor());
    return result;
  }

  @Override
  protected void onStart() {
    super.onStart();
    RobotLog.vv(TAG, "onStart()");

    // If we're start()ing after a stop(), then shut the old robot down so
    // we can refresh it with new state (e.g., with new hw configurations)
    shutdownRobot();

    updateUIAndRequestRobotSetup();
    cfgFileMgr.getActiveConfigAndUpdateUI();

    entireScreenLayout.setOnTouchListener(new View.OnTouchListener() {
      @Override
      public boolean onTouch(View v, MotionEvent event) {
        dimmer.handleDimTimer();
        return false;
      }
    });
  }

  @Override
  protected void onResume() {
    super.onResume();
    RobotLog.vv(TAG, "onResume()");
  }

  @Override
  public void onPause() {
    super.onPause();
    RobotLog.vv(TAG, "onPause()");
    if (programmingModeController.isActive()) {
      programmingModeController.stopProgrammingMode();
    }
  }

  @Override
  protected void onStop() {
    // Note: this gets called even when the configuration editor is launched. That is, it gets
    // called surprisingly often. So, we don't actually do much here.
    super.onStop();
    RobotLog.vv(TAG, "onStop()");
  }

  @Override
  public void onDestroy() {
    super.onDestroy();
    RobotLog.vv(TAG, "onDestroy()");

    shutdownRobot();  // Ensure the robot is put away to bed
    if (callback != null) callback.close();

    // Symmetric teardown of the start() calls made in onCreate(). (This previously
    // called start() on PreferenceRemoterRC by mistake, leaking the remoter.)
    PreferenceRemoterRC.getInstance().stop(prefRemoterStartResult);
    DeviceNameManager.getInstance().stop(deviceNameManagerStartResult);

    unbindFromService();
    stopWatchdogService();
    wifiLock.release();
    RobotLog.cancelWriteLogcatToDisk();
  }

  protected void bindToService() {
    readNetworkType();
    Intent intent = new Intent(this, FtcRobotControllerService.class);
    intent.putExtra(NetworkConnectionFactory.NETWORK_CONNECTION_TYPE, networkType);
    bindService(intent, connection, Context.BIND_AUTO_CREATE);
  }

  protected Intent getWatchdogServiceIntent() {
    return new Intent(context, FtcRobotControllerWatchdogService.class);
  }

  protected void startWatchdogService() {
    RobotLog.vv(TAG, "startWatchdogService()");
    Intent intent = getWatchdogServiceIntent();
    try {
      ComponentName componentName = context.startService(intent);
      if (componentName == null) {
        RobotLog.ee(TAG, "watchdog service does not exist");
      } else {
        RobotLog.vv(TAG, "watchdog service = %s", componentName);
      }
    } catch (SecurityException e) {
      RobotLog.logExceptionHeader(TAG, e, "unable to start watchdog service");
    }
  }

  protected void stopWatchdogService() {
    RobotLog.vv(TAG, "stopWatchdogService()");
    Intent intent = getWatchdogServiceIntent();
    try {
      context.stopService(intent);
    } catch (SecurityException e) {
      RobotLog.logExceptionHeader(TAG, e, "unable to stop watchdog service");
    }
  }

  protected void unbindFromService() {
    if (controllerService != null) {
      unbindService(connection);
    }
  }

  protected void logPackageVersions() {
    RobotLog.logBuildConfig(com.qualcomm.ftcrobotcontroller.BuildConfig.class);
    RobotLog.logBuildConfig(com.qualcomm.robotcore.BuildConfig.class);
    RobotLog.logBuildConfig(com.qualcomm.hardware.BuildConfig.class);
    RobotLog.logBuildConfig(com.qualcomm.ftccommon.BuildConfig.class);
    RobotLog.logBuildConfig(com.google.blocks.BuildConfig.class);
    RobotLog.logBuildConfig(org.firstinspires.inspection.BuildConfig.class);
  }

  protected void readNetworkType() {
    // The code here used to defer to the value found in a configuration file
    // to configure the network type. If the file was absent, then it initialized
    // it with a default.
    //
    // However, bugs have been reported with that approach (empty config files, specifically).
    // Moreover, the non-Wifi-Direct networking is end-of-life, so the simplest and most robust
    // (e.g.: no one can screw things up by messing with the contents of the config file) fix is
    // to do away with configuration file entirely.
    networkType = NetworkType.WIFIDIRECT;
    programmingModeController.setCurrentNetworkType(networkType);

    // update the preferences
    preferencesHelper.writeStringPrefIfDifferent(context.getString(com.qualcomm.robotcore.R.string.pref_network_connection_type), networkType.toString());
  }

  @Override
  public void onWindowFocusChanged(boolean hasFocus){
    super.onWindowFocusChanged(hasFocus);
    // When the window loses focus (e.g., the action overflow is shown),
    // cancel any pending hide action. When the window gains focus,
    // hide the system UI.
    if (hasFocus) {
      if (ImmersiveMode.apiOver19()){
        // Immersive flag only works on API 19 and above.
        immersion.hideSystemUI();
      }
    } else {
      immersion.cancelSystemUIHide();
    }
  }

  @Override
  public boolean onCreateOptionsMenu(Menu menu) {
    getMenuInflater().inflate(R.menu.ftc_robot_controller, menu);
    return true;
  }

  @Override
  public boolean onOptionsItemSelected(MenuItem item) {
    int id = item.getItemId();

    if (id == R.id.action_programming_mode) {
      if (cfgFileMgr.getActiveConfig().isNoConfig()) {
        // Tell the user they must configure the robot before starting programming mode.
        AppUtil.getInstance().showToast(UILocation.BOTH, context, context.getString(R.string.toastConfigureRobotBeforeProgrammingMode));
      } else {
        Intent programmingModeIntent = new Intent(ProgrammingModeActivity.launchIntent);
        programmingModeIntent.putExtra(
            LaunchActivityConstantsList.PROGRAMMING_MODE_ACTIVITY_NETWORK_TYPE, networkType);
        startActivity(programmingModeIntent);
      }
      return true;
    } else if (id == R.id.action_inspection_mode) {
      Intent inspectionModeIntent = new Intent(RcInspectionActivity.rcLaunchIntent);
      startActivity(inspectionModeIntent);
      return true;
    }
    else if (id == R.id.action_blocks) {
      Intent blocksIntent = new Intent(BlocksActivity.launchIntent);
      startActivity(blocksIntent);
      return true;
    }
    else if (id == R.id.action_restart_robot) {
      dimmer.handleDimTimer();
      AppUtil.getInstance().showToast(UILocation.BOTH, context, context.getString(R.string.toastRestartingRobot));
      requestRobotRestart();
      return true;
    }
    else if (id == R.id.action_configure_robot) {
      EditParameters parameters = new EditParameters();
      Intent intentConfigure = new Intent(FtcLoadFileActivity.launchIntent);
      parameters.putIntent(intentConfigure);
      startActivityForResult(intentConfigure, LaunchActivityConstantsList.FTC_CONFIGURE_REQUEST_CODE_ROBOT_CONTROLLER);
      // Handled: consume the event like every other recognized menu item.
      return true;
    }
    else if (id == R.id.action_settings) {
      Intent settingsIntent = new Intent(FtcRobotControllerSettingsActivity.launchIntent);
      startActivityForResult(settingsIntent, LaunchActivityConstantsList.FTC_CONFIGURE_REQUEST_CODE_ROBOT_CONTROLLER);
      return true;
    }
    else if (id == R.id.action_about) {
      Intent intent = new Intent(AboutActivity.launchIntent);
      intent.putExtra(LaunchActivityConstantsList.ABOUT_ACTIVITY_CONNECTION_TYPE, networkType);
      startActivity(intent);
      return true;
    }
    else if (id == R.id.action_exit_app) {
      finish();
      return true;
    }

    return super.onOptionsItemSelected(item);
  }

  @Override
  public void onConfigurationChanged(Configuration newConfig) {
    super.onConfigurationChanged(newConfig);
    // don't destroy assets on screen rotation
  }

  @Override
  protected void onActivityResult(int request, int result, Intent intent) {
    if (request == REQUEST_CONFIG_WIFI_CHANNEL) {
      if (result == RESULT_OK) {
        AppUtil.getInstance().showToast(UILocation.BOTH, context, context.getString(R.string.toastWifiConfigurationComplete));
      }
    }
    if (request == LaunchActivityConstantsList.FTC_CONFIGURE_REQUEST_CODE_ROBOT_CONTROLLER) {
      // We always do a refresh, whether it was a cancel or an OK, for robustness
      cfgFileMgr.getActiveConfigAndUpdateUI();
    }
  }

  public void onServiceBind(FtcRobotControllerService service) {
    RobotLog.vv(FtcRobotControllerService.TAG, "%s.controllerService=bound", TAG);
    controllerService = service;
    updateUI.setControllerService(controllerService);

    updateUIAndRequestRobotSetup();
  }

  private void updateUIAndRequestRobotSetup() {
    if (controllerService != null) {
      callback.networkConnectionUpdate(controllerService.getNetworkConnectionStatus());
      callback.updateRobotStatus(controllerService.getRobotStatus());
      requestRobotSetup();
    }
  }

  /**
   * Builds the hardware map from the active configuration (falling back to a no-config file when
   * the XML resource is missing) and asks the controller service to set up the robot.
   */
  private void requestRobotSetup() {
    if (controllerService == null) return;

    RobotConfigFile file = cfgFileMgr.getActiveConfigAndUpdateUI();
    HardwareFactory hardwareFactory = new HardwareFactory(context);
    try {
      hardwareFactory.setXmlPullParser(file.getXml());
    } catch (Resources.NotFoundException e) {
      file = RobotConfigFile.noConfig(cfgFileMgr);
      hardwareFactory.setXmlPullParser(file.getXml());
      cfgFileMgr.setActiveConfigAndUpdateUI(false, file);
    }

    eventLoop = new FtcEventLoop(hardwareFactory, createOpModeRegister(), callback, this, programmingModeController);
    FtcEventLoopIdle idleLoop = new FtcEventLoopIdle(hardwareFactory, callback, this, programmingModeController);

    controllerService.setCallback(callback);
    controllerService.setupRobot(eventLoop, idleLoop);

    passReceivedUsbAttachmentsToEventLoop();
  }

  protected OpModeRegister createOpModeRegister() {
    return new FtcOpModeRegister();
  }

  private void shutdownRobot() {
    if (controllerService != null) controllerService.shutdownRobot();
  }

  private void requestRobotRestart() {
    AppUtil.getInstance().showToast(UILocation.BOTH, AppUtil.getDefContext().getString(com.qualcomm.ftccommon.R.string.toastRestartingRobot));
    //
    shutdownRobot();
    requestRobotSetup();
    //
    AppUtil.getInstance().showToast(UILocation.BOTH, AppUtil.getDefContext().getString(com.qualcomm.ftccommon.R.string.toastRestartRobotComplete));
  }

  protected void hittingMenuButtonBrightensScreen() {
    ActionBar actionBar = getActionBar();
    if (actionBar != null) {
      actionBar.addOnMenuVisibilityListener(new ActionBar.OnMenuVisibilityListener() {
        @Override
        public void onMenuVisibilityChanged(boolean isVisible) {
          if (isVisible) {
            dimmer.handleDimTimer();
          }
        }
      });
    }
  }
}
| |
package com.bitsofproof.supernode.account;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.bitsofproof.supernode.api.BCSAPI;
import com.bitsofproof.supernode.api.BCSAPIException;
import com.bitsofproof.supernode.api.Block;
import com.bitsofproof.supernode.api.Transaction;
import com.bitsofproof.supernode.api.TransactionInput;
import com.bitsofproof.supernode.api.TrunkListener;
import com.bitsofproof.supernode.common.Hash;
public class ConfirmationManager implements TrunkListener
{
private static final Logger log = LoggerFactory.getLogger (ConfirmationManager.class);
private final Set<AccountManager> accounts = Collections.synchronizedSet (new HashSet<AccountManager> ());
private final LinkedList<String> trunk = new LinkedList<> ();
private final Map<String, Set<Transaction>> inputs = new HashMap<> ();
private final Map<String, Set<Transaction>> confirmations = new HashMap<> ();
private int height;
private final Set<ConfirmationListener> confirmationListener = Collections.synchronizedSet (new HashSet<ConfirmationListener> ());
public synchronized void addAccount (AccountManager account)
{
accounts.add (account);
}
public synchronized void removeAccount (AccountManager account)
{
accounts.remove (account);
}
public synchronized void init (BCSAPI api, int trunkLength, List<String> inventory) throws BCSAPIException
{
trunk.clear ();
if ( inventory != null )
{
Collections.copy (trunk, inventory);
}
api.catchUp (trunk, trunkLength, true, this);
Block highest = api.getBlockHeader (trunk.getFirst ());
height = highest.getHeight ();
}
public synchronized void init (BCSAPI api, int trunkLength) throws BCSAPIException
{
init (api, trunkLength, null);
}
public synchronized int getHeight ()
{
return height;
}
@Override
public synchronized void trunkUpdate (List<Block> added)
{
Set<Transaction> reorgedTransactions = new HashSet<> ();
Block first = added.get (0);
if ( !trunk.isEmpty () && !trunk.getFirst ().equals (first.getPreviousHash ()) )
{
log.trace ("Chain reorg through " + first.getHash ());
if ( trunk.contains (first.getPreviousHash ()) )
{
do
{
String removed = trunk.removeFirst ();
log.trace ("Removing block " + removed);
if ( confirmations.containsKey (removed) )
{
for ( Transaction t : confirmations.get (removed) )
{
t.setBlockHash (null);
t.setBlocktime (new Date ().getTime () / 1000);
t.setHeight (0);
}
reorgedTransactions.addAll (confirmations.remove (removed));
}
} while ( !first.getPreviousHash ().equals (trunk.getFirst ()) );
}
else
{
log.trace ("Removing all blocks");
trunk.clear ();
Iterator<String> ri = confirmations.keySet ().iterator ();
while ( ri.hasNext () )
{
String removed = ri.next ();
for ( Transaction t : confirmations.get (removed) )
{
t.setBlockHash (null);
t.setBlocktime (new Date ().getTime () / 1000);
t.setHeight (0);
reorgedTransactions.add (t);
}
ri.remove ();
}
}
}
for ( Block b : added )
{
trunk.addFirst (b.getHash ());
log.trace ("New highest block " + trunk.getFirst ());
if ( b.getTransactions () != null )
{
for ( Transaction t : b.getTransactions () )
{
t.setBlockHash (b.getHash ());
t.setHeight (b.getHeight ());
t.setBlocktime (b.getCreateTime ());
reorgedTransactions.remove (t);
checkDoubleSpend (t);
boolean cache = false;
for ( AccountManager account : accounts )
{
if ( account.process (t) || account.isKnownTransaction (t) )
{
log.trace ("confirmation for " + t.getHash ());
cache = true;
}
}
if ( cache )
{
cacheTransaction (t);
notifyListener (t);
}
}
}
height = b.getHeight ();
}
for ( Transaction n : reorgedTransactions )
{
log.trace ("un-confirmed " + n.getHash ());
notifyListener (n);
}
notifyListener (null);
}
private void checkDoubleSpend (Transaction t)
{
Set<Transaction> doubleSpent = new HashSet<> ();
for ( TransactionInput input : t.getInputs () )
{
if ( inputs.containsKey (input.getSourceHash ()) )
{
for ( Transaction prev : inputs.get (input.getSourceHash ()) )
{
if ( !prev.equals (t) )
{
for ( TransactionInput pi : prev.getInputs () )
{
if ( pi.getSourceHash ().equals (input.getSourceHash ()) && pi.getIx () == input.getIx () )
{
prev.setHeight (0);
prev.setBlockHash (null);
prev.setOffendingTx (t.getHash ());
doubleSpent.add (prev);
break;
}
}
}
}
}
}
for ( Transaction f : doubleSpent )
{
log.trace ("Double spend " + t.getHash () + " replaces " + f.getHash ());
for ( AccountManager account : accounts )
{
account.process (f);
}
forgetTransaction (f);
notifyListener (f);
}
}
private void cacheTransaction (Transaction t)
{
Set<Transaction> ts = confirmations.get (t.getBlockHash ());
if ( ts == null )
{
confirmations.put (t.getBlockHash (), ts = new HashSet<Transaction> ());
}
ts.add (t);
for ( TransactionInput i : t.getInputs () )
{
if ( !i.getSourceHash ().equals (Hash.ZERO_HASH_STRING) )
{
Set<Transaction> twithi = inputs.get (i.getSourceHash ());
if ( twithi == null )
{
twithi = new HashSet<Transaction> ();
inputs.put (i.getSourceHash (), twithi);
}
twithi.add (t);
}
}
}
private void forgetTransaction (Transaction t)
{
for ( TransactionInput i : t.getInputs () )
{
inputs.remove (i.getSourceHash ());
}
}
public void addConfirmationListener (ConfirmationListener listener)
{
confirmationListener.add (listener);
}
public void removeConfirmationListener (ConfirmationListener listener)
{
confirmationListener.remove (listener);
}
private void notifyListener (Transaction t)
{
ArrayList<ConfirmationListener> al = new ArrayList<> ();
synchronized ( confirmationListener )
{
al.addAll (confirmationListener);
}
for ( ConfirmationListener l : al )
{
try
{
if ( t != null )
{
l.confirmed (t);
}
else
{
l.newHeight (height);
}
}
catch ( Exception e )
{
log.error ("Uncaught exception in account listener", e);
}
}
}
}
| |
/*
* Autopsy Forensic Browser
*
* Copyright 2015-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.keywordsearch;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.List;
import java.util.MissingResourceException;
import java.util.logging.Level;
import javax.swing.JDialog;
import javax.swing.JOptionPane;
import javax.swing.SwingUtilities;
import org.apache.commons.lang.math.NumberUtils;
import org.apache.commons.io.FileUtils;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.openide.util.NbBundle;
import org.openide.util.lookup.ServiceProvider;
import org.openide.util.lookup.ServiceProviders;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.CaseMetadata;
import org.sleuthkit.autopsy.core.RuntimeProperties;
import org.sleuthkit.autopsy.coreutils.FileUtil;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.appservices.AutopsyService;
import org.sleuthkit.autopsy.progress.ProgressIndicator;
import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchService;
import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchServiceException;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;
/**
* An implementation of the KeywordSearchService interface that uses Solr for
* text indexing and search.
*/
@ServiceProviders(value = {
@ServiceProvider(service = KeywordSearchService.class)
,
@ServiceProvider(service = AutopsyService.class)}
)
public class SolrSearchService implements KeywordSearchService, AutopsyService {
private static final String BAD_IP_ADDRESS_FORMAT = "ioexception occurred when talking to server"; //NON-NLS
private static final String SERVER_REFUSED_CONNECTION = "server refused connection"; //NON-NLS
private static final int IS_REACHABLE_TIMEOUT_MS = 1000;
private static final int LARGE_INDEX_SIZE_GB = 50;
private static final int GIANT_INDEX_SIZE_GB = 500;
private static final Logger logger = Logger.getLogger(SolrSearchService.class.getName());
/**
* Indexes the given content for keyword search.
*
* IMPORTANT: Currently, there are two correct uses for this code:
*
* 1) Indexing an artifact created during while either the file level ingest
* module pipeline or the first stage data source level ingest module
* pipeline of an ingest job is running.
*
* 2) Indexing a report.
*
* @param content The content to index.
*
* @throws TskCoreException If there is a problem indexing the content.
*/
@Override
public void index(Content content) throws TskCoreException {
/*
* TODO (JIRA-1099): The following code has some issues that need to be
* resolved. For artifacts, it is assumed that the posting of artifacts
* is only occuring during an ingest job with an enabled keyword search
* ingest module handling index commits; it also assumes that the
* artifacts are only posted by modules in the either the file level
* ingest pipeline or the first stage data source level ingest pipeline,
* so that the artifacts will be searched during a periodic or final
* keyword search. It also assumes that the only other type of Content
* for which this API will be called are Reports generated at a time
* when doing a commit is required and desirable, i.e., in a context
* other than an ingest job.
*/
if (content == null) {
return;
}
final Ingester ingester = Ingester.getDefault();
if (content instanceof BlackboardArtifact) {
BlackboardArtifact artifact = (BlackboardArtifact) content;
if (artifact.getArtifactID() > 0) {
/*
* Artifact indexing is only supported for artifacts that use
* negative artifact ids to avoid overlapping with the object
* ids of other types of Content.
*/
return;
}
try {
ingester.indexMetaDataOnly(artifact);
ingester.indexText(new ArtifactTextExtractor(), artifact, null);
} catch (Ingester.IngesterException ex) {
throw new TskCoreException(ex.getCause().getMessage(), ex);
}
} else {
try {
ingester.indexText(new TikaTextExtractor(), content, null);
} catch (Ingester.IngesterException ex) {
try {
// Try the StringsTextExtractor if Tika extractions fails.
ingester.indexText(new StringsTextExtractor(), content, null);
} catch (Ingester.IngesterException ex1) {
throw new TskCoreException(ex.getCause().getMessage(), ex1);
}
}
ingester.commit();
}
}
/**
* Tries to connect to the keyword search service.
*
* @param host The hostname or IP address of the service.
* @param port The port used by the service.
*
* @throws KeywordSearchServiceException if cannot connect.
*/
@Override
public void tryConnect(String host, int port) throws KeywordSearchServiceException {
HttpSolrServer solrServer = null;
if (host == null || host.isEmpty()) {
throw new KeywordSearchServiceException(NbBundle.getMessage(SolrSearchService.class, "SolrConnectionCheck.MissingHostname")); //NON-NLS
}
try {
solrServer = new HttpSolrServer("http://" + host + ":" + Integer.toString(port) + "/solr"); //NON-NLS
KeywordSearch.getServer().connectToSolrServer(solrServer);
} catch (SolrServerException ex) {
throw new KeywordSearchServiceException(NbBundle.getMessage(SolrSearchService.class, "SolrConnectionCheck.HostnameOrPort")); //NON-NLS
} catch (IOException ex) {
String result = NbBundle.getMessage(SolrSearchService.class, "SolrConnectionCheck.HostnameOrPort"); //NON-NLS
String message = ex.getCause().getMessage().toLowerCase();
if (message.startsWith(SERVER_REFUSED_CONNECTION)) {
try {
if (InetAddress.getByName(host).isReachable(IS_REACHABLE_TIMEOUT_MS)) {
// if we can reach the host, then it's probably port problem
result = Bundle.SolrConnectionCheck_Port();
} else {
result = NbBundle.getMessage(SolrSearchService.class, "SolrConnectionCheck.HostnameOrPort"); //NON-NLS
}
} catch (IOException | MissingResourceException any) {
// it may be anything
result = NbBundle.getMessage(SolrSearchService.class, "SolrConnectionCheck.HostnameOrPort"); //NON-NLS
}
} else if (message.startsWith(BAD_IP_ADDRESS_FORMAT)) {
result = NbBundle.getMessage(SolrSearchService.class, "SolrConnectionCheck.Hostname"); //NON-NLS
}
throw new KeywordSearchServiceException(result);
} catch (NumberFormatException ex) {
throw new KeywordSearchServiceException(Bundle.SolrConnectionCheck_Port());
} catch (IllegalArgumentException ex) {
throw new KeywordSearchServiceException(ex.getMessage());
} finally {
if (null != solrServer) {
solrServer.shutdown();
}
}
}
/**
* Deletes Solr core for a case.
*
* @param metadata The CaseMetadata which will have its core deleted.
*/
@NbBundle.Messages({
"# {0} - case directory", "SolrSearchService.exceptionMessage.noIndexMetadata=Unable to create IndexMetaData from case directory: {0}",
"SolrSearchService.exceptionMessage.noCurrentSolrCore=IndexMetadata did not contain a current Solr core so could not delete the case",
"# {0} - index folder path", "SolrSearchService.exceptionMessage.failedToDeleteIndexFiles=Failed to delete text index files at {0}"
})
@Override
public void deleteTextIndex(CaseMetadata metadata) throws KeywordSearchServiceException {
String caseDirectory = metadata.getCaseDirectory();
IndexMetadata indexMetadata;
try {
indexMetadata = new IndexMetadata(caseDirectory);
} catch (IndexMetadata.TextIndexMetadataException ex) {
logger.log(Level.WARNING, NbBundle.getMessage(SolrSearchService.class, "SolrSearchService.exceptionMessage.noIndexMetadata", caseDirectory), ex);
throw new KeywordSearchServiceException(NbBundle.getMessage(SolrSearchService.class, "SolrSearchService.exceptionMessage.noIndexMetadata", caseDirectory), ex);
}
//find the index for the current version of solr (the one we are connected to) and delete its core using the index name
String currentSchema = IndexFinder.getCurrentSchemaVersion();
String currentSolr = IndexFinder.getCurrentSolrVersion();
for (Index index : indexMetadata.getIndexes()) {
if (index.getSolrVersion().equals(currentSolr) && index.getSchemaVersion().equals(currentSchema)) {
/*
* Unload/delete the core on the server and then delete the text
* index files.
*/
KeywordSearch.getServer().deleteCore(index.getIndexName(), metadata);
if (!FileUtil.deleteDir(new File(index.getIndexPath()).getParentFile())) {
throw new KeywordSearchServiceException(Bundle.SolrSearchService_exceptionMessage_failedToDeleteIndexFiles(index.getIndexPath()));
}
}
return; //only one core exists for each combination of solr and schema version
}
//this code this code will only execute if an index for the current core was not found
logger.log(Level.WARNING, NbBundle.getMessage(SolrSearchService.class,
"SolrSearchService.exceptionMessage.noCurrentSolrCore"));
throw new KeywordSearchServiceException(NbBundle.getMessage(SolrSearchService.class,
"SolrSearchService.exceptionMessage.noCurrentSolrCore"));
}
@Override
public void close() throws IOException {
}
@Override
public String getServiceName() {
return NbBundle.getMessage(this.getClass(), "SolrSearchService.ServiceName");
}
/**
* Creates/opens the Solr core/text index for a case
*
* @param context The case context.
*
* @throws
* org.sleuthkit.autopsy.appservices.AutopsyService.AutopsyServiceException
*/
@Override
@NbBundle.Messages({
"SolrSearch.lookingForMetadata.msg=Looking for text index metadata file",
"SolrSearch.readingIndexes.msg=Reading text index metadata file",
"SolrSearch.findingIndexes.msg=Looking for existing text index directories",
"SolrSearch.creatingNewIndex.msg=Creating new text index",
"SolrSearch.checkingForLatestIndex.msg=Looking for text index with latest Solr and schema version",
"SolrSearch.indentifyingIndex.msg=Identifying text index to use",
"SolrSearch.openCore.msg=Opening text index",
"SolrSearch.openLargeCore.msg=Opening text index. This may take several minutes.",
"SolrSearch.openGiantCore.msg=Opening text index. Text index for this case is very large and may take long time to load.",
"SolrSearch.complete.msg=Text index successfully opened"})
public void openCaseResources(CaseContext context) throws AutopsyServiceException {
if (context.cancelRequested()) {
return;
}
ProgressIndicator progress = context.getProgressIndicator();
int totalNumProgressUnits = 7;
int progressUnitsCompleted = 0;
String caseDirPath = context.getCase().getCaseDirectory();
Case theCase = context.getCase();
List<Index> indexes = new ArrayList<>();
progress.start(Bundle.SolrSearch_lookingForMetadata_msg(), totalNumProgressUnits);
if (IndexMetadata.isMetadataFilePresent(caseDirPath)) {
try {
// metadata file exists, get list of existing Solr cores for this case
progressUnitsCompleted++;
progress.progress(Bundle.SolrSearch_findingIndexes_msg(), progressUnitsCompleted);
IndexMetadata indexMetadata = new IndexMetadata(caseDirPath);
indexes = indexMetadata.getIndexes();
} catch (IndexMetadata.TextIndexMetadataException ex) {
logger.log(Level.SEVERE, String.format("Unable to read text index metadata file"), ex);
throw new AutopsyServiceException("Unable to read text index metadata file", ex);
}
} else {
// metadata file doesn't exist.
// do case subdirectory search to look for Solr 4 Schema 1.8 indexes
progressUnitsCompleted++;
progress.progress(Bundle.SolrSearch_findingIndexes_msg(), progressUnitsCompleted);
Index oldIndex = IndexFinder.findOldIndexDir(theCase);
if (oldIndex != null) {
// add index to the list of indexes that exist for this case
indexes.add(oldIndex);
}
}
if (context.cancelRequested()) {
return;
}
// check if we found any existing indexes
Index currentVersionIndex = null;
if (indexes.isEmpty()) {
// new case that doesn't have an existing index. create new index folder
progressUnitsCompleted++;
progress.progress(Bundle.SolrSearch_creatingNewIndex_msg(), progressUnitsCompleted);
currentVersionIndex = IndexFinder.createLatestVersionIndexDir(theCase);
// add current index to the list of indexes that exist for this case
indexes.add(currentVersionIndex);
} else {
// check if one of the existing indexes is for latest Solr version and schema
progressUnitsCompleted++;
progress.progress(Bundle.SolrSearch_checkingForLatestIndex_msg(), progressUnitsCompleted);
currentVersionIndex = IndexFinder.findLatestVersionIndexDir(indexes);
if (currentVersionIndex == null) {
// found existing index(es) but none were for latest Solr version and schema version
progressUnitsCompleted++;
progress.progress(Bundle.SolrSearch_indentifyingIndex_msg(), progressUnitsCompleted);
Index indexToUse = IndexFinder.identifyIndexToUse(indexes);
if (indexToUse == null) {
// unable to find index that can be used
throw new AutopsyServiceException("Unable to find index that can be used for this case");
}
if (context.cancelRequested()) {
return;
}
double currentSolrVersion = NumberUtils.toDouble(IndexFinder.getCurrentSolrVersion());
double indexSolrVersion = NumberUtils.toDouble(indexToUse.getSolrVersion());
if (indexSolrVersion == currentSolrVersion) {
// latest Solr version but not latest schema. index should be used in read-only mode
if (RuntimeProperties.runningWithGUI()) {
// pop up a message box to indicate the read-only restrictions.
JOptionPane optionPane = new JOptionPane(
NbBundle.getMessage(this.getClass(), "SolrSearchService.IndexReadOnlyDialog.msg"),
JOptionPane.WARNING_MESSAGE,
JOptionPane.DEFAULT_OPTION);
try {
SwingUtilities.invokeAndWait(() -> {
JDialog dialog = optionPane.createDialog(NbBundle.getMessage(this.getClass(), "SolrSearchService.IndexReadOnlyDialog.title"));
dialog.setVisible(true);
});
} catch (InterruptedException ex) {
// Cancelled
return;
} catch (InvocationTargetException ex) {
throw new AutopsyServiceException("Error displaying limited search features warning dialog", ex);
}
}
// proceed with case open
currentVersionIndex = indexToUse;
} else {
// index needs to be upgraded to latest supported version of Solr
throw new AutopsyServiceException("Unable to find index to use for Case open");
}
}
}
try {
// update text index metadata file
if (!indexes.isEmpty()) {
IndexMetadata indexMetadata = new IndexMetadata(caseDirPath, indexes);
}
} catch (IndexMetadata.TextIndexMetadataException ex) {
throw new AutopsyServiceException("Failed to save Solr core info in text index metadata file", ex);
}
// open core
try {
// check text index size to gauge estimated time to open/load the index
long indexSizeInBytes = FileUtils.sizeOfDirectory(new File(currentVersionIndex.getIndexPath()));
long sizeInGb = indexSizeInBytes / 1000000000;
if (sizeInGb < LARGE_INDEX_SIZE_GB) {
progress.progress(Bundle.SolrSearch_openCore_msg(), totalNumProgressUnits - 1);
} else if (sizeInGb >= LARGE_INDEX_SIZE_GB && sizeInGb < GIANT_INDEX_SIZE_GB) {
progress.switchToIndeterminate(Bundle.SolrSearch_openLargeCore_msg());
} else {
progress.switchToIndeterminate(Bundle.SolrSearch_openGiantCore_msg());
}
KeywordSearch.getServer().openCoreForCase(theCase, currentVersionIndex);
} catch (KeywordSearchModuleException ex) {
throw new AutopsyServiceException(String.format("Failed to open or create core for %s", caseDirPath), ex);
}
progress.progress(Bundle.SolrSearch_complete_msg(), totalNumProgressUnits);
}
/**
* Closes the open core.
*
* @param context
*
* @throws
* org.sleuthkit.autopsy.appservices.AutopsyService.AutopsyServiceException
*/
@Override
public void closeCaseResources(CaseContext context) throws AutopsyServiceException {
/*
* TODO (JIRA 2525): The following code KeywordSearch.CaseChangeListener
* gambles that any BlackboardResultWriters (SwingWorkers) will complete
* in less than roughly two seconds. This stuff should be reworked using
* an ExecutorService and tasks with Futures.
*/
AdHocSearchChildFactory.BlackboardResultWriter.stopAllWriters();
try {
Thread.sleep(2000);
} catch (InterruptedException ex) {
logger.log(Level.SEVERE, "Unexpected interrupt while waiting for BlackboardResultWriters to terminate", ex);
}
try {
KeywordSearch.getServer().closeCore();
} catch (KeywordSearchModuleException ex) {
throw new AutopsyServiceException(String.format("Failed to close core for %s", context.getCase().getCaseDirectory()), ex);
}
}
/**
* Adds an artifact to the keyword search text index as a concantenation of
* all of its attributes.
*
* @param artifact The artifact to index.
*
* @throws org.sleuthkit.datamodel.TskCoreException
* @deprecated Call index(Content) instead.
*/
@Deprecated
@Override
public void indexArtifact(BlackboardArtifact artifact) throws TskCoreException {
if (artifact == null) {
return;
}
// We only support artifact indexing for Autopsy versions that use
// the negative range for artifact ids.
if (artifact.getArtifactID() > 0) {
return;
}
final Ingester ingester = Ingester.getDefault();
try {
ingester.indexMetaDataOnly(artifact);
ingester.indexText(new ArtifactTextExtractor(), artifact, null);
} catch (Ingester.IngesterException ex) {
throw new TskCoreException(ex.getCause().getMessage(), ex);
}
}
}
| |
/*
* Copyright 2022 Red Hat
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.hal.core.runtime.host;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import javax.inject.Provider;
import org.jboss.hal.ballroom.dialog.BlockingDialog;
import org.jboss.hal.ballroom.dialog.Dialog;
import org.jboss.hal.ballroom.dialog.DialogFactory;
import org.jboss.hal.ballroom.form.Form;
import org.jboss.hal.core.Core;
import org.jboss.hal.core.mbui.form.OperationFormBuilder;
import org.jboss.hal.core.runtime.Action;
import org.jboss.hal.core.runtime.Result;
import org.jboss.hal.core.runtime.Timeouts;
import org.jboss.hal.core.runtime.server.Server;
import org.jboss.hal.core.runtime.server.ServerActions;
import org.jboss.hal.dmr.Composite;
import org.jboss.hal.dmr.ModelNode;
import org.jboss.hal.dmr.Operation;
import org.jboss.hal.dmr.ResourceAddress;
import org.jboss.hal.dmr.dispatch.Dispatcher;
import org.jboss.hal.flow.Progress;
import org.jboss.hal.meta.AddressTemplate;
import org.jboss.hal.meta.Metadata;
import org.jboss.hal.meta.processing.MetadataProcessor;
import org.jboss.hal.meta.processing.MetadataProcessor.MetadataCallback;
import org.jboss.hal.resources.Ids;
import org.jboss.hal.resources.Resources;
import org.jboss.hal.spi.Callback;
import org.jboss.hal.spi.Footer;
import org.jboss.hal.spi.Message;
import org.jboss.hal.spi.MessageEvent;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.gwt.safehtml.shared.SafeHtml;
import com.google.web.bindery.event.shared.EventBus;
import rx.CompletableSubscriber;
import rx.Subscription;
import static elemental2.dom.DomGlobal.setTimeout;
import static java.util.Collections.emptyList;
import static org.jboss.hal.ballroom.dialog.Dialog.Size.MEDIUM;
import static org.jboss.hal.core.runtime.Timeouts.hostTimeout;
import static org.jboss.hal.dmr.ModelDescriptionConstants.*;
import static org.jboss.hal.dmr.dispatch.TimeoutHandler.repeatUntilTimeout;
import static org.jboss.hal.resources.UIConstants.LONG_TIMEOUT;
import static org.jboss.hal.resources.UIConstants.SHORT_TIMEOUT;
public class HostActions implements Timeouts {
private static final Logger logger = LoggerFactory.getLogger(HostActions.class);
private static AddressTemplate hostTemplate(Host host) {
return AddressTemplate.of("/host=" + host.getAddressName());
}
private final EventBus eventBus;
private final Dispatcher dispatcher;
private final MetadataProcessor metadataProcessor;
private final Provider<Progress> progress;
private final ServerActions serverActions;
private final Resources resources;
private final Map<String, Host> pendingHosts;
@Inject
public HostActions(EventBus eventBus,
Dispatcher dispatcher,
MetadataProcessor metadataProcessor,
@Footer Provider<Progress> progress,
ServerActions serverActions,
Resources resources) {
this.eventBus = eventBus;
this.dispatcher = dispatcher;
this.metadataProcessor = metadataProcessor;
this.progress = progress;
this.serverActions = serverActions;
this.resources = resources;
this.pendingHosts = new HashMap<>();
}
// ------------------------------------------------------ reload
    /**
     * Reloads the given host after user confirmation, optionally restarting its
     * started servers. Metadata for the host resource is looked up first to
     * build the operation form.
     */
    public void reload(Host host) {
        metadataProcessor.lookup(hostTemplate(host), progress.get(), new MetadataCallback() {
            @Override
            public void onMetadata(Metadata metadata) {
                // Form exposing only the restart-servers flag of the :reload operation.
                Form<ModelNode> form = new OperationFormBuilder<>(
                        Ids.build(RELOAD_HOST, host.getName(), Ids.FORM), metadata, RELOAD)
                        .include(RESTART_SERVERS)
                        .build();

                SafeHtml question;
                if (host.isDomainController()) {
                    question = resources.messages().reloadDomainControllerQuestion(host.getName());
                } else {
                    question = resources.messages().reloadHostControllerQuestion(host.getName());
                }
                Dialog dialog = DialogFactory.buildConfirmation(
                        resources.messages().reload(host.getName()), question, form.element(), MEDIUM,
                        () -> {
                            form.save();
                            boolean restartServers = form.getModel().get(RESTART_SERVERS).asBoolean();
                            // Mark the host (and, if requested, its started servers) as pending
                            // before the operation is executed.
                            prepare(host, restartServers ? host.getServers(Server::isStarted) : emptyList(),
                                    Action.RELOAD);
                            Operation operation = new Operation.Builder(host.getAddress(), RELOAD)
                                    .param(RESTART_SERVERS, restartServers)
                                    .build();

                            // execute the reload with a little delay to ensure the confirmation dialog is closed
                            // before the next dialog is opened (only one modal can be open at a time!)
                            setTimeout((o) -> {
                                if (host.isDomainController()) {
                                    // DC reload shows a blocking "pending" dialog while polling.
                                    domainControllerOperation(host, operation, hostTimeout(host, Action.RELOAD),
                                            restartServers ? host.getServers(Server::isStarted) : emptyList(),
                                            resources.messages().reload(host.getName()),
                                            resources.messages().reloadDomainControllerPending(host.getName()),
                                            resources.messages().reloadHostSuccess(host.getName()),
                                            resources.messages().reloadHostError(host.getName()),
                                            resources.messages().domainControllerTimeout(host.getName()));

                                } else {
                                    hostControllerOperation(host, operation, hostTimeout(host, Action.RELOAD),
                                            restartServers ? host.getServers(Server::isStarted) : emptyList(),
                                            resources.messages().reloadHostSuccess(host.getName()),
                                            resources.messages().reloadHostError(host.getName()),
                                            resources.messages().hostControllerTimeout(host.getName()));
                                }
                            }, SHORT_TIMEOUT);
                        });
                dialog.registerAttachable(form);
                dialog.show();

                // Pre-select "restart servers" as the default choice in the form.
                ModelNode model = new ModelNode();
                model.get(RESTART_SERVERS).set(true);
                form.edit(model);
            }

            @Override
            public void onError(Throwable error) {
                // Metadata lookup failed: surface the error, no operation is executed.
                MessageEvent.fire(eventBus,
                        Message.error(resources.messages().metadataError(), error.getMessage()));
            }
        });
    }
// ------------------------------------------------------ restart
public void restart(Host host) {
SafeHtml question = host.isDomainController()
? resources.messages().restartDomainControllerQuestion(host.getName())
: resources.messages().restartHostControllerQuestion(host.getName());
restart(host, question);
}
    /**
     * Restarts the given host after the user confirms the given question. All
     * servers of the host are marked as pending while the restart runs.
     */
    public void restart(Host host, SafeHtml question) {
        DialogFactory.showConfirmation(resources.messages().restart(host.getName()), question, () -> {
            // execute the restart with a little delay to ensure the confirmation dialog is closed
            // before the next dialog is opened (only one modal can be open at a time!)
            setTimeout((o) -> {
                // mark host and servers as pending *before* executing the operation
                prepare(host, host.getServers(), Action.RESTART);
                Operation operation = new Operation.Builder(host.getAddress(), SHUTDOWN)
                        .param(RESTART, true)
                        .build();
                if (host.isDomainController()) {
                    // DC restart shows a blocking "pending" dialog while polling.
                    domainControllerOperation(host, operation, hostTimeout(host, Action.RESTART), host.getServers(),
                            resources.messages().restart(host.getName()),
                            resources.messages().restartDomainControllerPending(host.getName()),
                            resources.messages().restartHostSuccess(host.getName()),
                            resources.messages().restartHostError(host.getName()),
                            resources.messages().domainControllerTimeout(host.getName()));

                } else {
                    hostControllerOperation(host, operation, hostTimeout(host, Action.RESTART), host.getServers(),
                            resources.messages().restartHostSuccess(host.getName()),
                            resources.messages().restartHostError(host.getName()),
                            resources.messages().hostControllerTimeout(host.getName()));
                }
            }, SHORT_TIMEOUT);
        });
    }
// ------------------------------------------------------ helper methods
// ------------------------------------------------------ helper methods
/**
 * Executes a lifecycle operation against the domain controller host: shows a blocking
 * "pending" dialog, runs {@code operation}, then polls {@link #ping(Host)} until the DC
 * responds again or {@code timeout} elapses. Success / timeout / failure all end in
 * {@link #finish(Host, List, Result, Message)} so pending state is always cleared.
 */
private void domainControllerOperation(Host host, Operation operation, int timeout, List<Server> servers,
String title, SafeHtml pendingMessage, SafeHtml successMessage, SafeHtml errorMessage,
SafeHtml timeoutMessage) {
// Blocking dialog: while the DC is down, the console must not be used.
BlockingDialog pendingDialog = DialogFactory.buildLongRunning(title, pendingMessage);
pendingDialog.show();
dispatcher.execute(operation, result -> repeatUntilTimeout(dispatcher, timeout, ping(host))
.subscribe(new CompletableSubscriber() {
@Override
public void onSubscribe(Subscription d) {
// nothing to do — polling is driven by repeatUntilTimeout
}
@Override
public void onCompleted() {
// wait a little bit before event handlers try to use the reloaded / restarted domain controller
setTimeout((o) -> {
pendingDialog.close();
finish(host, servers, Result.SUCCESS, Message.success(successMessage));
}, LONG_TIMEOUT);
}
@Override
public void onError(Throwable e) {
// the ping never succeeded within the timeout — tell the user and give up
pendingDialog.close();
DialogFactory.buildBlocking(title, timeoutMessage).show();
finish(host, servers, Result.TIMEOUT, null);
}
}),
// DMR failure / transport exception on the initial operation: close the
// pending dialog before reporting the error.
new HostFailedCallback(host, servers, errorMessage, pendingDialog::close),
new HostExceptionCallback(host, servers, errorMessage, pendingDialog::close));
}
/**
 * Executes a lifecycle operation against an ordinary host controller (not the DC):
 * runs {@code operation}, then polls {@link #ping(Host)} until the host responds again
 * or {@code timeout} elapses. Unlike the DC variant no blocking dialog is shown —
 * the console stays usable while the host restarts.
 */
private void hostControllerOperation(Host host, Operation operation, int timeout, List<Server> servers,
SafeHtml successMessage, SafeHtml errorMessage, SafeHtml timeoutMessage) {
dispatcher.execute(operation, result -> repeatUntilTimeout(dispatcher, timeout, ping(host))
.subscribe(new CompletableSubscriber() {
@Override
public void onSubscribe(Subscription d) {
// nothing to do — polling is driven by repeatUntilTimeout
}
@Override
public void onCompleted() {
// host came back within the timeout
finish(host, servers, Result.SUCCESS, Message.success(successMessage));
}
@Override
public void onError(Throwable e) {
// the ping never succeeded within the timeout
finish(host, servers, Result.TIMEOUT, Message.error(timeoutMessage));
}
}),
// no pending dialog here, so no cleanup callback is needed
new HostFailedCallback(host, servers, errorMessage, null),
new HostExceptionCallback(host, servers, errorMessage, null));
}
/** Marks the host and its servers as pending, then announces the action. */
private void prepare(Host host, List<Server> servers, Action action) {
    // The pending state must be in place *before* the event is fired, so
    // listeners already see the host and servers as pending.
    markAsPending(host);
    for (Server server : servers) {
        serverActions.markAsPending(server);
    }
    eventBus.fireEvent(new HostActionEvent(host, servers, action));
}
/** Clears the pending state, fires the result event and optionally a message. */
private void finish(Host host, List<Server> servers, Result result, Message message) {
    // The pending state must be cleared *before* the event is fired, so
    // listeners see the final state of the host and servers.
    clearPending(host);
    for (Server server : servers) {
        serverActions.clearPending(server);
    }
    eventBus.fireEvent(new HostResultEvent(host, servers, result));
    if (message == null) {
        return;
    }
    MessageEvent.fire(eventBus, message);
}
/** Registers the host as having a lifecycle action in flight. */
private void markAsPending(Host host) {
    String name = host.getName();
    Core.setPendingLifecycleAction(true);
    pendingHosts.put(name, host);
    logger.debug("Mark host {} as pending", name);
}
/** Removes the host's in-flight lifecycle marker. */
private void clearPending(Host host) {
    String name = host.getName();
    Core.setPendingLifecycleAction(false);
    pendingHosts.remove(name);
    logger.debug("Clear pending state for host {}", name);
}
/** @return whether a lifecycle action is currently in flight for this host */
public boolean isPending(Host host) {
    String name = host.getName();
    return pendingHosts.containsKey(name);
}
/**
 * Builds the operation used to poll whether the host is back after a reload / restart.
 * If the host has started servers, the ping is a {@link Composite} of a host
 * read-resource plus one read-resource per started server, so the host only counts
 * as "up" once its running servers respond as well. Otherwise the plain host
 * read-resource is used.
 *
 * <p>Fix: the original {@code else} branch rebuilt an {@code Operation} identical to the
 * one constructed above the {@code if} — the redundant duplicate construction is removed.
 */
private Operation ping(Host host) {
    ResourceAddress address = new ResourceAddress()
            .add(HOST, host.getName()); // do not use host.getAddressName() here!
    Operation operation = new Operation.Builder(address, READ_RESOURCE_OPERATION).build();

    if (host.hasServers(Server::isStarted)) {
        Operation[] operations = host.getServers(Server::isStarted).stream()
                .map(server -> {
                    ResourceAddress serverAddress = host.getAddress().add(SERVER, server.getName());
                    return new Operation.Builder(serverAddress, READ_RESOURCE_OPERATION).build();
                }).toArray(Operation[]::new);
        operation = new Composite(operation, operations);
    }
    return operation;
}
private class HostFailedCallback implements Dispatcher.OnFail {
private final Host host;
private final List<Server> servers;
private final SafeHtml errorMessage;
private final Callback cleanup;
HostFailedCallback(Host host, List<Server> servers, SafeHtml errorMessage, Callback cleanup) {
this.host = host;
this.servers = servers;
this.errorMessage = errorMessage;
this.cleanup = cleanup;
}
@Override
public void onFailed(Operation operation, String failure) {
finish(host, servers, Result.ERROR, Message.error(errorMessage, failure));
if (cleanup != null) {
cleanup.execute();
}
}
}
private class HostExceptionCallback implements Dispatcher.OnError {
private final Host host;
private final List<Server> servers;
private final SafeHtml errorMessage;
private final Callback cleanup;
HostExceptionCallback(Host host, List<Server> servers, SafeHtml errorMessage, Callback cleanup) {
this.host = host;
this.servers = servers;
this.errorMessage = errorMessage;
this.cleanup = cleanup;
}
@Override
public void onException(Operation operation, Throwable exception) {
finish(host, servers, Result.ERROR, Message.error(errorMessage, exception.getMessage()));
if (cleanup != null) {
cleanup.execute();
}
}
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.